Mirror of https://github.com/alvierahman90/otfmacros.git (synced 2024-12-15 12:01:59 +00:00)

Commit: d1218773a4 ("add comments")
Parent: de7be9c0f2
@@ -26,7 +26,9 @@ def get_args():
     return parser.parse_args()
 
 
-def is_vowel(letter):
+def pluralize(word):
+    """ Returns the plural form of a word. """
+def is_vowel(letter):
     if not isinstance(letter, str):
         raise ValueError("Argument 'letter' must be type str")
     if len(letter) != 1:
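The hunk above adds a `pluralize` stub with a one-line docstring directly in front of `is_vowel`, whose argument validation appears as context. As a reading aid, here is a minimal sketch of the behaviour those lines imply; the membership test `return letter in 'aeiou'` is visible as context in the next hunk, and the error message for the length check is an assumption, since the diff cuts off there.

# Sketch of the expected is_vowel behaviour, pieced together from the visible lines.
def is_vowel(letter):
    if not isinstance(letter, str):
        raise ValueError("Argument 'letter' must be type str")
    if len(letter) != 1:
        raise ValueError("Argument 'letter' must be a single character")  # assumed message
    return letter in 'aeiou'

assert is_vowel('a') is True
assert is_vowel('b') is False
# is_vowel('ab') and is_vowel(3) both raise ValueError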
@@ -34,10 +36,6 @@ def is_vowel(letter):
     return letter in 'aeiou'
 
 
-def pluralize(word):
-    """
-    Returns the plural form of a word.
-    """
     # TODO add more complex plural forms
     if word[-1] in 'sxz' or word[-2:] in ['ch', 'sh']:
         return word + 'es'
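This hunk drops the old multi-line docstring of `pluralize` from its original spot; the context lines preserve the only rule implemented so far: words ending in s, x, z, ch or sh take an 'es' suffix. A rough sketch of that rule, with an assumed plain-'s' fallback (the real function continues past the end of this hunk):

# Sketch of the pluralisation rule visible in the diff; the final fallback is an assumption.
def pluralize(word):
    """ Returns the plural form of a word. """
    # TODO add more complex plural forms
    if word[-1] in 'sxz' or word[-2:] in ['ch', 'sh']:
        return word + 'es'
    return word + 's'  # assumed default, not shown in this hunk

print(pluralize('box'))     # boxes
print(pluralize('church'))  # churches
print(pluralize('cat'))     # cats (under the assumed fallback)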
@@ -53,6 +51,7 @@ def pluralize(word):
 
 
 def upper_check(token, word):
+    """ Check if word needs to be capitalized and capitalise appropriately if that is the case. """
     all_caps = True
 
     for letter in token:
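The only change here is the new one-line docstring on `upper_check`; apart from `all_caps = True` and the `for letter in token:` loop, its body lies outside the hunk. Purely as an illustration of what a token-driven capitalisation check along those lines could look like (the repository's actual logic may well differ):

# Hypothetical sketch of a capitalisation check driven by the original token's casing.
# Only `all_caps = True` and the `for letter in token:` loop appear in the diff; the rest is assumed.
def upper_check(token, word):
    """ Check if word needs to be capitalized and capitalise appropriately if that is the case. """
    all_caps = True
    for letter in token:
        if letter.isalpha() and not letter.isupper():
            all_caps = False
    if all_caps:
        return word.upper()
    if token[:1].isupper():
        return word.capitalize()
    return word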
@@ -71,6 +70,13 @@ def upper_check(token, word):
 
 
 def process(input, macros):
+    """
+    This function takes the string `input` and a dict, ` macros`.
+    It substitutes any keys in `macro` with the corresponding value.
+    It also checks for any otf macros defined in the string and appends them to `macros`,
+    replacing that otf macro and any following instances of it.
+    It returns the substituted string.
+    """
     tokens = tokenize(input)
     macros = macros
 
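The docstring added here describes the core of the script: every key of `macros` found in `input` is replaced by its value, and any on-the-fly (otf) macro defined inside the text is appended to `macros` and expanded from then on. A hypothetical call, assuming a plain token-to-replacement dict; the macro name and input text below are invented for illustration.

# Hypothetical usage of process(); names and text are made up.
macros = {'.ph': 'public health'}   # assumed shape: macro token -> replacement text
text = 'Notes on .ph policy.'
result = process(text, macros)      # substitutes '.ph' wherever it occurs as a token
print(result)                       # 'Notes on public health policy.'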
@@ -168,9 +174,7 @@ def detokenize(tokens):
 
 
 def get_macros(input, child=False):
-    """
-    Turn a macros string into a list of tuples of macros
-    """
+    """ Turn a macros string into a list of tuples of macros """
     response = {}
 
     # turn input into list of tuples
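Here the three-line docstring of `get_macros` is collapsed to one line; only `response = {}` and the `# turn input into list of tuples` comment hint at how it works, and the macros-file format itself is not visible in this hunk. A hypothetical sketch, assuming one `name expansion` pair per line; the helper name and sample macros are invented.

# Hypothetical sketch of turning a macros string into (name, expansion) tuples, then a dict.
# The real file format handled by get_macros() is not shown in this hunk.
def get_macros_sketch(input):
    """ Turn a macros string into a list of tuples of macros """
    response = {}
    # turn input into list of tuples, then fold the tuples into a dict
    pairs = [line.split(maxsplit=1) for line in input.splitlines() if len(line.split()) > 1]
    for name, expansion in pairs:
        response[name] = expansion
    return response

print(get_macros_sketch(".rn Random Name\n.eg for example"))
# {'.rn': 'Random Name', '.eg': 'for example'}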
@@ -193,9 +197,7 @@ def get_macros(input, child=False):
     return response
 
 
 def is_otf_macro_start(token, line):
-    """
-    Returns true if token is the start of an on the fly macro
-    """
+    """ Returns true if token is the start of an on the fly macro """
     match = re.search(r'^\.[A-Za-z0-9]+$', token)
     if match is None:
         return False
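The context lines show how the start of an on-the-fly macro is recognised: the token must be a dot followed only by letters or digits, anchored at both ends. A quick demonstration of that pattern (the extra `line` argument of `is_otf_macro_start` is not exercised here):

import re

# Behaviour of the start-of-macro pattern shown above.
pattern = r'^\.[A-Za-z0-9]+$'
for token in ['.abbr', '.x9', 'word', '.foo,', '..bar']:
    print(token, re.search(pattern, token) is not None)
# .abbr True   .x9 True   word False   .foo, False   ..bar False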
@@ -209,9 +211,7 @@ def is_otf_macro_start(token, line):
 
 
 def is_otf_macro_end(token):
-    """
-    Returns true if token is the end of an on the fly macro
-    """
+    """ Returns true if token is the end of an on the fly macro """
     match = re.search(r'(\.,|,\.)', f"{token}")
     return match is not None
 
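The matching end-of-macro check, likewise condensed to a one-line docstring, simply searches the token for a `.,` or `,.` sequence anywhere inside it:

import re

# Behaviour of the end-of-macro pattern shown above.
for token in ['word.,', ',.', 'plain', 'mid,.dle']:
    print(token, re.search(r'(\.,|,\.)', f"{token}") is not None)
# word., True   ,. True   plain False   mid,.dle True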