Mirror of https://github.com/alvierahman90/otfmacros.git, synced 2024-12-15 12:01:59 +00:00
#!/usr/bin/env python3

import sys
import re


def get_args():
    """ Get command line arguments """

    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--macros", default=["macros"], action="append",
                        help="Extra files where macros are stored")
    parser.add_argument("-q", "--quiet", default=False, action="store_true",
                        help="Don't output to stdout")
    parser.add_argument("input", help="The file to be processed")
    parser.add_argument("output", help="The location of the output")
    return parser.parse_args()


def is_consonant(letter):
    if not isinstance(letter, str):
        raise ValueError("Argument 'letter' must be type str")
    if len(letter) != 1:
        raise ValueError("Argument 'letter' must be 1 long")
    return not is_vowel(letter)


def is_vowel(letter):
    if not isinstance(letter, str):
        raise ValueError("Argument 'letter' must be type str")
    if len(letter) != 1:
        raise ValueError("Argument 'letter' must be 1 long")
    return letter in 'aeiou'


def pluralize(word, macro=None):
    """
    Returns the plural form of a word. If the macro supplies an explicit
    plural (a third field), that is returned instead.
    """

    if macro:
        if len(macro) == 3:
            return macro[2]

    # TODO add more complex plural forms
    if word[-1] in 'sxz' or word[-2:] in ['ch', 'sh']:
        return word + 'es'
    if word[-1] == 'y':
        if is_consonant(word[-2]):
            return word[:-1] + 'ies'
    if word[-1] == 'o':
        if is_consonant(word[-2]):
            return word + 'es'
    if word[-1] == 'f':
        return word[:-1] + 'ves'
    return word + 's'


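# Illustrative outcomes of the rules in pluralize (examples added here, not
# taken from the original source):
#
#   pluralize('box')   -> 'boxes'
#   pluralize('city')  -> 'cities'
#   pluralize('hero')  -> 'heroes'
#   pluralize('leaf')  -> 'leaves'
#   pluralize('cat')   -> 'cats'
#   pluralize('mouse', macro=('m', 'mouse', 'mice'))  -> 'mice'

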
def upper_check(token, word):
    # carry the capitalisation of the original token over to its expansion
    all_caps = True

    for letter in token:
        if letter.islower():
            all_caps = False
            break

    if all_caps:
        return word.upper()

    # if the second character of the token is upper case, capitalise the
    # first letter of the expanded word
    if len(token) > 1:
        if token[1].isupper():
            return word[:1].upper() + word[1:]

    return word


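# Illustrative behaviour of upper_check (examples added here, not taken from
# the original source), assuming a macro mapping "pc" to "computer":
#
#   upper_check('pc', 'computer')    -> 'computer'
#   upper_check('PC', 'computer')    -> 'COMPUTER'
#   upper_check('PCs', 'computers')  -> 'Computers'

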
def process(tokens, macros):
    output = tokens

    for line_number, line in enumerate(tokens):
        for token_number, token in enumerate(line):
            if len(token) == 0:
                continue

            # cutting off the end and then adding it back once expanded
            end = []
            token = list(token)
            for index, char in reversed(list(enumerate(token))):
                if not char.isalnum():
                    end.insert(0, token.pop(index))
                else:
                    break
            end = ''.join(end)
            token = ''.join(token)

            # if no macro is found (or if it is not a macro at all), the value
            # will not be changed
            value = token

            for macro in macros:
                if macro[0].lower() == token.lower():
                    value = macro[1]
                    break
                elif macro[0].lower() + 's' == token.lower():
                    value = pluralize(macro[1], macro=macro)
                    break

            output[line_number][token_number] = upper_check(token, value)

            # re-adding what was trimmed off
            output[line_number][token_number] += end

    for line_number, line in enumerate(output):
        output[line_number] = ' '.join(line)

    output = '\n'.join(output)

    return output


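# A small worked example of process (illustrative, not from the original
# source): with macros = [('pc', 'computer')],
#
#   process(tokenize('my PC broke'), macros)  -> 'my COMPUTER broke'
#
# "PC" matches the macro case-insensitively and upper_check restores the
# all-caps spelling of the original token.

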
def tokenize(input):
    """
    Return a list of tokens from a string (convert file contents to the format
    processed by `process`)
    """
    return [x.split(' ') for x in input.split('\n')]


def get_macros(input):
    """
    Turn a string into a list of tuples of macros
    """
    response = []

    # turn input into an unvalidated list of macros
    macros = [re.split('[ \t]', x) for x in input.split('\n')]

    # validate macros
    for index, macro in enumerate(macros):
        if macro[0] == "source":
            # a "source <file>" line pulls macros in from another file rather
            # than defining a macro itself
            with open(macro[1]) as file:
                response += get_macros(file.read())
        elif len(macro) == 2 or len(macro) == 3:
            response.append(tuple(macros[index]))

    return response


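# The macros file format implied by the parsing above (the entries below are
# made-up examples, not taken from the project's documentation): one macro per
# line, fields separated by spaces or tabs, an optional third field giving an
# irregular plural, and "source <file>" including another macros file:
#
#   source extra_macros
#   eg e.g.
#   dof degree-of-freedom degrees-of-freedom

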
def main(args):
    """ Entry point for script """

    # get macros
    macros = []
    for macro_file in args.macros:
        with open(macro_file) as file:
            macros += get_macros(file.read())

    # get tokens (file contents)
    with open(args.input) as file:
        tokens = tokenize(file.read())

    # get output
    output = process(tokens, macros)

    # show and save output
    if not args.quiet:
        print(output)
    with open(args.output, 'w+') as file:
        file.write(output)

    return 0


if __name__ == '__main__':
    try:
        sys.exit(main(get_args()))
    except KeyboardInterrupt:
        sys.exit(0)
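# Illustrative invocation (the file names are assumptions; the default macros
# list already contains a file called "macros", and -m/--macros appends to it):
#
#   ./otfmacros.py notes.txt notes_expanded.txt
#   ./otfmacros.py -m extra_macros notes.txt notes_expanded.txt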