#!/usr/bin/env python3
"""Convert an FF6 randomizer spoiler log into JSON on stdout.

Usage: main.py <spoiler-log.txt>

The log is split into sections by a long "=====" bar; every section
that has a matching parse_<SECTION>() function below is parsed, and
the rest are ignored.
"""

import json
import sys

# Bar that separates sections in the spoiler log, copied verbatim from
# the log format (including the trailing newline).
SECTION_SEPARATOR = '============================================================\n'


# Parser functions are intentionally named parse_<SECTION-HEADER> (upper
# case) so the driver can dispatch on the literal section name.
def parse_CHARACTERS(data):
    """Parse the CHARACTERS section into a {character name: info} dict.

    Each character block yields:
      * 'stats'         - {stat name (lowercased, dots stripped): int}
      * 'spells'        - {spell name: natural-magic learn level (int)}
      * 'natural_magic' - True when the "Has natural ..." marker appears
      * 'commands' / 'equipment' - comma-delimited lists, stripped
      * any other "Key: value" line, with a few keys renamed below
    """
    # Field-name normalization for plain "Key: value" lines.  ('Notable
    # equipment' is also handled by the startswith('Notable') branch
    # before this table is consulted; the entry is kept for safety.)
    replacements = {
        'Looks like': 'looks',
        'World of Ruin location': 'wor_location',
        'Notable equipment': 'equipment',
    }

    result = {}

    # First and last chunks are section boilerplate, not characters.
    for chunk in data.split('\n\n')[1:-1]:
        name = None
        info = {
            'stats': {},
            'spells': {},
            'natural_magic': False,
        }

        for line in chunk.split('\n'):
            # "NN. NAME" header line; the name starts after "NN. ".
            if line[0:2].isdigit():
                name = line[4:]

            # "|Vigor: 40|Speed: 35|..." stat chart rows.
            elif line.startswith('|'):
                for cell in line[1:-1].split('|'):
                    if ':' in cell:
                        stat_name, stat_value = cell.split(':')
                        stat_name = stat_name.replace('.', '').strip().lower()
                        info['stats'][stat_name] = int(stat_value)

            # " LV nn - Spell" natural-magic learnset rows.
            elif line.startswith(' LV'):
                spell_level, spell_name = line.split('-', 1)
                info['spells'][spell_name.strip()] = int(spell_level.strip().split(' ')[1])

            # Comma-delimited list fields.
            elif line.startswith('Commands:'):
                info['commands'] = [cmd.strip() for cmd in line.split(':')[1].split(',')]

            elif line.startswith('Notable'):
                info['equipment'] = [eq.strip() for eq in line.split(':')[1].split(',')]

            # Bare marker line, carries no value.
            elif line.startswith('Has natural'):
                info['natural_magic'] = True

            # Everything else: plain "Key: value" lines.
            elif ':' in line:
                field, value = line.split(':', 1)
                field = replacements.get(field, field).lower()
                info[field] = value.strip()

        # Only record chunks that actually carried a character header.
        # Previously `name` leaked across iterations, so a headerless
        # chunk would silently overwrite the previous character.
        if name is not None:
            result[name] = info

    return result


def parse_COMMANDS(data):
    """Parse the COMMANDS section into {command name: description}.

    A "-------" rule sits between each command's *name* line and the
    descriptive text that follows it, so the last line of every chunk
    names the command described by the next chunk.

    NOTE(review): the final chunk also has its last line dropped; this
    is harmless when the section ends with a newline (the dropped line
    is empty) — confirm the log always does.
    """
    commands = {}

    pending_name = None
    for chunk in data.split('\n-------\n'):
        lines = chunk.split('\n')
        if pending_name:
            commands[pending_name] = '\n'.join(lines[:-1])
        pending_name = lines[-1].lower()

    return commands


def load(filename):
    """Read a spoiler log and split it into {section name: raw text}.

    The text before the first separator bar holds only the seed code
    and is stored under the synthetic key "SEED"; every later section
    is keyed by its header line minus its 5-character decoration.
    """
    with open(filename, 'r', encoding='utf-8') as infile:
        raw_sections = infile.read().split(SECTION_SEPARATOR)

    sections = {}

    first = True
    for section in raw_sections:
        if first:
            # First line of the log, minus the 12-character
            # "Seed/flags: "-style label — TODO confirm label width
            # against a real log.
            sections["SEED"] = section.split('\n', 1)[0][12:]
            first = False
            continue

        header, body = section.split('\n', 1)
        sections[header[5:]] = body

    return sections


def _parse_sections(sections):
    """Run parse_<NAME>() over each section that has a parser defined.

    The parser is looked up *before* it is called (instead of calling
    inside ``try/except KeyError``) so that a KeyError raised inside a
    parser surfaces instead of silently dropping the section.
    """
    parsed = {}
    for name, text in sections.items():
        parser = globals().get(f"parse_{name}")
        if parser is not None:
            parsed[name] = parser(text)
    return parsed


if __name__ == "__main__":
    data = _parse_sections(load(sys.argv[1]))

    # Re-key each character's command list as {command: description},
    # taking descriptions from the COMMANDS section when available and
    # falling back to the bare command name otherwise.
    # NOTE(review): parse_COMMANDS lowercases its keys; this lookup
    # assumes the CHARACTERS command lists are lowercase too — confirm
    # against a real log.
    command_descs = data.get('COMMANDS', {})
    for c_data in data.get('CHARACTERS', {}).values():
        c_data['commands'] = {
            cmd: command_descs.get(cmd, cmd)
            for cmd in c_data.get('commands', [])
        }

    # Emit the whole parse result as a single JSON document.
    print(json.dumps(data))