diff --git a/fms_yaml_tools/field_table/combine_field_table_yamls.py b/fms_yaml_tools/field_table/combine_field_table_yamls.py
index 1af1c4f..99e1340 100644
--- a/fms_yaml_tools/field_table/combine_field_table_yamls.py
+++ b/fms_yaml_tools/field_table/combine_field_table_yamls.py
@@ -21,63 +21,86 @@
from os import path, strerror
import errno
-import argparse
+import click
import yaml
from .. import __version__
-""" Combines a series of field_table.yaml files into one file
- Author: Uriel Ramirez 11/20/2023
-"""
-
-def is_duplicate(field_table, new_entry):
+@click.command()
+@click.argument('in-files', nargs=-1)
+@click.option('--debug/--no-debug', type=click.BOOL, show_default=True, default=False,
+ help="Print steps in the conversion")
+@click.option('--output-yaml', type=click.STRING, show_default=True, default="field_table.yaml",
+              help="Path to the output field table yaml")
+@click.option('--force-write/--no-force-write', type=click.BOOL, show_default=True, default=False,
+ help="Overwrite the output yaml file if it already exists")
+@click.version_option(__version__, "--version")
+def combine_field_table_yaml(in_files, debug, output_yaml, force_write):
+ """ Combines a series of field_table.yaml files into one file \n
+    in-files - Space separated list with the names of the field_table.yaml files to combine \n
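+    Example (illustrative file names): combine-field-table-yamls file1.yaml file2.yaml --output-yaml field_table.yaml \n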
"""
- Check if a field_table entry was already defined in a different file
+ verboseprint = print if debug else lambda *a, **k: None
+ try:
+ field_table = combine_yaml(in_files, verboseprint)
+ out_file_op = "x" # Exclusive write
+ if force_write:
+ out_file_op = "w"
+ verboseprint("Writing the output yaml: " + output_yaml)
+ with open(output_yaml, out_file_op) as myfile:
+ yaml.dump(field_table, myfile, default_flow_style=False)
- Args:
- field_table: List of dictionaries containing all of the field_table
- entries that have been combined
- new_entry: Dictionary of the field_table entry to check
- """
- is_duplicate = False
- return is_duplicate
+ except Exception as err:
+ raise SystemExit(err)
def field_type_exists(field_type, curr_entries):
- for entry in curr_entries:
- if field_type == entry['field_type']:
- return True
- return False
-
-def add_new_field(new_entry, curr_entries):
- new_field_type = new_entry['field_type']
- for entry in curr_entries:
- if new_field_type == entry['field_type']:
- if entry == new_entry:
- # If the field_type already exists but it is exactly the same, move on
- continue
- new_modlist = new_entry['modlist']
- for mod in new_modlist:
- if model_type_exists(mod['model_type'], entry):
- add_new_mod(mod, entry)
- else:
- #If the model type does not exist, just append it
- entry['modlist'].append(mod)
-
-def add_new_mod(new_mod, curr_entries):
- model_type = new_mod['model_type']
- for entry in curr_entries['modlist']:
- if model_type == entry['model_type']:
- if new_mod == entry:
- # If the model_type already exists but it is exactly the same, move on
- continue
- new_varlist = new_mod['varlist']
- curr_varlist = entry['varlist']
- for new_var in new_varlist:
- for curr_var in curr_varlist:
- if new_var == curr_var:
- continue
- curr_varlist.append(new_var)
+ for entry in curr_entries:
+ if field_type == entry['field_type']:
+ return True
+ return False
+
+
+def add_new_field(new_entry, curr_entries, verboseprint):
+ new_field_type = new_entry['field_type']
+ for entry in curr_entries:
+ if new_field_type == entry['field_type']:
+ if entry == new_entry:
+ # If the field_type already exists but it is exactly the same, move on
+ verboseprint("---> The field_type:" + entry['field_type'] + " already exists. Moving on")
+ return
+ verboseprint("---> Checking for a new entry for the field_type:" + entry['field_type'])
+ new_modlist = new_entry['modlist']
+ for mod in new_modlist:
+ if model_type_exists(mod['model_type'], entry):
+ add_new_mod(mod, entry, verboseprint)
+ else:
+ # If the model type does not exist, just append it
+ verboseprint("----> Adding the model_type: " + mod['model_type'] + " to field_type:"
+ + new_entry['field_type'])
+ entry['modlist'].append(mod)
+
+
+def add_new_mod(new_mod, curr_entries, verboseprint):
+ model_type = new_mod['model_type']
+ for entry in curr_entries['modlist']:
+ if model_type == entry['model_type']:
+ if new_mod == entry:
+ # If the model_type already exists but it is exactly the same, move on
+ verboseprint("----> The model_type:" + entry['model_type'] + " already exists. Moving on")
+ return
+ verboseprint("----> Checking for a new entry for the model_type:" + entry['model_type'])
+ new_varlist = new_mod['varlist']
+ curr_varlist = entry['varlist']
+ for new_var in new_varlist:
+ found = False
+ for curr_var in curr_varlist:
+ if new_var == curr_var:
+ found = True
+ verboseprint("-----> variable:" + new_var['variable'] + " already exists. Moving on")
+ break
+ if not found:
+ verboseprint("-----> new variable:" + new_var['variable'] + " found. Adding it.")
+ curr_varlist.append(new_var)
def model_type_exists(model_type, curr_entries):
@@ -86,7 +109,8 @@ def model_type_exists(model_type, curr_entries):
return True
return False
-def combine_yaml(files):
+
+def combine_yaml(files, verboseprint):
"""
Combines a list of yaml files into one
@@ -96,61 +120,29 @@ def combine_yaml(files):
field_table = {}
field_table['field_table'] = []
for f in files:
+        verboseprint("Opening the field_table yaml: " + f)
# Check if the file exists
if not path.exists(f):
raise FileNotFoundError(errno.ENOENT,
strerror(errno.ENOENT),
f)
with open(f) as fl:
- my_table = yaml.safe_load(fl)
+            verboseprint("Parsing the field_table yaml: " + f)
+ try:
+ my_table = yaml.safe_load(fl)
+ except yaml.YAMLError as err:
+ print("---> Error when parsing the file " + f)
+ raise err
entries = my_table['field_table']
for entry in entries:
if not field_type_exists(entry['field_type'], field_table['field_table']):
+ verboseprint("---> Adding the field_type: " + entry['field_type'])
# If the field table does not exist, just add it to the current field table
field_table['field_table'].append(entry)
else:
- add_new_field(entry, field_table['field_table'])
+ add_new_field(entry, field_table['field_table'], verboseprint)
return field_table
-def main():
- #: parse user input
- parser = argparse.ArgumentParser(
- prog='combine_field_table_yaml',
- description="Combines a list of field_table.yaml files into one file" +
- "Requires pyyaml (https://pyyaml.org/)")
- parser.add_argument('-f', '--in-files',
- dest='in_files',
- type=str,
- nargs='+',
- default=["field_table"],
- help='Space seperated list with the '
- 'Names of the field_table.yaml files to combine')
- parser.add_argument('-o', '--output',
- dest='out_file',
- type=str,
- default='field_table.yaml',
- help="Ouput file name of the converted YAML \
- (Default: 'field_table.yaml')")
- parser.add_argument('-F', '--force',
- action='store_true',
- help="Overwrite the output field table yaml file.")
- parser.add_argument('-V', '--version',
- action="version",
- version=f"%(prog)s {__version__}")
- args = parser.parse_args()
-
- try:
- field_table = combine_yaml(args.in_files)
- out_file_op = "x" # Exclusive write
- if args.force:
- out_file_op = "w"
- with open(args.out_file, out_file_op) as myfile:
- yaml.dump(field_table, myfile, default_flow_style=False)
-
- except Exception as err:
- raise SystemExit(err)
-
-
if __name__ == "__main__":
- main()
+ combine_field_table_yaml(prog_name="combine_field_table_yaml")
diff --git a/fms_yaml_tools/field_table/field_table_to_yaml.py b/fms_yaml_tools/field_table/field_table_to_yaml.py
index 29d5b1d..0307acd 100755
--- a/fms_yaml_tools/field_table/field_table_to_yaml.py
+++ b/fms_yaml_tools/field_table/field_table_to_yaml.py
@@ -1,267 +1,281 @@
#!/usr/bin/env python3
-"""
-***********************************************************************
-* GNU Lesser General Public License
-*
-* This file is part of the GFDL Flexible Modeling System (FMS) YAML tools.
-*
-* FMS_yaml_tools is free software: you can redistribute it and/or modify it under
-* the terms of the GNU Lesser General Public License as published by
-* the Free Software Foundation, either version 3 of the License, or (at
- * your option) any later version.
-*
-* FMS_yaml_tools is distributed in the hope that it will be useful, but WITHOUT
-* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-* for more details.
-*
-* You should have received a copy of the GNU Lesser General Public
-* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
-***********************************************************************
-"""
-
-""" Converts a legacy ascii field_table to a yaml field_table.
- Author: Eric Stofferahn 07/14/2022
-"""
+# ***********************************************************************
+# * GNU Lesser General Public License
+# *
+# * This file is part of the GFDL Flexible Modeling System (FMS) YAML
+# * tools.
+# *
+# * FMS_yaml_tools is free software: you can redistribute it and/or
+# * modify it under the terms of the GNU Lesser General Public License
+# * as published by the Free Software Foundation, either version 3 of the
+# * License, or (at your option) any later version.
+# *
+# * FMS_yaml_tools is distributed in the hope that it will be useful, but
+# * WITHOUT ANY WARRANTY; without even the implied warranty of
+# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# * General Public License for more details.
+# *
+# * You should have received a copy of the GNU Lesser General Public
+# * License along with FMS. If not, see <http://www.gnu.org/licenses/>.
+# ***********************************************************************
+import click
import re
-import sys
from collections import OrderedDict
-import argparse
+from .. import __version__
import yaml
-def main():
+
+@click.command()
+# Debug is used to print more information to the screen.
+@click.option('--debug/--no-debug', type=click.BOOL, show_default=True, default=False,
+ help="Print steps in the conversion")
+@click.option('--output-yaml', type=click.STRING, show_default=True, default="field_table.yaml",
+              help="Path to the output field table yaml")
+@click.option('--force-write/--no-force-write', type=click.BOOL, show_default=True, default=False,
+ help="Overwrite the output yaml file if it already exists")
+@click.version_option(__version__, "--version")
+@click.argument("field-table-name") # This is the path to the field_table to convert
+def field_to_yaml(field_table_name, debug, output_yaml, force_write):
+    """ Converts a legacy ascii field_table to a yaml field_table. \n
+ field-table-name - Path to the field table to convert \n
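+    Example (illustrative path): field-table-to-yaml field_table --output-yaml field_table.yaml \n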
+ """
# Necessary to dump OrderedDict to yaml format
yaml.add_representer(OrderedDict, lambda dumper, data: dumper.represent_mapping('tag:yaml.org,2002:map', data.items()))
- parser = argparse.ArgumentParser(description="Converts a legacy ascii field_table to a yaml field_table. \
- Requires pyyaml (https://pyyaml.org/) \
- More details on the field_table yaml format can be found in \
- https://github.com/NOAA-GFDL/FMS/tree/main/data_override")
- parser.add_argument('--file', '-f', type=str, help='Name of the field_table file to convert')
- parser.add_argument('--verbose', '-v', action='store_true', help='Increase verbosity')
- parser.set_defaults(v=False)
- global args
- args = parser.parse_args()
- field_table_name = args.file
-
- if args.verbose:
+ if debug:
print(field_table_name)
field_yaml = FieldYaml(field_table_name)
- field_yaml.main()
- field_yaml.writeyaml()
+ field_yaml.main(debug)
+ field_yaml.writeyaml(output_yaml=output_yaml, force_write=force_write)
+
def dont_convert_yaml_val(inval):
- # Yaml does some auto-conversions to boolean that we don't want, this will help fix it
- dontconvertus = ["yes", "Yes", "no", "No", "on", "On", "off", "Off"]
+ # Yaml does some auto-conversions to boolean that we don't want, this will help fix it
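+    # For example, yaml.safe_load("on") yields the boolean True under YAML 1.1, while this helper keeps the string "on"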
+ dontconvertus = ["yes", "Yes", "no", "No", "on", "On", "off", "Off"]
+
+ if not isinstance(inval, str):
+ return yaml.safe_load(inval)
+ if inval in dontconvertus:
+ return inval
+ else:
+ return yaml.safe_load(inval)
- if not isinstance(inval, str):
- return yaml.safe_load(inval)
- if inval in dontconvertus:
- return inval
- else:
- return yaml.safe_load(inval)
class Field:
- """ A Field Object, containing the variable attributes, methods, and subparameters """
- def __init__(self, in_field_type, entry_tuple):
- """ Initialize the Field Object with the provided entries, then process as a species or tracer """
- self.field_type = in_field_type
- self.name = entry_tuple[0]
- self.dict = OrderedDict()
- self.num_subparams = 0
- for in_prop in entry_tuple[1]:
- if 'tracer' == self.field_type:
- self.process_tracer(in_prop)
- else:
- self.process_species(in_prop)
-
- def process_species(self, prop):
- """ Process a species field """
- comma_split = prop.split(',')
- if args.verbose:
- print(self.name)
- print(self.field_type)
- print(comma_split)
- if len(comma_split) > 1:
- eq_splits = [x.split('=') for x in comma_split]
- if args.verbose:
- print('printing eq_splits')
- print(eq_splits)
- for idx, sub_param in enumerate(eq_splits):
- if args.verbose:
- print('printing len(sub_param)')
- print(len(sub_param))
- if len(sub_param) < 2:
- eq_splits[0][1] += f',{sub_param[0]}'
- if args.verbose:
- print(eq_splits)
- eq_splits = [x for x in eq_splits if len(x) > 1]
- for sub_param in eq_splits:
- if ',' in sub_param[1]:
- val = yaml.safe_load("'" + sub_param[1]+ "'")
+ """ A Field Object, containing the variable attributes, methods, and subparameters """
+ def __init__(self, in_field_type, entry_tuple, debug):
+ """ Initialize the Field Object with the provided entries, then process as a species or tracer """
+ self.field_type = in_field_type
+ self.name = entry_tuple[0]
+ self.dict = OrderedDict()
+ for in_prop in entry_tuple[1]:
+ if 'tracer' == self.field_type:
+ self.process_tracer(in_prop, debug)
+ else:
+ self.process_species(in_prop, debug)
+
+ def process_species(self, prop, debug):
+ """ Process a species field """
+ comma_split = prop.split(',')
+ if debug:
+ print(self.name)
+ print(self.field_type)
+ print(comma_split)
+ if len(comma_split) > 1:
+ eq_splits = [x.split('=') for x in comma_split]
+ if debug:
+ print('printing eq_splits')
+ print(eq_splits)
+ for idx, sub_param in enumerate(eq_splits):
+ if debug:
+ print('printing len(sub_param)')
+ print(len(sub_param))
+ if len(sub_param) < 2:
+ eq_splits[0][1] += f',{sub_param[0]}'
+ if debug:
+ print(eq_splits)
+ eq_splits = [x for x in eq_splits if len(x) > 1]
+ for sub_param in eq_splits:
+ if ',' in sub_param[1]:
+ val = yaml.safe_load("'" + sub_param[1] + "'")
+ else:
+ val = dont_convert_yaml_val(sub_param[1])
+ self.dict[sub_param[0].strip()] = val
else:
- val = dont_convert_yaml_val(sub_param[1])
- self.dict[sub_param[0].strip()] = val
- else:
- eq_split = comma_split[0].split('=')
- val = dont_convert_yaml_val(eq_split[1])
- self.dict[eq_split[0].strip()] = val
-
- def process_tracer(self, prop):
- """ Process a tracer field """
- if args.verbose:
- print(len(prop))
- self.dict[prop[0]] = prop[1]
- if len(prop) > 2:
- self.dict[f'subparams{str(self.num_subparams)}'] = [OrderedDict()]
- self.num_subparams += 1
- if args.verbose:
- print(self.name)
- print(self.field_type)
- print(prop[2:])
- for sub_param in prop[2:]:
- eq_split = sub_param.split('=')
- if len(eq_split) < 2:
- self.dict[prop[0]] = 'fm_yaml_null'
- val = dont_convert_yaml_val(eq_split[0])
- if isinstance(val, list):
- val = [dont_convert_yaml_val(b) for b in val]
- self.dict[f'subparams{str(self.num_subparams-1)}'][0][prop[1].strip()] = val
+ eq_split = comma_split[0].split('=')
+ val = dont_convert_yaml_val(eq_split[1])
+ self.dict[eq_split[0].strip()] = val
+
+ def process_tracer(self, prop, debug):
+ """ Process a tracer field """
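+        # prop is one comma-split line of the entry: prop[0] is the method name, prop[1] its value, and prop[2:] any subparameters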
+ if debug:
+ print(len(prop))
+ if len(prop) > 2:
+ self.dict[prop[0]] = [OrderedDict([('value', prop[1])])]
+ if debug:
+ print(self.name)
+ print(self.field_type)
+ print(prop[2:])
+ for sub_param in prop[2:]:
+ eq_split = sub_param.split('=')
+ if len(eq_split) < 2:
+ self.dict[prop[0]][0]['value'] = 'fm_yaml_null'
+ val = dont_convert_yaml_val(eq_split[0])
+ if isinstance(val, list):
+ val = [dont_convert_yaml_val(b) for b in val]
+ self.dict[prop[0]][0][prop[1].strip()] = val
+ else:
+ val = dont_convert_yaml_val(eq_split[-1])
+ if isinstance(val, list):
+ val = [dont_convert_yaml_val(b) for b in val]
+ self.dict[prop[0]][0][eq_split[0].strip()] = val
else:
- val = dont_convert_yaml_val(eq_split[-1])
- if isinstance(val, list):
- val = [dont_convert_yaml_val(b) for b in val]
- self.dict[f'subparams{str(self.num_subparams-1)}'][0][eq_split[0].strip()] = val
-
+ val = dont_convert_yaml_val(prop[1])
+ if isinstance(val, list):
+ val = [dont_convert_yaml_val(b) for b in val]
+ self.dict[prop[0]] = val
+
+
def list_items(brief_text, brief_od):
- """ Given text and an OrderedDict, make an OrderedDict and convert to list """
- return list(OrderedDict([(brief_text, brief_od)]).items())
+ """ Given text and an OrderedDict, make an OrderedDict and convert to list """
+ return list(OrderedDict([(brief_text, brief_od)]).items())
+
def listify_ordered_dict(in_list, in_list2, in_od):
- """ Given two lists and an OrderedDict, return a list of OrderedDicts. Note this function is recursive. """
- if len(in_list) > 1:
- x = in_list.pop()
- y = in_list2.pop()
- return [OrderedDict(list_items(x, k) + list_items(y, listify_ordered_dict(in_list, in_list2, v))) for k, v in in_od.items()]
- else:
- x = in_list[0]
- y = in_list2[0]
- return [OrderedDict(list_items(x, k) + list_items(y, v)) for k, v in in_od.items()]
-
+ """ Given two lists and an OrderedDict, return a list of OrderedDicts. Note this function is recursive. """
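+    # Illustrative (hypothetical data): with in_list=['model_type', 'field_type'] and in_list2=['varlist', 'modlist'],
+    # {'tracer': {'atmos_mod': {...}}} becomes
+    # [{'field_type': 'tracer', 'modlist': [{'model_type': 'atmos_mod', 'varlist': {...}}]}]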
+ if len(in_list) > 1:
+ x = in_list.pop()
+ y = in_list2.pop()
+ return [OrderedDict(list_items(x, k) +
+ list_items(y, listify_ordered_dict(in_list, in_list2, v))) for k, v in in_od.items()]
+ else:
+ x = in_list[0]
+ y = in_list2[0]
+ return [OrderedDict(list_items(x, k) + list_items(y, v)) for k, v in in_od.items()]
+
+
def zip_uneven(in_even, in_odd):
- """ Re-splice two uneven lists that have been split apart by a stride of 2 """
- result = [None]*(len(in_even)+len(in_odd))
- result[::2] = in_even
- result[1::2] = in_odd
- return result
+ """ Re-splice two uneven lists that have been split apart by a stride of 2 """
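+    # e.g. zip_uneven(['a', 'c'], ['b']) -> ['a', 'b', 'c']  (illustrative values)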
+ result = [None]*(len(in_even)+len(in_odd))
+ result[::2] = in_even
+ result[1::2] = in_odd
+ return result
+
def pound_signs_within_quotes(in_lines):
- """ Change pound signs within quotes to the word poundsign so they aren't expunged when eliminating comments. """
- odds = [x.split('"')[1::2] for x in in_lines]
- evens = [x.split('"')[::2] for x in in_lines]
- for idx, line in enumerate(odds):
- odds[idx] = [re.sub('#','poundsign',x) for x in line]
- newfilelines = [zip_uneven(e,o) for e, o in zip(evens,odds)]
- return ''.join(['"'.join(x) for x in newfilelines])
+ """ Change pound signs within quotes to the word poundsign so they aren't expunged when eliminating comments. """
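+    # Splitting each line on '"' leaves the quoted text at the odd indices, so only those segments are rewritten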
+ odds = [x.split('"')[1::2] for x in in_lines]
+ evens = [x.split('"')[::2] for x in in_lines]
+ for idx, line in enumerate(odds):
+ odds[idx] = [re.sub('#', 'poundsign', x) for x in line]
+ newfilelines = [zip_uneven(e, o) for e, o in zip(evens, odds)]
+ return ''.join(['"'.join(x) for x in newfilelines])
+
def process_field_file(my_file):
- """ Parse ascii field table into nested lists for further processing """
- with open(my_file, 'r') as fh:
- filelines = fh.readlines()
- # Change literal pound signs to the word poundsign
- whole_file = pound_signs_within_quotes(filelines)
- # Eliminate tabs and quotes
- whole_file = whole_file.replace('"', '').replace('\t', '')
- # Eliminate anything after a comment marker (#)
- whole_file = re.sub("\#"+r'.*'+"\n",'\n',whole_file)
- # Replace the word poundsign with a literal pound sign (#)
- whole_file = re.sub("poundsign","#",whole_file)
- # Eliminate extraneous spaces, but not in value names
- whole_file = re.sub(" *\n *",'\n',whole_file)
- whole_file = re.sub(" *, *",',',whole_file)
- whole_file = re.sub(" */\n",'/\n',whole_file)
- # Eliminate trailing commas (rude)
- whole_file = whole_file.replace(',\n', '\n')
- # Eliminate newline before end of entry
- whole_file = re.sub("\n/",'/',whole_file)
- # Eliminate spaces at very beginning and end
- whole_file = whole_file.strip()
- # Eliminate very last slash
- whole_file = whole_file.strip('/')
- # Split entries based upon the "/" ending character
- into_lines = [x for x in re.split("/\n", whole_file) if x]
- # Eliminate blank lines
- into_lines = [re.sub(r'\n+','\n',x) for x in into_lines]
- into_lines = [x[1:] if '\n' in x[:1] else x for x in into_lines]
- into_lines = [x[:-1] if '\n' in x[-1:] else x for x in into_lines]
- # Split already split entries along newlines to form nested list
- nested_lines = [x.split('\n') for x in into_lines]
- # Split nested lines into "heads" (field_type, model, var_name) and "tails" (the rest)
- heads = [x[0] for x in nested_lines]
- tails = [x[1:] for x in nested_lines]
- return heads, tails
-
+ """ Parse ascii field table into nested lists for further processing """
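+    # An entry in the legacy format typically looks like (illustrative):
+    #   "TRACER", "atmos_mod", "sphum"
+    #            "longname", "specific humidity" /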
+ with open(my_file, 'r') as fh:
+ filelines = fh.readlines()
+ # Change literal pound signs to the word poundsign
+ whole_file = pound_signs_within_quotes(filelines)
+ # Eliminate tabs and quotes
+ whole_file = whole_file.replace('"', '').replace('\t', '')
+ # Eliminate anything after a comment marker (#)
+ whole_file = re.sub("\\#"+r'.*'+"\n", '\n', whole_file)
+ # Replace the word poundsign with a literal pound sign (#)
+ whole_file = re.sub("poundsign", "#", whole_file)
+ # Eliminate extraneous spaces, but not in value names
+ whole_file = re.sub(" *\n *", '\n', whole_file)
+ whole_file = re.sub(" *, *", ',', whole_file)
+ whole_file = re.sub(" */\n", '/\n', whole_file)
+ # Eliminate trailing commas (rude)
+ whole_file = whole_file.replace(',\n', '\n')
+ # Eliminate newline before end of entry
+ whole_file = re.sub("\n/", '/', whole_file)
+ # Eliminate spaces at very beginning and end
+ whole_file = whole_file.strip()
+ # Eliminate very last slash
+ whole_file = whole_file.strip('/')
+ # Split entries based upon the "/" ending character
+ into_lines = [x for x in re.split("/\n", whole_file) if x]
+ # Eliminate blank lines
+ into_lines = [re.sub(r'\n+', '\n', x) for x in into_lines]
+ into_lines = [x[1:] if '\n' in x[:1] else x for x in into_lines]
+ into_lines = [x[:-1] if '\n' in x[-1:] else x for x in into_lines]
+ # Split already split entries along newlines to form nested list
+ nested_lines = [x.split('\n') for x in into_lines]
+ # Split nested lines into "heads" (field_type, model, var_name) and "tails" (the rest)
+ heads = [x[0] for x in nested_lines]
+ tails = [x[1:] for x in nested_lines]
+ return heads, tails
+
+
class FieldYaml:
- def __init__(self, field_file):
- self.filename = field_file
- self.out_yaml = OrderedDict()
- self.heads, self.tails = process_field_file(self.filename)
-
- def init_ordered_keys(self):
- """ Get unique combination of field_type and model... in order provided """
- self.ordered_keys = OrderedDict.fromkeys([tuple([y.lower() for y in x.split(',')[:2]]) for x in self.heads])
-
- def initialize_lists(self):
- """ Initialize out_yaml and ordered_keys """
- for k in self.ordered_keys.keys():
- self.ordered_keys[k] = []
- if k[0] not in self.out_yaml.keys():
- self.out_yaml[k[0]] = OrderedDict()
- if k[1] not in self.out_yaml[k[0]].keys():
- self.out_yaml[k[0]][k[1]] = OrderedDict()
-
- def populate_entries(self):
- """ Populate entries as OrderedDicts """
- for h, t in zip(self.heads, self.tails):
- head_list = [y.lower() for y in h.split(',')]
- tail_list = [x.split(',') for x in t]
- if (head_list[0], head_list[1]) in self.ordered_keys.keys():
- if 'tracer' == head_list[0]:
- self.ordered_keys[(head_list[0], head_list[1])].append((head_list[2], tail_list))
- else:
- self.ordered_keys[(head_list[0], head_list[1])].append((head_list[2], t))
-
- def make_objects(self):
- """ Make Tracer and Species objects and assign to out_yaml """
- for k in self.ordered_keys.keys():
- for j in self.ordered_keys[k]:
- my_entry = Field(k[0], j)
- self.out_yaml[k[0]][k[1]][my_entry.name] = my_entry.dict
-
- def convert_yaml(self):
- """ Convert to list-style yaml """
- lists_yaml = listify_ordered_dict(['model_type', 'field_type'], ['varlist', 'modlist'], self.out_yaml)
- for i in range(len(lists_yaml)):
- for j in range(len(lists_yaml[i]['modlist'])):
- lists_yaml[i]['modlist'][j]['varlist'] = [OrderedDict(list(OrderedDict([('variable', k)]).items()) +
- list(v.items())) for k, v in lists_yaml[i]['modlist'][j]['varlist'].items()]
- self.lists_wh_yaml = {"field_table": lists_yaml}
-
- def writeyaml(self):
- """ Write yaml out to file """
- raw_out = yaml.dump(self.lists_wh_yaml, None, default_flow_style=False)
- final_out = re.sub('subparams\d*:','subparams:',raw_out)
- with open(f'{self.filename}.yaml', 'w') as yaml_file:
- yaml_file.write(final_out)
-
- def main(self):
- self.init_ordered_keys()
- self.initialize_lists()
- self.populate_entries()
- self.make_objects()
- self.convert_yaml()
+ def __init__(self, field_file):
+ self.filename = field_file
+ self.out_yaml = OrderedDict()
+ self.heads, self.tails = process_field_file(self.filename)
+
+ def init_ordered_keys(self):
+ """ Get unique combination of field_type and model... in order provided """
+ self.ordered_keys = OrderedDict.fromkeys([tuple([y.lower() for y in x.split(',')[:2]]) for x in self.heads])
+
+ def initialize_lists(self):
+ """ Initialize out_yaml and ordered_keys """
+ for k in self.ordered_keys.keys():
+ self.ordered_keys[k] = []
+ if k[0] not in self.out_yaml.keys():
+ self.out_yaml[k[0]] = OrderedDict()
+ if k[1] not in self.out_yaml[k[0]].keys():
+ self.out_yaml[k[0]][k[1]] = OrderedDict()
+
+ def populate_entries(self):
+ """ Populate entries as OrderedDicts """
+ for h, t in zip(self.heads, self.tails):
+ head_list = [y.lower() for y in h.split(',')]
+ tail_list = [x.split(',') for x in t]
+ if (head_list[0], head_list[1]) in self.ordered_keys.keys():
+ if 'tracer' == head_list[0]:
+ self.ordered_keys[(head_list[0], head_list[1])].append((head_list[2], tail_list))
+ else:
+ self.ordered_keys[(head_list[0], head_list[1])].append((head_list[2], t))
+
+ def make_objects(self, debug):
+ """ Make Tracer and Species objects and assign to out_yaml """
+ for k in self.ordered_keys.keys():
+ for j in self.ordered_keys[k]:
+ my_entry = Field(k[0], j, debug)
+ self.out_yaml[k[0]][k[1]][my_entry.name] = my_entry.dict
+
+ def convert_yaml(self):
+ """ Convert to list-style yaml """
+ lists_yaml = listify_ordered_dict(['model_type', 'field_type'], ['varlist', 'modlist'], self.out_yaml)
+ for i in range(len(lists_yaml)):
+ for j in range(len(lists_yaml[i]['modlist'])):
+ lists_yaml[i]['modlist'][j]['varlist'] = [OrderedDict(list(OrderedDict([('variable', k)]).items()) +
+ list(v.items()))
+ for k, v in lists_yaml[i]['modlist'][j]['varlist'].items()]
+ self.lists_wh_yaml = {"field_table": lists_yaml}
+
+ def writeyaml(self, output_yaml="field_table.yaml", force_write=False):
+ """ Write yaml out to file """
+ raw_out = yaml.dump(self.lists_wh_yaml, None, default_flow_style=False)
+ out_file_op = "x" # Exclusive write
+ if force_write:
+ out_file_op = "w"
+
+ with open(output_yaml, out_file_op) as yaml_file:
+ yaml_file.write(raw_out)
+
+ def main(self, debug):
+ self.init_ordered_keys()
+ self.initialize_lists()
+ self.populate_entries()
+ self.make_objects(debug)
+ self.convert_yaml()
+
if __name__ == '__main__':
- main()
+ field_to_yaml(prog_name="field_to_yaml")
diff --git a/setup.cfg b/setup.cfg
index 3429267..e306187 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -37,13 +37,14 @@ packages =
[options.entry_points]
console_scripts =
+ validate-schema = fms_yaml_tools.schema.validate_schema:valyaml
data-table-to-yaml = fms_yaml_tools.data_table.data_table_to_yaml:data_to_yaml
is-valid-data-table-yaml = fms_yaml_tools.data_table.is_valid_data_table_yaml:validate_data_yaml
combine-data-table-yamls = fms_yaml_tools.data_table.combine_data_table_yamls:combine_data_table_yaml
diag-table-to-yaml = fms_yaml_tools.diag_table.diag_table_to_yaml:diag_to_yaml
is-valid-diag-table-yaml = fms_yaml_tools.diag_table.is_valid_diag_table_yaml:validate_diag_yaml
combine-diag-table-yamls = fms_yaml_tools.diag_table.combine_diag_table_yamls:combine_diag_table_yaml
- field-table-to-yaml = fms_yaml_tools.field_table.field_table_to_yaml:main
+ field-table-to-yaml = fms_yaml_tools.field_table.field_table_to_yaml:field_to_yaml
is-valid-field-table-yaml = fms_yaml_tools.field_table.is_valid_field_table_yaml:validate_field_yaml
- combine-field-table-yamls = fms_yaml_tools.field_table.combine_field_table_yamls:main
+ combine-field-table-yamls = fms_yaml_tools.field_table.combine_field_table_yamls:combine_field_table_yaml
diag-yaml-list = fms_yaml_tools.diag_table.diag_yaml_list:dyl