Yaml spec

YamlSpec

Bases: ABC

Object for handling yaml spec files: Powers, Bestiary, etc

Source code in automation/templates/yaml_spec.py
class YamlSpec(ABC):
    """Object for handling yaml spec files: Powers, Bestiary, etc"""

    def __init__(self, input_files) -> None:
        """Takes input files, loads raw data, gets stem and name for saving

        If file not found at input_file relative path, adds ./automation/_input/
        """
        self._md_TOC = ""
        self._raw_data = {}
        self._categories = set()
        self._category_hierarchy = None
        self._content = dict()
        self._fields = None
        self._default_root = os.getenv("THEGAME_ROOT") or "./automation/"
        self._filepath_default_input = self._default_root + "_input/"
        self._filepath_default_output = self._default_root + "_output/"
        self._filepath_mechanics = "./docs/src/1_Mechanics/"
        self._tried_loading = False
        self._limit_types = []
        self._as_dict = {}
        self._type_dict = {}  # Declaring on build_contents

        input_files = ensure_list(input_files)
        if not input_files:
            logger.critical(
                "YamlSpec received empty file list. Check that file name contains"
                + " relevant keywords like 'power', 'vuln', 'best', 'pc', or 'item'"
            )
        elif len(input_files) > 1:
            self._stem = (
                # When multiple inputs, take prefix before '_', add 'Combined'
                os.path.splitext(os.path.basename(input_files[0]))[0]
                + "_Combined"
            )
        elif len(input_files) == 1:
            self._stem = os.path.splitext(os.path.basename(input_files[0]))[0]

        self._filepath_output = (
            self._filepath_default_output
            if "SAMPLE" in self._stem or "test" in self._stem
            else self._filepath_mechanics
        )

        for input_file in input_files:  # If provided mult files, combine
            if not os.path.exists(input_file):
                input_file = self._filepath_default_input + input_file
            logger.debug(f"Loading {input_file}")
            self._raw_data.update(load_yaml(input_file))
        self._template = self._raw_data.pop("Template", None)
        self._name = self._stem.split("_")[-1]

    # ------------------------------- FILEPATH UTILITIES -------------------------------
    @property
    def filepath_default_input(self):
        return self._filepath_default_input

    @property
    def filepath_default_output(self):
        return self._filepath_default_output

    @property
    def filepath_output(self):
        return self._filepath_output

    @filepath_output.setter  # Allows setting new output fp
    def filepath_output(self, filepath_output: str):
        # TODO: add path verification here
        self._filepath_output = filepath_output

    # -------------------------------- CORE PROPERTIES ---------------------------------
    def _build_contents(self, this_data_class, id_component="", **kwargs):
        """Loop over items in the raw dict format. Generate list and dict versions"""
        self._type_dict = {k: [] for k in self._limit_types}
        self._tried_loading = True
        for k, v in self.raw_data.items():
            type = v.get("Type", None)
            if not type:
                logger.warning(f"YAML item missing Type: {k}")
            elif type in self._limit_types:
                id = v["Name"] + str(v.get(id_component)) if v.get("Name") else k
                _ = v.setdefault("Name", k)
                self._as_dict.update({k: this_data_class(id=id, **v, **kwargs)})
                self._type_dict[type].append(k)  # Build type dict
            elif type not in self._limit_types:
                logger.debug(f"Skipping YAML item due to excluded Type: {k}")

    def _build_categories(self, build_with="Type") -> OrderedDict:
        """Return OrderedDict with {tuple(Type) : [list of beasts]} as key value pairs"""
        if not self._categories:
            for k, v in self.as_dict.items():
                cat_tuple = tuple(ensure_list(getattr(v, build_with)))
                self._categories.setdefault(cat_tuple, [])
                self._categories[cat_tuple].append(k)  # Changed v.Name -> k
                self._categories_set.add(cat_tuple)
                self._csv_fields = self._csv_fields.union(list(v.csv_dict.keys()))
                self._categories = sort_dict(
                    self._categories, sorted(self._categories_set)
                )
        return self._categories

    @property
    def raw_data(self):
        return self._raw_data

    @abstractmethod
    def as_dict(self):
        pass

    @abstractmethod
    def categories(self) -> dict:
        """{(hierarchy tuple): [List of names]} dict"""
        pass

    @property
    def type_dict(self) -> dict:
        """Cache of items by type e.g. {Major: [a, b]}"""
        if not self._tried_loading:
            self._build_contents
        return self._type_dict

    # ------------------------------- MARKDOWN UTILITIES -------------------------------
    @property
    def category_hierarchy(self) -> list[tuple[str, int, tuple]]:
        """List of tuples containing (item, indentation, and set of (sub)-categories)

        Returns:
            list[tuple]: [(item, indent, (category, subcategory, sub-sub, etc.))]
        """
        if not self._category_hierarchy:
            categories, indents, category_set, prev_category_tuple = [], [], [], tuple()
            for category_tuple in self.categories.keys():
                for idx, category in enumerate(category_tuple):  # indent lvl, category
                    prev_category = (  # previous category at same heading level
                        prev_category_tuple[idx]
                        if idx < len(prev_category_tuple)
                        else None
                    )
                    if category != prev_category:  # if new, add
                        categories.append(category)
                        indents.append(idx)
                        # subset of tuple relevant to heading level
                        category_set.append(category_tuple[0 : idx + 1])
                prev_category_tuple = category_tuple
            self._category_hierarchy = list(zip(categories, indents, category_set))
        return self._category_hierarchy

    @property
    def md_TOC(self) -> str:
        """Generate markdown Table of Contents with category_hierarchy"""
        if not self._md_TOC:
            TOC = "<!-- MarkdownTOC add_links=True -->\n"
            for category, indent, _ in self.category_hierarchy:
                TOC += make_link(category, indent)
            self._md_TOC = TOC + "<!-- /MarkdownTOC -->\n"
        return self._md_TOC

    def make_entries(self, category_set: set) -> str:
        """All entries into bulleted lists with key prefixes.

        Args:
            category_set (set): unique set of categories (category, subcategory)"""
        entries = ""
        for item_name in self.categories.get(category_set, []):
            entries += self.as_dict[item_name].markdown
        return entries

    def write_md(self, output_fp: str = None, TOC: bool = False):
        """Write markdown

        Args:
            output_fp (str, optional): relative path for writing output file. Default
                None meaning save to ../docs/src/1_Mechanics/ path with same file name
            TOC (bool, optional): Write table of contents. Default False
        """
        if not output_fp:
            output_fp = self.filepath_output + self._stem + ".md"
        output = (
            "<!-- markdownlint-disable MD013 MD024 -->\n"
            + "<!-- DEVELOPERS: Please edit corresponding yaml -->\n"
        )
        if TOC:
            output += self.md_TOC
        for category, indent, category_set in self.category_hierarchy:
            output += make_header(category, indent)
            output += self.make_entries(category_set)
        with open(output_fp, "w", newline="") as f:
            f.write(output)
        logger.info(f"Wrote md: {output_fp}")

    # --------------------------------- CSV UTILITIES ----------------------------------

    @abstractmethod
    def csv_fields(self) -> list:
        """Column names for csv

        Returns:
            fields (list): list of column headers for CSV"""
        pass

    def write_csv(self, output_fp: str = None, delimiter: str = "\t"):
        """Write CSV from YAML, default is tab-delimited

        Args:
            output_fp (str): relative filepath. Default none, which means local
                _output subfolder
            delimiter (str): column delimiter. `\t` for tab or `,` for comma. If other,
                must provide extension in ext
        """
        suffix_dict = {"\t": ".tsv", ",": ".csv"}
        if not output_fp:
            output_fp = self.filepath_output + self._stem + suffix_dict[delimiter]
        rows = []
        with open(output_fp, "w", newline="") as f_output:
            csv_output = csv.DictWriter(
                f_output,
                fieldnames=self.csv_fields,
                delimiter=delimiter,
            )
            csv_output.writeheader()
            for i in self.as_dict.values():
                rows.append(i.csv_dict)
            csv_output.writerows(rows)
        logger.info(f"Wrote csv: {output_fp}")

__init__(input_files)

Takes input files, loads raw data, gets stem and name for saving

If file not found at input_file relative path, adds ./automation/_input/

Source code in automation/templates/yaml_spec.py
def __init__(self, input_files) -> None:
    """Takes input files, loads raw data, gets stem and name for saving

    If file not found at input_file relative path, adds ./automation/_input/
    """
    self._md_TOC = ""
    self._raw_data = {}
    self._categories = set()
    self._category_hierarchy = None
    self._content = dict()
    self._fields = None
    self._default_root = os.getenv("THEGAME_ROOT") or "./automation/"
    self._filepath_default_input = self._default_root + "_input/"
    self._filepath_default_output = self._default_root + "_output/"
    self._filepath_mechanics = "./docs/src/1_Mechanics/"
    self._tried_loading = False
    self._limit_types = []
    self._as_dict = {}
    self._type_dict = {}  # Declaring on build_contents

    input_files = ensure_list(input_files)
    if not input_files:
        logger.critical(
            "YamlSpec received empty file list. Check that file name contains"
            + " relevant keywords like 'power', 'vuln', 'best', 'pc', or 'item'"
        )
    elif len(input_files) > 1:
        self._stem = (
            # When multiple inputs, take prefix before '_', add 'Combined'
            os.path.splitext(os.path.basename(input_files[0]))[0]
            + "_Combined"
        )
    elif len(input_files) == 1:
        self._stem = os.path.splitext(os.path.basename(input_files[0]))[0]

    self._filepath_output = (
        self._filepath_default_output
        if "SAMPLE" in self._stem or "test" in self._stem
        else self._filepath_mechanics
    )

    for input_file in input_files:  # If provided mult files, combine
        if not os.path.exists(input_file):
            input_file = self._filepath_default_input + input_file
        logger.debug(f"Loading {input_file}")
        self._raw_data.update(load_yaml(input_file))
    self._template = self._raw_data.pop("Template", None)
    self._name = self._stem.split("_")[-1]
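
As a worked example of the path fallback and stem derivation (the file names and the Powers subclass from the sketch above are assumptions):

spec = Powers("Powers_SAMPLE.yaml")
# Not found at the relative path, so it is read from
# ./automation/_input/Powers_SAMPLE.yaml; because the stem contains "SAMPLE",
# output defaults to ./automation/_output/ instead of ./docs/src/1_Mechanics/.

combined = Powers(["Powers_Core.yaml", "Powers_Extra.yaml"])
# Both files are merged into one raw_data dict; the stem becomes
# "Powers_Core_Combined" (first file's basename plus "_Combined") and
# self._name is the text after the last underscore, here "Combined".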

categories() abstractmethod

{(hierarchy tuple): [List of names]} dict

Source code in automation/templates/yaml_spec.py
@abstractmethod
def categories(self) -> dict:
    """{(hierarchy tuple): [List of names]} dict"""
    pass

type_dict: dict property

Cache of items by type e.g. {Major: [a, b]}

category_hierarchy: list[tuple[str, int, tuple]] property

List of tuples containing (item, indentation, and set of (sub)-categories)

Returns:

    list[tuple[str, int, tuple]]: [(item, indent, (category, subcategory, sub-sub, etc.))]
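
As a worked example (the beast names are illustrative assumptions), a categories dict keyed by nested tuples flattens into one row per heading that changes:

# Given categories such as:
#   {("Companion", "Beast"): ["Wolf"],
#    ("Companion", "Spirit"): ["Wisp"],
#    ("Foe",): ["Bandit"]}
# category_hierarchy yields:
#   [("Companion", 0, ("Companion",)),
#    ("Beast",     1, ("Companion", "Beast")),
#    ("Spirit",    1, ("Companion", "Spirit")),
#    ("Foe",       0, ("Foe",))]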

md_TOC: str property

Generate markdown Table of Contents with category_hierarchy

make_entries(category_set)

All entries into bulleted lists with key prefixes.

Parameters:

    category_set (set): unique set of categories (category, subcategory). Required.
Source code in automation/templates/yaml_spec.py
def make_entries(self, category_set: set) -> str:
    """All entries into bulleted lists with key prefixes.

    Args:
        category_set (set): unique set of categories (category, subcategory)"""
    entries = ""
    for item_name in self.categories.get(category_set, []):
        entries += self.as_dict[item_name].markdown
    return entries
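
A brief usage sketch: despite the set annotation, the key passed in is one of the tuple keys produced by _build_categories, continuing the illustrative categories above.

entries_md = spec.make_entries(("Companion", "Beast"))
# concatenates the .markdown of every item filed under that (sub)category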

write_md(output_fp=None, TOC=False)

Write markdown

Parameters:

    output_fp (str, optional): relative path for writing the output file. Defaults to
        None, meaning save to the ../docs/src/1_Mechanics/ path with the same file name.
    TOC (bool, optional): write a table of contents. Defaults to False.
Source code in automation/templates/yaml_spec.py
def write_md(self, output_fp: str = None, TOC: bool = False):
    """Write markdown

    Args:
        output_fp (str, optional): relative path for writing output file. Default
            None meaning save to ../docs/src/1_Mechanics/ path with same file name
        TOC (bool, optional): Write table of contents. Default False
    """
    if not output_fp:
        output_fp = self.filepath_output + self._stem + ".md"
    output = (
        "<!-- markdownlint-disable MD013 MD024 -->\n"
        + "<!-- DEVELOPERS: Please edit corresponding yaml -->\n"
    )
    if TOC:
        output += self.md_TOC
    for category, indent, category_set in self.category_hierarchy:
        output += make_header(category, indent)
        output += self.make_entries(category_set)
    with open(output_fp, "w", newline="") as f:
        f.write(output)
    logger.info(f"Wrote md: {output_fp}")

csv_fields() abstractmethod

Column names for csv

Returns:

    fields (list): list of column headers for the CSV
Source code in automation/templates/yaml_spec.py
@abstractmethod
def csv_fields(self) -> list:
    """Column names for csv

    Returns:
        fields (list): list of column headers for CSV"""
    pass

write_csv(output_fp=None, delimiter='\t')

Write CSV from YAML, default is tab-delimited

Parameters:

    output_fp (str, optional): relative filepath. Defaults to None, which means the
        local _output subfolder.
    delimiter (str, optional): column delimiter, '\t' for tab or ',' for comma. If
        other, must provide extension in ext. Defaults to '\t'.
Source code in automation/templates/yaml_spec.py
def write_csv(self, output_fp: str = None, delimiter: str = "\t"):
    """Write CSV from YAML, default is tab-delimited

    Args:
        output_fp (str): relative filepath. Default none, which means local
            _output subfolder
        delimiter (str): column delimiter. `\t` for tab or `,` for comma. If other,
            must provide extension in ext
    """
    suffix_dict = {"\t": ".tsv", ",": ".csv"}
    if not output_fp:
        output_fp = self.filepath_output + self._stem + suffix_dict[delimiter]
    rows = []
    with open(output_fp, "w", newline="") as f_output:
        csv_output = csv.DictWriter(
            f_output,
            fieldnames=self.csv_fields,
            delimiter=delimiter,
        )
        csv_output.writeheader()
        for i in self.as_dict.values():
            rows.append(i.csv_dict)
        csv_output.writerows(rows)
    logger.info(f"Wrote csv: {output_fp}")