tox fixed and docstrings

pull/156/head
KB-perByte 2022-04-04 21:06:58 +05:30
parent 0cbb257293
commit 1b34ebd896
GPG Key ID: F49472BE1744C3D5
2 changed files with 42 additions and 18 deletions

@@ -1208,15 +1208,10 @@ try:
 except ImportError:
     from jinja2.filters import environmentfilter as pass_environment
-# import debugpy
-# debugpy.listen(3000)
-# debugpy.wait_for_client()
 @pass_environment
 def _consolidate(*args, **kwargs):
-    """keep specific keys from a data recursively"""
+    """Consolidate facts together on common attributes"""
     keys = [
         "data_source",
@@ -1226,7 +1221,9 @@ def _consolidate(*args, **kwargs):
     ]
     data = dict(zip(keys, args[1:]))
     data.update(kwargs)
-    aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate")
+    aav = AnsibleArgSpecValidator(
+        data=data, schema=DOCUMENTATION, name="consolidate"
+    )
     valid, errors, updated_data = aav.validate()
     if not valid:
         raise AnsibleFilterError(errors)
@@ -1234,9 +1231,9 @@ def _consolidate(*args, **kwargs):
 class FilterModule(object):
-    """consolidate"""
+    """Consolidate"""
     def filters(self):
-        """a mapping of filter names to functions"""
+        """A mapping of filter names to functions"""
         return {"consolidate": _consolidate}
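Note on the argument handling above: dict(zip(keys, args[1:])) maps positional Jinja filter arguments onto the documented option names, with args[0] being the Jinja environment injected by @pass_environment, and explicit keyword arguments winning via data.update(kwargs). A minimal standalone sketch of the idiom; the key names after "data_source" are an assumption drawn from the fail_* identifiers elsewhere in this commit (the real list is truncated in this diff):

    # Sketch of _consolidate's positional-to-named mapping (plain Python).
    keys = [
        "data_source",
        "fail_missing_match_key",    # assumed from the fail_* names below
        "fail_missing_match_value",  # assumed
        "fail_duplicate",            # assumed
    ]

    def map_filter_args(*args, **kwargs):
        data = dict(zip(keys, args[1:]))  # args[0] is the Jinja environment
        data.update(kwargs)  # keyword arguments take precedence
        return data

    # Positional and keyword call styles yield the same option dict:
    print(map_filter_args(None, [{"data": []}], fail_duplicate=True))
    # {'data_source': [{'data': []}], 'fail_duplicate': True}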

@@ -18,24 +18,39 @@ import itertools
 def _raise_error(filter, msg):
     """Raise an error message, prepend with filter name
-    :param msg: The message
-    :type msg: str
-    :raises: AnsibleError
+    Args:
+        filter (str): Filter name
+        msg (str): Message specific to filter supplied
+    Raises:
+        AnsibleFilterError: AnsibleError with filter name and message
     """
     error = f"Error when using plugin 'consolidate': '{filter}' reported {msg}"
     raise AnsibleFilterError(error)
 def fail_on_filter(validator_func):
-    """decorator to fail on supplied filters"""
+    """decorator to fail on supplied filters
+    Args:
+        validator_func (func): Function that generates failure messages
+    Returns:
+        raw: Value without errors if generated and not failed
+    """
     def update_err(*args, **kwargs):
         res, err = validator_func(*args, **kwargs)
         if err.get("match_key_err"):
-            _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"]))
+            _raise_error(
+                "fail_missing_match_key", ", ".join(err["match_key_err"])
+            )
         if err.get("match_val_err"):
-            _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"]))
+            _raise_error(
+                "fail_missing_match_value", ", ".join(err["match_val_err"])
+            )
         if err.get("duplicate_err"):
             _raise_error("fail_duplicate", ", ".join(err["duplicate_err"]))
         return res
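The decorator above relies on a simple contract: the wrapped validator returns a (result, errors) pair, and update_err raises through _raise_error as soon as any error bucket is non-empty, otherwise handing the result back unchanged. A self-contained sketch of that contract, assuming ansible-core is installed; the "check" validator here is hypothetical:

    from ansible.errors import AnsibleFilterError

    def _raise_error(filter, msg):
        raise AnsibleFilterError(
            f"Error when using plugin 'consolidate': '{filter}' reported {msg}"
        )

    def fail_on_filter(validator_func):
        def update_err(*args, **kwargs):
            res, err = validator_func(*args, **kwargs)
            if err.get("match_key_err"):
                _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"]))
            return res  # no errors: pass the validated result through
        return update_err

    @fail_on_filter
    def check(data):
        # Hypothetical validator: flag entries missing the "name" key
        missing = [f"entry {i}" for i, d in enumerate(data) if "name" not in d]
        return data, {"match_key_err": missing}

    check([{"name": "eth0"}])   # returns the data untouched
    # check([{"mtu": 1500}])    # would raise AnsibleFilterError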
@@ -48,7 +63,15 @@ def check_missing_match_key_duplicate(
     data_sources, fail_missing_match_key, fail_duplicate
 ):
     """Checks if the match_key specified is present in all the supplied data,
-    also checks for duplicate data accross all the data sources"""
+    also checks for duplicate data accross all the data sources
+    Args:
+        data_sources (list): list of dicts as data sources
+        fail_missing_match_key (bool): Fails if match_keys not present in data set
+        fail_duplicate (bool): Fails if duplicate data present in a data
+    Returns:
+        list: list of unique keys based on specified match_keys
+    """
     results, errors_match_key, errors_duplicate = [], [], []
     for ds_idx, data_source in enumerate(data_sources):
         match_key = data_source["match_key"]
@@ -65,7 +88,9 @@ def check_missing_match_key_duplicate(
             continue
         if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
-            errors_duplicate.append(f"Duplicate values in data source {ds_idx}")
+            errors_duplicate.append(
+                f"Duplicate values in data source {ds_idx}"
+            )
         results.append(set(ds_values))
     return results, {
         "match_key_err": errors_match_key,
@@ -110,7 +135,9 @@ def consolidate_facts(data_sources, all_values):
     for data_source in data_sources:
         match_key = data_source["match_key"]
         source = data_source["prefix"]
-        data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d}
+        data_dict = {
+            d[match_key]: d for d in data_source["data"] if match_key in d
+        }
         for value in sorted(all_values):
             if value not in consolidated_facts:
                 consolidated_facts[value] = {}
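For the hunk above, consolidate_facts re-keys each source's data by its match_key and files entries under a per-source prefix. A rough standalone sketch of the resulting shape; the inner assignment uses setdefault/get as a stand-in for the lines that fall outside this hunk:

    def consolidate_facts(data_sources, all_values):
        consolidated_facts = {}
        for data_source in data_sources:
            match_key = data_source["match_key"]
            source = data_source["prefix"]
            data_dict = {
                d[match_key]: d for d in data_source["data"] if match_key in d
            }
            for value in sorted(all_values):
                # Assumed equivalent of the elided body: group per match value
                consolidated_facts.setdefault(value, {})[source] = data_dict.get(value, {})
        return consolidated_facts

    sources = [
        {"data": [{"name": "eth0", "mtu": 1500}], "match_key": "name", "prefix": "interfaces"},
        {"data": [{"name": "eth0", "vlan": 10}], "match_key": "name", "prefix": "l2_interfaces"},
    ]
    print(consolidate_facts(sources, {"eth0"}))
    # {'eth0': {'interfaces': {'name': 'eth0', 'mtu': 1500},
    #           'l2_interfaces': {'name': 'eth0', 'vlan': 10}}}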