#
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#

"""
The consolidate plugin code
"""

from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible.errors import AnsibleFilterError

import itertools


def _raise_error(filter, msg):
    """Raise an error message, prepended with the filter name

    Args:
        filter (str): Filter name
        msg (str): Message specific to the supplied filter

    Raises:
        AnsibleFilterError: AnsibleError with filter name and message
    """
    error = "Error when using plugin 'consolidate': '{filter}' reported {msg}".format(
        filter=filter, msg=msg
    )
    raise AnsibleFilterError(error)
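
# For illustration, a sketch of what a failed validation produces; the exact
# wording follows directly from the format string above:
#
#   _raise_error("fail_duplicate", "Duplicate values in data source 1")
#   # AnsibleFilterError: Error when using plugin 'consolidate':
#   #     'fail_duplicate' reported Duplicate values in data source 1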


def fail_on_filter(validator_func):
    """Decorator that raises an error for failures reported by the wrapped validator

    Args:
        validator_func (func): Function that generates failure messages

    Returns:
        any: The wrapped function's return value when no errors are reported
    """

    def update_err(*args, **kwargs):
        """Returns the validator's value or raises an error per the reported failures

        Returns:
            any: Return value of the wrapped function call
        """
        res, err = validator_func(*args, **kwargs)
        if err.get("match_key_err"):
            _raise_error(
                "fail_missing_match_key", ", ".join(err["match_key_err"])
            )
        if err.get("match_val_err"):
            _raise_error(
                "fail_missing_match_value", ", ".join(err["match_val_err"])
            )
        if err.get("duplicate_err"):
            _raise_error("fail_duplicate", ", ".join(err["duplicate_err"]))
        return res

    return update_err
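
# The decorator expects the wrapped validator to return a (result, errors)
# tuple. A minimal sketch of that contract, using a hypothetical validator
# that is not part of this module:
#
#   @fail_on_filter
#   def my_validator(data):
#       return data, {"match_key_err": [], "match_val_err": [], "duplicate_err": []}
#
#   my_validator([1, 2])  # -> [1, 2]; raises only when an error list is non-empty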


@fail_on_filter
def check_missing_match_key_duplicate(
    data_sources, fail_missing_match_key, fail_duplicate
):
    """Check that the specified match_key is present in all the supplied data,
    and check for duplicate data across all the data sources

    Args:
        data_sources (list): list of dicts as data sources
        fail_missing_match_key (bool): Fail if the match_key is not present in a data entry
        fail_duplicate (bool): Fail if duplicate values are present in a data source

    Returns:
        list: list of sets of unique keys, one per data source, based on the specified match_keys
    """
    results, errors_match_key, errors_duplicate = [], [], []
    for ds_idx, data_source in enumerate(data_sources, start=1):
        match_key = data_source["match_key"]
        ds_values = []

        for dd_idx, data_dict in enumerate(data_source["data"], start=1):
            try:
                ds_values.append(data_dict[match_key])
            except KeyError:
                if fail_missing_match_key:
                    errors_match_key.append(
                        "Missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format(
                            match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx
                        )
                    )
                continue

        if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
            errors_duplicate.append(
                "Duplicate values in data source {ds_idx}".format(ds_idx=ds_idx)
            )
        results.append(set(ds_values))
    return results, {
        "match_key_err": errors_match_key,
        "duplicate_err": errors_duplicate,
    }
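
# A sketch of the shape this validator works with; the field names below are
# illustrative, not mandated by the module:
#
#   data_sources = [
#       {
#           "name": "interfaces",
#           "match_key": "name",
#           "data": [{"name": "eth0"}, {"name": "eth1"}],
#       }
#   ]
#   # check_missing_match_key_duplicate(data_sources, True, True)
#   # -> [{"eth0", "eth1"}]  (the decorator returns only the results on success)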


@fail_on_filter
def check_missing_match_values(matched_keys, fail_missing_match_value):
    """Check that the values to match are consistent across all data sources

    Args:
        matched_keys (list): list of sets of unique keys based on the specified match_keys
        fail_missing_match_value (bool): Fail if a match_key value is missing in a data set

    Returns:
        set: set of unique values
    """
    errors_match_values = []
    all_values = set(itertools.chain.from_iterable(matched_keys))
    if fail_missing_match_value:
        for ds_idx, ds_values in enumerate(matched_keys, start=1):
            missing_match = all_values - ds_values
            if missing_match:
                m_matches = ", ".join(missing_match)
                errors_match_values.append(
                    "Missing match value {m_matches} in data source {ds_idx}".format(
                        ds_idx=ds_idx, m_matches=m_matches
                    )
                )
    return all_values, {"match_val_err": errors_match_values}
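
# Illustrative sketch: given per-source key sets like those produced above,
# the union is returned, and any source missing a value is flagged:
#
#   # check_missing_match_values([{"eth0", "eth1"}, {"eth0"}], True)
#   # -> raises: "Missing match value eth1 in data source 2"
#   # check_missing_match_values([{"eth0", "eth1"}, {"eth0"}], False)
#   # -> {"eth0", "eth1"}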


def consolidate_facts(data_sources, all_values):
    """Iterate over all the data sources and consolidate the data

    Args:
        data_sources (list): supplied data sources
        all_values (set): a set of keys to iterate over

    Returns:
        dict: dict of consolidated data, keyed by match value
    """

    consolidated_facts = {}
    for data_source in data_sources:
        match_key = data_source["match_key"]
        source = data_source["name"]
        data_dict = {
            d[match_key]: d for d in data_source["data"] if match_key in d
        }
        for value in sorted(all_values):
            if value not in consolidated_facts:
                consolidated_facts[value] = {}
            consolidated_facts[value][source] = data_dict.get(value, {})
    return consolidated_facts
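
# Output shape, sketched with the illustrative "interfaces" source from
# earlier (assumed field names, not mandated by the module):
#
#   # consolidate_facts(data_sources, {"eth0", "eth1"})
#   # -> {
#   #        "eth0": {"interfaces": {"name": "eth0"}},
#   #        "eth1": {"interfaces": {"name": "eth1"}},
#   #    }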


def consolidate(
    data_sources,
    fail_missing_match_key=False,
    fail_missing_match_value=False,
    fail_duplicate=False,
):
    """Calls data validation and consolidation functions

    Args:
        data_sources (list): list of dicts as data sources
        fail_missing_match_key (bool, optional): Fail if match_keys are not present in a data set. Defaults to False.
        fail_missing_match_value (bool, optional): Fail if a matching attribute is missing in a data source. Defaults to False.
        fail_duplicate (bool, optional): Fail if duplicate values are present in a data source. Defaults to False.

    Returns:
        dict: dict of validated and consolidated data, keyed by match value
    """
    key_sets = check_missing_match_key_duplicate(
        data_sources, fail_missing_match_key, fail_duplicate
    )
    key_vals = check_missing_match_values(key_sets, fail_missing_match_value)
    consolidated_facts = consolidate_facts(data_sources, key_vals)
    return consolidated_facts
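
# End-to-end sketch with illustrative data (field names are assumptions, not
# part of the module): validation runs first, then consolidation keyed on
# each source's match_key.
#
#   # consolidate(
#   #     [
#   #         {"name": "acl", "match_key": "name", "data": [{"name": "eth0", "acl": "deny"}]},
#   #         {"name": "mtu", "match_key": "name", "data": [{"name": "eth0", "mtu": 1500}]},
#   #     ]
#   # )
#   # -> {"eth0": {"acl": {"name": "eth0", "acl": "deny"},
#   #              "mtu": {"name": "eth0", "mtu": 1500}}}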