pull/156/head
KB-perByte 2022-04-04 20:56:07 +05:30
parent f337f82d59
commit 0cbb257293
No known key found for this signature in database
GPG Key ID: F49472BE1744C3D5
2 changed files with 46 additions and 36 deletions

View File

@ -1,6 +1,6 @@
# #
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright 2021 Red Hat # Copyright 2022 Red Hat
# GNU General Public License v3.0+ # GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# #
@ -1234,7 +1234,7 @@ def _consolidate(*args, **kwargs):
class FilterModule(object): class FilterModule(object):
"""keep_keys""" """consolidate"""
def filters(self): def filters(self):

View File

@ -1,12 +1,12 @@
# #
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright 2021 Red Hat # Copyright 2022 Red Hat
# GNU General Public License v3.0+ # GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# #
""" """
The keep_keys plugin code The consolidate plugin code
""" """
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
@ -27,17 +27,15 @@ def _raise_error(filter, msg):
def fail_on_filter(validator_func): def fail_on_filter(validator_func):
"""decorator to fail on supplied filters"""
def update_err(*args, **kwargs): def update_err(*args, **kwargs):
res, err = validator_func(*args, **kwargs) res, err = validator_func(*args, **kwargs)
if err.get("match_key_err"): if err.get("match_key_err"):
_raise_error( _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"]))
"fail_missing_match_key", ", ".join(err["match_key_err"])
)
if err.get("match_val_err"): if err.get("match_val_err"):
_raise_error( _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"]))
"fail_missing_match_value", ", ".join(err["match_val_err"])
)
if err.get("duplicate_err"): if err.get("duplicate_err"):
_raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) _raise_error("fail_duplicate", ", ".join(err["duplicate_err"]))
return res return res
@ -49,13 +47,9 @@ def fail_on_filter(validator_func):
def check_missing_match_key_duplicate( def check_missing_match_key_duplicate(
data_sources, fail_missing_match_key, fail_duplicate data_sources, fail_missing_match_key, fail_duplicate
): ):
"""Validate the operation """Checks if the match_key specified is present in all the supplied data,
:param operation: The operation also checks for duplicate data across all the data sources"""
:type operation: str
:raises: AnsibleFilterError
"""
results, errors_match_key, errors_duplicate = [], [], [] results, errors_match_key, errors_duplicate = [], [], []
# Check for missing and duplicate match key
for ds_idx, data_source in enumerate(data_sources): for ds_idx, data_source in enumerate(data_sources):
match_key = data_source["match_key"] match_key = data_source["match_key"]
ds_values = [] ds_values = []
@ -71,9 +65,7 @@ def check_missing_match_key_duplicate(
continue continue
if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
errors_duplicate.append( errors_duplicate.append(f"Duplicate values in data source {ds_idx}")
f"Duplicate values in data source {ds_idx}"
)
results.append(set(ds_values)) results.append(set(ds_values))
return results, { return results, {
"match_key_err": errors_match_key, "match_key_err": errors_match_key,
@ -82,11 +74,19 @@ def check_missing_match_key_duplicate(
@fail_on_filter @fail_on_filter
def check_missing_match_values(results, fail_missing_match_value): def check_missing_match_values(matched_keys, fail_missing_match_value):
"""Checks values to match be consistent over all the whole data source
Args:
matched_keys (list): list of unique keys based on specified match_keys
fail_missing_match_value (bool): Fail if match_key value is missing in a data set
Returns:
set: set of unique values
"""
errors_match_values = [] errors_match_values = []
all_values = set(itertools.chain.from_iterable(results)) all_values = set(itertools.chain.from_iterable(matched_keys))
if fail_missing_match_value: if fail_missing_match_value:
for ds_idx, ds_values in enumerate(results): for ds_idx, ds_values in enumerate(matched_keys):
missing_match = all_values - ds_values missing_match = all_values - ds_values
if missing_match: if missing_match:
errors_match_values.append( errors_match_values.append(
@ -96,13 +96,21 @@ def check_missing_match_values(results, fail_missing_match_value):
def consolidate_facts(data_sources, all_values): def consolidate_facts(data_sources, all_values):
"""Iterate over all the data sources and consolidate the data
Args:
data_sources (list): supplied data sources
all_values (set): a set of keys to iterate over
Returns:
list: list of consolidated data
"""
consolidated_facts = {} consolidated_facts = {}
for data_source in data_sources: for data_source in data_sources:
match_key = data_source["match_key"] match_key = data_source["match_key"]
source = data_source["prefix"] source = data_source["prefix"]
data_dict = { data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d}
d[match_key]: d for d in data_source["data"] if match_key in d
}
for value in sorted(all_values): for value in sorted(all_values):
if value not in consolidated_facts: if value not in consolidated_facts:
consolidated_facts[value] = {} consolidated_facts[value] = {}
@ -116,19 +124,21 @@ def consolidate(
fail_missing_match_value=False, fail_missing_match_value=False,
fail_duplicate=False, fail_duplicate=False,
): ):
"""keep selective keys recursively from a given data" """Calls data validation and consolidation functions
:param data: The data passed in (data|keep_keys(...))
:type data: raw Args:
:param target: List of keys on with operation is to be performed data_source (list): list of dicts as data sources
:type data: list fail_missing_match_key (bool, optional): Fails if match_keys not present in data set. Defaults to False.
:type elements: string fail_missing_match_value (bool, optional): Fails if a matching attribute is missing in a data set. Defaults to False.
:param matching_parameter: matching type of the target keys with data keys fail_duplicate (bool, optional): Fails if duplicate data is present in a data set. Defaults to False.
:type data: str
Returns:
list: list of dicts of validated and consolidated data
""" """
# write code here
key_sets = check_missing_match_key_duplicate( key_sets = check_missing_match_key_duplicate(
data_source, fail_missing_match_key, fail_duplicate data_source, fail_missing_match_key, fail_duplicate
) )
key_vals = check_missing_match_values(key_sets, fail_missing_match_value) key_vals = check_missing_match_values(key_sets, fail_missing_match_value)
datapr = consolidate_facts(data_source, key_vals) consolidated_facts = consolidate_facts(data_source, key_vals)
return datapr return consolidated_facts