From 66bd592df441818270e7f92b8ff6f460302abb77 Mon Sep 17 00:00:00 2001
From: KB-perByte
Date: Mon, 4 Apr 2022 17:55:01 +0530
Subject: [PATCH 01/23] init code

---
 plugins/filter/consolidate.py       | 349 ++++++++++++++++++++++++++++
 plugins/plugin_utils/consolidate.py | 126 ++++++++++
 2 files changed, 475 insertions(+)
 create mode 100644 plugins/filter/consolidate.py
 create mode 100644 plugins/plugin_utils/consolidate.py

diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py
new file mode 100644
index 0000000..e5484bc
--- /dev/null
+++ b/plugins/filter/consolidate.py
@@ -0,0 +1,349 @@
+#
+# -*- coding: utf-8 -*-
+# Copyright 2021 Red Hat
+# GNU General Public License v3.0+
+# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+"""
+The consolidate filter plugin
+"""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+    name: consolidate
+    author: Sagar Paul (@KB-perByte)
+    version_added: "2.5.0"
+    short_description: Consolidate facts together on common attributes.
+    description:
+        - This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned.
+        - Matching parameter defaults to equals unless C(matching_parameter) is explicitly mentioned.
+        - Using the parameters below - C(data_source|ansible.utils.consolidate(fail_missing_match_key=False))
+    options:
+      data_source:
+        description:
+          - This option represents a list of dictionaries, each describing one source of facts to consolidate.
+          - For example C(facts_source|ansible.utils.consolidate(...)), in this case C(facts_source) represents this option.
+        type: list
+        elements: dict
+        suboptions:
+          data:
+            description: Specify facts data that gets consolidated.
+            type: raw
+          match_key:
+            description: Specify the key whose value entries are matched on across data sources.
+            type: str
+          prefix:
+            description: Specify the prefix under which this source's data appears in the result set.
+            type: str
+      fail_missing_match_key:
+        description: Fail if the match_key is absent from any dictionary in a data source.
+ type: bool + fail_missing_match_value: + description: Specify the target keys to keep in list format. + type: bool + fail_duplicate: + description: Specify the matching configuration of target keys and data attributes. + type: bool +""" + +EXAMPLES = r""" + +##example.yaml +interfaces: + - name: eth0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: eth1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: eth2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + +##Playbook +vars_files: + - "example.yaml" +tasks: + - name: keep selective keys from dict/list of dict data + ansible.builtin.set_fact: + data: "{{ interfaces }}" + + - debug: + msg: "{{ data|ansible.utils.keep_keys(target=['description', 'name', 'mtu', 'duplex', 'enabled', 'vifs', 'vlan_id']) }}" + +##Output +# TASK [keep selective keys from python dict/list of dict] **************************************************************************************** +# ok: [localhost] => { +# "ansible_facts": { +# "data": [ +# { +# "duplex": "auto", +# "enabled": true, +# "name": "eth0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" +# }, +# { +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "eth1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": 
"Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "eth2" +# } +# ] +# }, +# "changed": false +# } +# Read vars_file 'example.yaml' + +# TASK [debug] ************************************************************************************************************* +# ok: [localhost] => { +# "msg": [ +# { +# "duplex": "auto", +# "enabled": true, +# "name": "eth0" +# }, +# { +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "eth1", +# "vifs": [ +# { +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "eth2" +# } +# ] +# } + +##example.yaml +interfaces: + - name: eth0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: eth1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: eth2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + +##Playbook +vars_files: + - "example.yaml" +tasks: + - name: keep selective keys from dict/list of dict data + ansible.builtin.set_fact: + data: "{{ interfaces }}" + + - debug: + msg: "{{ data|ansible.utils.keep_keys(target=['desc', 'name'], matching_parameter= 'starts_with') }}" + +##Output +# TASK [keep selective keys from python dict/list of dict] ************************** +# ok: [localhost] => { +# "ansible_facts": { +# "data": [ +# { +# "duplex": "auto", +# "enabled": true, +# "name": "eth0", 
+# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" +# }, +# { +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "eth1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "eth2" +# } +# ] +# }, +# "changed": false +# } +# Read vars_file 'example.yaml' + +# TASK [debug] ********************************************************************************** +# ok: [localhost] => { +# "msg": [ +# { +# "name": "eth0" +# }, +# { +# "description": "Configured by Ansible - Interface 1", +# "name": "eth1", +# "vifs": [ +# { +# "description": "Eth1 - VIF 100" +# }, +# { +# "description": "Eth1 - VIF 101" +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "name": "eth2" +# } +# ] +# } +""" + +from ansible.errors import AnsibleFilterError +from ansible_collections.ansible.utils.plugins.plugin_utils.consolidate import ( + consolidate, +) +from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import ( + AnsibleArgSpecValidator, +) + +try: + from jinja2.filters import pass_environment +except ImportError: + from jinja2.filters import environmentfilter as pass_environment + +import debugpy + +debugpy.listen(3000) +debugpy.wait_for_client() + + +@pass_environment +def _consolidate(*args, **kwargs): + """keep specific keys from a data recursively""" + + keys = [ + "data_source", + "fail_missing_match_key", + "fail_missing_match_value", + "fail_duplicate", + ] + data = dict(zip(keys, args[1:])) + data.update(kwargs) + aav = 
AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate") + valid, errors, updated_data = aav.validate() + if not valid: + raise AnsibleFilterError(errors) + return consolidate(**updated_data) + + +class FilterModule(object): + """keep_keys""" + + def filters(self): + + """a mapping of filter names to functions""" + return {"consolidate": _consolidate} diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py new file mode 100644 index 0000000..ed18804 --- /dev/null +++ b/plugins/plugin_utils/consolidate.py @@ -0,0 +1,126 @@ +# +# -*- coding: utf-8 -*- +# Copyright 2021 Red Hat +# GNU General Public License v3.0+ +# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# + +""" +The keep_keys plugin code +""" +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.errors import AnsibleFilterError +import itertools + + +def _raise_error(filter, msg): + """Raise an error message, prepend with filter name + :param msg: The message + :type msg: str + :raises: AnsibleError + """ + error = f"Error when using plugin 'consolidate': '{filter}' reported {msg}" + raise AnsibleFilterError(error) + + +def fail_on_filter(validator_func): + def update_err(*args, **kwargs): + + res, err = validator_func(*args, **kwargs) + if err.get("match_key_err"): + _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"])) + if err.get("match_val_err"): + _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"])) + if err.get("duplicate_err"): + _raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) + return res + + return update_err + + +@fail_on_filter +def check_missing_match_key_duplicate( + data_sources, fail_missing_match_key, fail_duplicate +): + """Validate the operation + :param operation: The operation + :type operation: str + :raises: AnsibleFilterError + """ + results, errors_match_key, errors_duplicate = [], [], [] + # Check for missing and 
duplicate match key + for ds_idx, data_source in enumerate(data_sources): + match_key = data_source["match_key"] + ds_values = [] + + for dd_idx, data_dict in enumerate(data_source["data"]): + try: + ds_values.append(data_dict[match_key]) + except KeyError: + if fail_missing_match_key: + errors_match_key.append( + f"Missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}" + ) + continue + + if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: + errors_duplicate.append(f"Duplicate values in data source {ds_idx}") + results.append(set(ds_values)) + return results, { + "match_key_err": errors_match_key, + "duplicate_err": errors_duplicate, + } + + +@fail_on_filter +def check_missing_match_values(results, fail_missing_match_value): + errors_match_values = [] + all_values = set(itertools.chain.from_iterable(results)) + if fail_missing_match_value: + for ds_idx, ds_values in enumerate(results): + missing_match = all_values - ds_values + if missing_match: + errors_match_values.append( + f"Missing match value {', '.join(missing_match)} in data source {ds_idx}" + ) + return all_values, {"match_val_err": errors_match_values} + + +def consolidate_facts(data_sources, all_values): + consolidated_facts = {} + for data_source in data_sources: + match_key = data_source["match_key"] + source = data_source["prefix"] + data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d} + for value in sorted(all_values): + if value not in consolidated_facts: + consolidated_facts[value] = {} + consolidated_facts[value][source] = data_dict.get(value, {}) + return consolidated_facts + + +def consolidate( + data_source, + fail_missing_match_key=False, + fail_missing_match_value=False, + fail_duplicate=False, +): + """keep selective keys recursively from a given data" + :param data: The data passed in (data|keep_keys(...)) + :type data: raw + :param target: List of keys on with operation is to be performed + :type data: list + :type elements: 
string + :param matching_parameter: matching type of the target keys with data keys + :type data: str + """ + # write code here + key_sets = check_missing_match_key_duplicate( + data_source, fail_missing_match_key, fail_duplicate + ) + key_vals = check_missing_match_values(key_sets, fail_missing_match_value) + datapr = consolidate_facts(data_source, key_vals) + return datapr From 141c175cff9a451a6abf17dd7a0060daa3087b70 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 4 Apr 2022 17:58:09 +0530 Subject: [PATCH 02/23] sanity fixex updated --- .pre-commit-config.yaml | 2 +- plugins/filter/cidr_merge.py | 3 +- plugins/filter/consolidate.py | 4 +- plugins/filter/from_xml.py | 2 +- plugins/filter/get_path.py | 2 +- plugins/filter/hwaddr.py | 7 +- plugins/filter/index_of.py | 2 +- plugins/filter/ip4_hex.py | 7 +- plugins/filter/ipaddr.py | 5 +- plugins/filter/ipmath.py | 5 +- plugins/filter/ipsubnet.py | 7 +- plugins/filter/ipv4.py | 5 +- plugins/filter/ipv6.py | 5 +- plugins/filter/ipwrap.py | 5 +- plugins/filter/macaddr.py | 7 +- plugins/filter/network_in_network.py | 5 +- plugins/filter/network_in_usable.py | 5 +- plugins/filter/next_nth_usable.py | 5 +- plugins/filter/nthhost.py | 7 +- plugins/filter/param_list_compare.py | 2 +- plugins/filter/previous_nth_usable.py | 5 +- plugins/filter/reduce_on_network.py | 5 +- plugins/filter/slaac.py | 7 +- plugins/filter/to_paths.py | 2 +- plugins/filter/to_xml.py | 2 +- plugins/filter/usable_range.py | 2 +- plugins/plugin_utils/base/cli_parser.py | 2 +- plugins/plugin_utils/base/ipaddr_utils.py | 4 +- plugins/plugin_utils/base/ipaddress_utils.py | 9 +- plugins/plugin_utils/base/utils.py | 3 +- plugins/plugin_utils/consolidate.py | 16 ++- plugins/sub_plugins/cli_parser/json_parser.py | 4 +- .../sub_plugins/cli_parser/textfsm_parser.py | 6 +- plugins/sub_plugins/cli_parser/ttp_parser.py | 6 +- plugins/sub_plugins/cli_parser/xml_parser.py | 7 +- plugins/sub_plugins/validate/jsonschema.py | 8 +- plugins/test/in_any_network.py | 
2 +- plugins/test/in_network.py | 2 +- plugins/test/in_one_network.py | 2 +- plugins/test/ip.py | 4 +- plugins/test/ip_address.py | 2 +- plugins/test/ipv4.py | 2 +- plugins/test/ipv4_address.py | 2 +- plugins/test/ipv4_hostmask.py | 2 +- plugins/test/ipv4_netmask.py | 4 +- plugins/test/ipv6.py | 2 +- plugins/test/ipv6_address.py | 2 +- plugins/test/ipv6_ipv4_mapped.py | 4 +- plugins/test/ipv6_sixtofour.py | 4 +- plugins/test/ipv6_teredo.py | 4 +- plugins/test/loopback.py | 4 +- plugins/test/mac.py | 4 +- plugins/test/multicast.py | 4 +- plugins/test/private.py | 4 +- plugins/test/public.py | 4 +- plugins/test/reserved.py | 4 +- plugins/test/resolvable.py | 4 +- plugins/test/subnet_of.py | 4 +- plugins/test/supernet_of.py | 4 +- plugins/test/unspecified.py | 4 +- test-requirements.txt | 2 +- tests/unit/plugins/action/test_cli_parse.py | 100 +++++++----------- tests/unit/plugins/filter/test_ipmath.py | 2 +- .../sub_plugins/validate/test_config.py | 6 +- 64 files changed, 166 insertions(+), 202 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8a3cc65..efc83cd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: args: [--branch, main] - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 19.3b0 + rev: 22.3.0 hooks: - id: black args: [-l, "79"] diff --git a/plugins/filter/cidr_merge.py b/plugins/filter/cidr_merge.py index ef4dc74..e4bfd85 100644 --- a/plugins/filter/cidr_merge.py +++ b/plugins/filter/cidr_merge.py @@ -177,8 +177,7 @@ def cidr_merge(value, action="merge"): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index e5484bc..da301e8 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -333,7 +333,9 @@ def _consolidate(*args, **kwargs): ] data = 
dict(zip(keys, args[1:])) data.update(kwargs) - aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate") + aav = AnsibleArgSpecValidator( + data=data, schema=DOCUMENTATION, name="consolidate" + ) valid, errors, updated_data = aav.validate() if not valid: raise AnsibleFilterError(errors) diff --git a/plugins/filter/from_xml.py b/plugins/filter/from_xml.py index b423234..e805ac4 100644 --- a/plugins/filter/from_xml.py +++ b/plugins/filter/from_xml.py @@ -141,7 +141,7 @@ def _from_xml(*args, **kwargs): class FilterModule(object): - """ from_xml """ + """from_xml""" def filters(self): diff --git a/plugins/filter/get_path.py b/plugins/filter/get_path.py index 97a67e6..88d2715 100644 --- a/plugins/filter/get_path.py +++ b/plugins/filter/get_path.py @@ -180,7 +180,7 @@ def _get_path(*args, **kwargs): class FilterModule(object): - """ path filters """ + """path filters""" def filters(self): return {"get_path": _get_path} diff --git a/plugins/filter/hwaddr.py b/plugins/filter/hwaddr.py index e37f9cc..5a7d9c7 100644 --- a/plugins/filter/hwaddr.py +++ b/plugins/filter/hwaddr.py @@ -98,7 +98,7 @@ RETURN = """ @pass_environment def _hwaddr(*args, **kwargs): - """This filter check if string is a HW/MAC address and filter it """ + """This filter check if string is a HW/MAC address and filter it""" keys = ["value", "query", "alias"] data = dict(zip(keys, args[1:])) data.update(kwargs) @@ -112,8 +112,7 @@ def _hwaddr(*args, **kwargs): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -121,7 +120,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/index_of.py b/plugins/filter/index_of.py index 28a6a0f..874c820 100644 --- a/plugins/filter/index_of.py +++ b/plugins/filter/index_of.py @@ -338,7 +338,7 @@ def 
_index_of(*args, **kwargs): class FilterModule(object): - """ index_of """ + """index_of""" def filters(self): """a mapping of filter names to functions""" diff --git a/plugins/filter/ip4_hex.py b/plugins/filter/ip4_hex.py index 97a5316..093de7a 100644 --- a/plugins/filter/ip4_hex.py +++ b/plugins/filter/ip4_hex.py @@ -109,7 +109,7 @@ def _ip4_hex(*args, **kwargs): def ip4_hex(arg, delimiter=""): - """ Convert an IPv4 address to Hexadecimal notation """ + """Convert an IPv4 address to Hexadecimal notation""" numbers = list(map(int, arg.split("."))) return "{0:02x}{sep}{1:02x}{sep}{2:02x}{sep}{3:02x}".format( *numbers, sep=delimiter @@ -117,8 +117,7 @@ def ip4_hex(arg, delimiter=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -126,7 +125,7 @@ class FilterModule(object): } def filters(self): - """ ip4_hex filter """ + """ip4_hex filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/ipaddr.py b/plugins/filter/ipaddr.py index 178170f..9c1a490 100644 --- a/plugins/filter/ipaddr.py +++ b/plugins/filter/ipaddr.py @@ -281,8 +281,7 @@ def _ipaddr(*args, **kwargs): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -290,7 +289,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/ipmath.py b/plugins/filter/ipmath.py index a62a833..0ae81cc 100644 --- a/plugins/filter/ipmath.py +++ b/plugins/filter/ipmath.py @@ -169,8 +169,7 @@ def ipmath(value, amount): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # This filter is designed to do simple IP math/arithmetic @@ -178,7 
+177,7 @@ class FilterModule(object): } def filters(self): - """ ipmath filter""" + """ipmath filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/ipsubnet.py b/plugins/filter/ipsubnet.py index 69c4170..c0b208c 100644 --- a/plugins/filter/ipsubnet.py +++ b/plugins/filter/ipsubnet.py @@ -253,7 +253,7 @@ def _ipsubnet(*args, **kwargs): def ipsubnet(value, query="", index="x"): - """ Manipulate IPv4/IPv6 subnets """ + """Manipulate IPv4/IPv6 subnets""" try: vtype = ipaddr(value, "type") @@ -323,8 +323,7 @@ def ipsubnet(value, query="", index="x"): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -332,7 +331,7 @@ class FilterModule(object): } def filters(self): - """ ipsubnet filter """ + """ipsubnet filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/ipv4.py b/plugins/filter/ipv4.py index dff3f70..e43023b 100644 --- a/plugins/filter/ipv4.py +++ b/plugins/filter/ipv4.py @@ -160,8 +160,7 @@ def ipv4(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -169,7 +168,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if netaddr: return self.filter_map else: diff --git a/plugins/filter/ipv6.py b/plugins/filter/ipv6.py index 3bf1156..0a7caa8 100644 --- a/plugins/filter/ipv6.py +++ b/plugins/filter/ipv6.py @@ -178,8 +178,7 @@ def ipv6(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -187,7 +186,7 @@ class FilterModule(object): } def filters(self): - """ ipv6 filter """ + """ipv6 filter""" if HAS_NETADDR: return self.filter_map else: diff --git 
a/plugins/filter/ipwrap.py b/plugins/filter/ipwrap.py index 53c575b..bcdf8e7 100644 --- a/plugins/filter/ipwrap.py +++ b/plugins/filter/ipwrap.py @@ -200,8 +200,7 @@ def ipwrap(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -209,7 +208,7 @@ class FilterModule(object): } def filters(self): - """ ipwrap filter """ + """ipwrap filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/macaddr.py b/plugins/filter/macaddr.py index a285415..7075971 100644 --- a/plugins/filter/macaddr.py +++ b/plugins/filter/macaddr.py @@ -95,7 +95,7 @@ RETURN = """ @pass_environment def _macaddr(*args, **kwargs): - """This filter check if string is a HW/MAC address and filter it """ + """This filter check if string is a HW/MAC address and filter it""" keys = ["value", "query"] data = dict(zip(keys, args[1:])) data.update(kwargs) @@ -113,8 +113,7 @@ def macaddr(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -122,7 +121,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/network_in_network.py b/plugins/filter/network_in_network.py index b071162..6411bc6 100644 --- a/plugins/filter/network_in_network.py +++ b/plugins/filter/network_in_network.py @@ -147,8 +147,7 @@ def network_in_network(value, test): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -156,7 +155,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git 
a/plugins/filter/network_in_usable.py b/plugins/filter/network_in_usable.py index 8271648..21d79cc 100644 --- a/plugins/filter/network_in_usable.py +++ b/plugins/filter/network_in_usable.py @@ -149,8 +149,7 @@ def network_in_usable(value, test): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -158,7 +157,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/next_nth_usable.py b/plugins/filter/next_nth_usable.py index ce66070..3be81ab 100644 --- a/plugins/filter/next_nth_usable.py +++ b/plugins/filter/next_nth_usable.py @@ -137,8 +137,7 @@ def next_nth_usable(value, offset): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -146,7 +145,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/nthhost.py b/plugins/filter/nthhost.py index 6d13a1e..80aa0c8 100644 --- a/plugins/filter/nthhost.py +++ b/plugins/filter/nthhost.py @@ -110,7 +110,7 @@ def _nthhost(*args, **kwargs): def nthhost(value, query=""): - """ Returns the nth host within a network described by value. 
""" + """Returns the nth host within a network described by value.""" try: vtype = ipaddr(value, "type") if vtype == "address": @@ -137,8 +137,7 @@ def nthhost(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -146,7 +145,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/param_list_compare.py b/plugins/filter/param_list_compare.py index 54ae9ab..1f3f343 100644 --- a/plugins/filter/param_list_compare.py +++ b/plugins/filter/param_list_compare.py @@ -212,7 +212,7 @@ def param_list_compare(*args, **kwargs): class FilterModule(object): - """ param_list_compare """ + """param_list_compare""" def filters(self): """a mapping of filter names to functions""" diff --git a/plugins/filter/previous_nth_usable.py b/plugins/filter/previous_nth_usable.py index cc4f802..1e73377 100644 --- a/plugins/filter/previous_nth_usable.py +++ b/plugins/filter/previous_nth_usable.py @@ -136,8 +136,7 @@ def previous_nth_usable(value, offset): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -145,7 +144,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/reduce_on_network.py b/plugins/filter/reduce_on_network.py index 6cbdb9e..860d62f 100644 --- a/plugins/filter/reduce_on_network.py +++ b/plugins/filter/reduce_on_network.py @@ -137,8 +137,7 @@ def reduce_on_network(value, network): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -146,7 +145,7 @@ 
class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/slaac.py b/plugins/filter/slaac.py index 0a44c17..1a9ca7a 100644 --- a/plugins/filter/slaac.py +++ b/plugins/filter/slaac.py @@ -100,7 +100,7 @@ def _slaac(*args, **kwargs): def slaac(value, query=""): - """ Get the SLAAC address within given network """ + """Get the SLAAC address within given network""" try: vtype = ipaddr(value, "type") if vtype == "address": @@ -129,8 +129,7 @@ def slaac(value, query=""): class FilterModule(object): - """IP address and network manipulation filters - """ + """IP address and network manipulation filters""" filter_map = { # IP addresses and networks @@ -138,7 +137,7 @@ class FilterModule(object): } def filters(self): - """ ipaddr filter """ + """ipaddr filter""" if HAS_NETADDR: return self.filter_map else: diff --git a/plugins/filter/to_paths.py b/plugins/filter/to_paths.py index b343599..567b251 100644 --- a/plugins/filter/to_paths.py +++ b/plugins/filter/to_paths.py @@ -141,7 +141,7 @@ def _to_paths(*args, **kwargs): class FilterModule(object): - """ path filters """ + """path filters""" def filters(self): return {"to_paths": _to_paths} diff --git a/plugins/filter/to_xml.py b/plugins/filter/to_xml.py index 511154f..d4652ba 100644 --- a/plugins/filter/to_xml.py +++ b/plugins/filter/to_xml.py @@ -135,7 +135,7 @@ def _to_xml(*args, **kwargs): class FilterModule(object): - """ to_xml """ + """to_xml""" def filters(self): """a mapping of filter names to functions""" diff --git a/plugins/filter/usable_range.py b/plugins/filter/usable_range.py index 288b8e7..089a5c8 100644 --- a/plugins/filter/usable_range.py +++ b/plugins/filter/usable_range.py @@ -188,7 +188,7 @@ def _usable_range(ip): class FilterModule(object): - """ usable_range """ + """usable_range""" def filters(self): diff --git a/plugins/plugin_utils/base/cli_parser.py b/plugins/plugin_utils/base/cli_parser.py 
index 5f9ea7c..01cbdd7 100644 --- a/plugins/plugin_utils/base/cli_parser.py +++ b/plugins/plugin_utils/base/cli_parser.py @@ -7,7 +7,7 @@ __metaclass__ = type class CliParserBase: - """ The base class for cli parsers + """The base class for cli parsers Provides a _debug function to normalize parser debug output """ diff --git a/plugins/plugin_utils/base/ipaddr_utils.py b/plugins/plugin_utils/base/ipaddr_utils.py index 6c03720..da083fc 100644 --- a/plugins/plugin_utils/base/ipaddr_utils.py +++ b/plugins/plugin_utils/base/ipaddr_utils.py @@ -368,7 +368,7 @@ def _wrap_query(v, vtype, value): def ipaddr(value, query="", version=False, alias="ipaddr"): - """ Check if string is an IP address or network and filter it """ + """Check if string is an IP address or network and filter it""" query_func_extra_args = { "": ("vtype",), @@ -686,7 +686,7 @@ def _win_query(v): # ---- HWaddr / MAC address filters ---- def hwaddr(value, query="", alias="hwaddr"): - """ Check if string is a HW/MAC address and filter it """ + """Check if string is a HW/MAC address and filter it""" query_func_extra_args = {"": ("value",)} diff --git a/plugins/plugin_utils/base/ipaddress_utils.py b/plugins/plugin_utils/base/ipaddress_utils.py index cbe8624..c6c9875 100644 --- a/plugins/plugin_utils/base/ipaddress_utils.py +++ b/plugins/plugin_utils/base/ipaddress_utils.py @@ -29,8 +29,7 @@ except ImportError: def ip_network(ip): - """ PY2 compat shim, PY2 requires unicode - """ + """PY2 compat shim, PY2 requires unicode""" if not HAS_IPADDRESS: raise AnsibleError(missing_required_lib("ipaddress")) @@ -39,8 +38,7 @@ def ip_network(ip): def ip_address(ip): - """ PY2 compat shim, PY2 requires unicode - """ + """PY2 compat shim, PY2 requires unicode""" if not HAS_IPADDRESS: raise AnsibleError(missing_required_lib("ipaddress")) @@ -71,8 +69,7 @@ def _is_subnet_of(network_a, network_b): def _validate_args(plugin, doc, params): - """ argspec validator utility function - """ + """argspec validator utility 
function""" valid, argspec_result, updated_params = check_argspec( doc, plugin + " test", **params diff --git a/plugins/plugin_utils/base/utils.py b/plugins/plugin_utils/base/utils.py index 66f35b6..da3da57 100644 --- a/plugins/plugin_utils/base/utils.py +++ b/plugins/plugin_utils/base/utils.py @@ -18,8 +18,7 @@ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_valid def _validate_args(plugin, doc, params): - """ argspec validator utility function - """ + """argspec validator utility function""" valid, argspec_result, updated_params = check_argspec( doc, plugin + " test", **params diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index ed18804..d4d3d7e 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -31,9 +31,13 @@ def fail_on_filter(validator_func): res, err = validator_func(*args, **kwargs) if err.get("match_key_err"): - _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"])) + _raise_error( + "fail_missing_match_key", ", ".join(err["match_key_err"]) + ) if err.get("match_val_err"): - _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"])) + _raise_error( + "fail_missing_match_value", ", ".join(err["match_val_err"]) + ) if err.get("duplicate_err"): _raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) return res @@ -67,7 +71,9 @@ def check_missing_match_key_duplicate( continue if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: - errors_duplicate.append(f"Duplicate values in data source {ds_idx}") + errors_duplicate.append( + f"Duplicate values in data source {ds_idx}" + ) results.append(set(ds_values)) return results, { "match_key_err": errors_match_key, @@ -94,7 +100,9 @@ def consolidate_facts(data_sources, all_values): for data_source in data_sources: match_key = data_source["match_key"] source = data_source["prefix"] - data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d} 
+ data_dict = { + d[match_key]: d for d in data_source["data"] if match_key in d + } for value in sorted(all_values): if value not in consolidated_facts: consolidated_facts[value] = {} diff --git a/plugins/sub_plugins/cli_parser/json_parser.py b/plugins/sub_plugins/cli_parser/json_parser.py index 9c095f6..f6bd618 100644 --- a/plugins/sub_plugins/cli_parser/json_parser.py +++ b/plugins/sub_plugins/cli_parser/json_parser.py @@ -44,7 +44,7 @@ from ansible_collections.ansible.utils.plugins.plugin_utils.base.cli_parser impo class CliParser(CliParserBase): - """ The json parser class + """The json parser class Convert a string containing valid json into an object """ @@ -52,7 +52,7 @@ class CliParser(CliParserBase): PROVIDE_TEMPLATE_CONTENTS = False def parse(self, *_args, **_kwargs): - """ Std entry point for a cli_parse parse execution + """Std entry point for a cli_parse parse execution :return: Errors or parsed text as structured data :rtype: dict diff --git a/plugins/sub_plugins/cli_parser/textfsm_parser.py b/plugins/sub_plugins/cli_parser/textfsm_parser.py index 3d1e19b..2e075e5 100644 --- a/plugins/sub_plugins/cli_parser/textfsm_parser.py +++ b/plugins/sub_plugins/cli_parser/textfsm_parser.py @@ -53,7 +53,7 @@ except ImportError: class CliParser(CliParserBase): - """ The textfsm parser class + """The textfsm parser class Convert raw text to structured data using textfsm """ @@ -62,7 +62,7 @@ class CliParser(CliParserBase): @staticmethod def _check_reqs(): - """ Check the prerequisites for the textfsm parser + """Check the prerequisites for the textfsm parser :return dict: A dict with errors or a template_path """ @@ -74,7 +74,7 @@ class CliParser(CliParserBase): return {"errors": errors} def parse(self, *_args, **_kwargs): - """ Std entry point for a cli_parse parse execution + """Std entry point for a cli_parse parse execution :return: Errors or parsed text as structured data :rtype: dict diff --git a/plugins/sub_plugins/cli_parser/ttp_parser.py 
b/plugins/sub_plugins/cli_parser/ttp_parser.py index bd8098d..f3d7960 100644 --- a/plugins/sub_plugins/cli_parser/ttp_parser.py +++ b/plugins/sub_plugins/cli_parser/ttp_parser.py @@ -53,7 +53,7 @@ except ImportError: class CliParser(CliParserBase): - """ The ttp parser class + """The ttp parser class Convert raw text to structured data using ttp """ @@ -62,7 +62,7 @@ class CliParser(CliParserBase): @staticmethod def _check_reqs(): - """ Check the prerequisites for the ttp parser + """Check the prerequisites for the ttp parser :return dict: A dict with errors or a template_path """ @@ -74,7 +74,7 @@ class CliParser(CliParserBase): return {"errors": errors} def parse(self, *_args, **_kwargs): - """ Std entry point for a cli_parse parse execution + """Std entry point for a cli_parse parse execution :return: Errors or parsed text as structured data :rtype: dict diff --git a/plugins/sub_plugins/cli_parser/xml_parser.py b/plugins/sub_plugins/cli_parser/xml_parser.py index ea2e52b..b81ed93 100644 --- a/plugins/sub_plugins/cli_parser/xml_parser.py +++ b/plugins/sub_plugins/cli_parser/xml_parser.py @@ -52,7 +52,7 @@ except ImportError: class CliParser(CliParserBase): - """ The xml parser class + """The xml parser class Convert an xml string to structured data using xmltodict """ @@ -61,8 +61,7 @@ class CliParser(CliParserBase): @staticmethod def _check_reqs(): - """ Check the prerequisites for the xml parser - """ + """Check the prerequisites for the xml parser""" errors = [] if not HAS_XMLTODICT: errors.append(missing_required_lib("xmltodict")) @@ -70,7 +69,7 @@ class CliParser(CliParserBase): return errors def parse(self, *_args, **_kwargs): - """ Std entry point for a cli_parse parse execution + """Std entry point for a cli_parse parse execution :return: Errors or parsed text as structured data :rtype: dict diff --git a/plugins/sub_plugins/validate/jsonschema.py b/plugins/sub_plugins/validate/jsonschema.py index 53e2355..d9d3a90 100644 --- 
a/plugins/sub_plugins/validate/jsonschema.py +++ b/plugins/sub_plugins/validate/jsonschema.py @@ -205,9 +205,11 @@ class Validate(ValidateBase): "found": validation_error.instance, } self._result["errors"].append(error) - error_message = "At '{schema_path}' {message}. ".format( - schema_path=error["schema_path"], - message=error["message"], + error_message = ( + "At '{schema_path}' {message}. ".format( + schema_path=error["schema_path"], + message=error["message"], + ) ) error_messages.append(error_message) if error_messages: diff --git a/plugins/test/in_any_network.py b/plugins/test/in_any_network.py index 213dc89..e213afe 100644 --- a/plugins/test/in_any_network.py +++ b/plugins/test/in_any_network.py @@ -104,7 +104,7 @@ def _in_any_network(ip, networks): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"in_any_network": _in_any_network} diff --git a/plugins/test/in_network.py b/plugins/test/in_network.py index 9b667b2..fd1ac19 100644 --- a/plugins/test/in_network.py +++ b/plugins/test/in_network.py @@ -120,7 +120,7 @@ def _in_network(ip, network): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"in_network": _in_network} diff --git a/plugins/test/in_one_network.py b/plugins/test/in_one_network.py index 7850252..b3c1b68 100644 --- a/plugins/test/in_one_network.py +++ b/plugins/test/in_one_network.py @@ -103,7 +103,7 @@ def _in_one_network(ip, networks): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"in_one_network": _in_one_network} diff --git a/plugins/test/ip.py b/plugins/test/ip.py index 08dcb9c..a2a5202 100644 --- a/plugins/test/ip.py +++ b/plugins/test/ip.py @@ -114,7 +114,7 @@ RETURN = """ @_need_ipaddress def _ip(ip): - """ Test if something in an IP address or network """ + """Test if something in an IP address or network""" params = {"ip": ip} _validate_args("ip", DOCUMENTATION, params) @@ -127,7 +127,7 @@ def _ip(ip): 
class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ip": _ip} diff --git a/plugins/test/ip_address.py b/plugins/test/ip_address.py index 6682a16..36e3969 100644 --- a/plugins/test/ip_address.py +++ b/plugins/test/ip_address.py @@ -103,7 +103,7 @@ def _ip_address(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ip_address": _ip_address} diff --git a/plugins/test/ipv4.py b/plugins/test/ipv4.py index e66c3c6..fa55853 100644 --- a/plugins/test/ipv4.py +++ b/plugins/test/ipv4.py @@ -100,7 +100,7 @@ def _ipv4(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv4": _ipv4} diff --git a/plugins/test/ipv4_address.py b/plugins/test/ipv4_address.py index bb7d0bb..7ab1bc5 100644 --- a/plugins/test/ipv4_address.py +++ b/plugins/test/ipv4_address.py @@ -102,7 +102,7 @@ def _ipv4_address(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv4_address": _ipv4_address} diff --git a/plugins/test/ipv4_hostmask.py b/plugins/test/ipv4_hostmask.py index e201203..8a86885 100644 --- a/plugins/test/ipv4_hostmask.py +++ b/plugins/test/ipv4_hostmask.py @@ -90,7 +90,7 @@ def _ipv4_hostmask(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv4_hostmask": _ipv4_hostmask} diff --git a/plugins/test/ipv4_netmask.py b/plugins/test/ipv4_netmask.py index e12d0b8..d3934a3 100644 --- a/plugins/test/ipv4_netmask.py +++ b/plugins/test/ipv4_netmask.py @@ -89,7 +89,7 @@ RETURN = """ @_need_ipaddress def _ipv4_netmask(mask): - """ Test for a valid IPv4 netmask""" + """Test for a valid IPv4 netmask""" params = {"mask": mask} _validate_args("ipv4_netmask", DOCUMENTATION, params) @@ -102,7 +102,7 @@ def _ipv4_netmask(mask): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv4_netmask": _ipv4_netmask} diff --git 
a/plugins/test/ipv6.py b/plugins/test/ipv6.py index a3299df..ea2204d 100644 --- a/plugins/test/ipv6.py +++ b/plugins/test/ipv6.py @@ -102,7 +102,7 @@ def _ipv6(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv6": _ipv6} diff --git a/plugins/test/ipv6_address.py b/plugins/test/ipv6_address.py index ae8a3a7..fa1aa88 100644 --- a/plugins/test/ipv6_address.py +++ b/plugins/test/ipv6_address.py @@ -102,7 +102,7 @@ def _ipv6_address(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv6_address": _ipv6_address} diff --git a/plugins/test/ipv6_ipv4_mapped.py b/plugins/test/ipv6_ipv4_mapped.py index 18c4f10..30e3af5 100644 --- a/plugins/test/ipv6_ipv4_mapped.py +++ b/plugins/test/ipv6_ipv4_mapped.py @@ -88,7 +88,7 @@ RETURN = """ @_need_ipaddress def _ipv6_ipv4_mapped(ip): - """ Test if something appears to be a mapped IPv6 to IPv4 mapped address """ + """Test if something appears to be a mapped IPv6 to IPv4 mapped address""" params = {"ip": ip} _validate_args("ipv6_ipv4_mapped", DOCUMENTATION, params) @@ -102,7 +102,7 @@ def _ipv6_ipv4_mapped(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv6_ipv4_mapped": _ipv6_ipv4_mapped} diff --git a/plugins/test/ipv6_sixtofour.py b/plugins/test/ipv6_sixtofour.py index f0eda79..ef5b752 100644 --- a/plugins/test/ipv6_sixtofour.py +++ b/plugins/test/ipv6_sixtofour.py @@ -90,7 +90,7 @@ RETURN = """ @_need_ipaddress def _ipv6_sixtofour(ip): - """ Test if something appears to be a 6to4 address """ + """Test if something appears to be a 6to4 address""" params = {"ip": ip} _validate_args("ipv6_sixtofour", DOCUMENTATION, params) @@ -104,7 +104,7 @@ def _ipv6_sixtofour(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv6_sixtofour": _ipv6_sixtofour} diff --git a/plugins/test/ipv6_teredo.py b/plugins/test/ipv6_teredo.py index 32bc5d5..1d8bcc9 
100644 --- a/plugins/test/ipv6_teredo.py +++ b/plugins/test/ipv6_teredo.py @@ -90,7 +90,7 @@ RETURN = """ @_need_ipaddress def _ipv6_teredo(ip): - """ Test if something is an IPv6 teredo address """ + """Test if something is an IPv6 teredo address""" params = {"ip": ip} _validate_args("ipv6_teredo", DOCUMENTATION, params) @@ -104,7 +104,7 @@ def _ipv6_teredo(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"ipv6_teredo": _ipv6_teredo} diff --git a/plugins/test/loopback.py b/plugins/test/loopback.py index ccd193c..2615998 100644 --- a/plugins/test/loopback.py +++ b/plugins/test/loopback.py @@ -85,7 +85,7 @@ RETURN = """ @_need_ipaddress def _loopback(ip): - """ Test if an IP address is a loopback """ + """Test if an IP address is a loopback""" params = {"ip": ip} _validate_args("loopback", DOCUMENTATION, params) @@ -97,7 +97,7 @@ def _loopback(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"loopback": _loopback} diff --git a/plugins/test/mac.py b/plugins/test/mac.py index f39e935..cb258e6 100644 --- a/plugins/test/mac.py +++ b/plugins/test/mac.py @@ -109,7 +109,7 @@ RETURN = """ def _mac(mac): - """ Test if something appears to be a valid mac address """ + """Test if something appears to be a valid mac address""" params = {"mac": mac} _validate_args("mac", DOCUMENTATION, params) @@ -125,7 +125,7 @@ def _mac(mac): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"mac": _mac} diff --git a/plugins/test/multicast.py b/plugins/test/multicast.py index e7ce40b..bd204a1 100644 --- a/plugins/test/multicast.py +++ b/plugins/test/multicast.py @@ -100,7 +100,7 @@ RETURN = """ @_need_ipaddress def _multicast(ip): - """ Test for a multicast IP address """ + """Test for a multicast IP address""" params = {"ip": ip} _validate_args("multicast", DOCUMENTATION, params) @@ -112,7 +112,7 @@ def _multicast(ip): class TestModule(object): - """ 
network jinja test""" + """network jinja test""" test_map = {"multicast": _multicast} diff --git a/plugins/test/private.py b/plugins/test/private.py index 4c080ce..915bb23 100644 --- a/plugins/test/private.py +++ b/plugins/test/private.py @@ -76,7 +76,7 @@ RETURN = """ @_need_ipaddress def _private(ip): - """ Test if an IP address is private """ + """Test if an IP address is private""" params = {"ip": ip} _validate_args("private", DOCUMENTATION, params) @@ -88,7 +88,7 @@ def _private(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"private": _private} diff --git a/plugins/test/public.py b/plugins/test/public.py index a0973db..abd4048 100644 --- a/plugins/test/public.py +++ b/plugins/test/public.py @@ -74,7 +74,7 @@ RETURN = """ @_need_ipaddress def _public(ip): - """ Test if an IP address is public """ + """Test if an IP address is public""" params = {"ip": ip} _validate_args("public", DOCUMENTATION, params) @@ -86,7 +86,7 @@ def _public(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"public": _public} diff --git a/plugins/test/reserved.py b/plugins/test/reserved.py index 62679b7..52ddcf1 100644 --- a/plugins/test/reserved.py +++ b/plugins/test/reserved.py @@ -75,7 +75,7 @@ RETURN = """ @_need_ipaddress def _reserved(ip): - """ Test for a reserved IP address """ + """Test for a reserved IP address""" params = {"ip": ip} _validate_args("reserved", DOCUMENTATION, params) @@ -87,7 +87,7 @@ def _reserved(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"reserved": _reserved} diff --git a/plugins/test/resolvable.py b/plugins/test/resolvable.py index 9588106..a7b5eaf 100644 --- a/plugins/test/resolvable.py +++ b/plugins/test/resolvable.py @@ -123,7 +123,7 @@ RETURN = """ @_need_ipaddress def _resolvable(host): - """ Test if an IP or name can be resolved via /etc/hosts or DNS """ + """Test if an IP or name can be resolved 
via /etc/hosts or DNS""" params = {"host": host} _validate_args("resolvable", DOCUMENTATION, params) @@ -148,7 +148,7 @@ def _resolvable(host): class TestModule(object): - """ network jinja tests """ + """network jinja tests""" test_map = {"resolvable": _resolvable} diff --git a/plugins/test/subnet_of.py b/plugins/test/subnet_of.py index 3b08746..0deeb13 100644 --- a/plugins/test/subnet_of.py +++ b/plugins/test/subnet_of.py @@ -80,7 +80,7 @@ RETURN = """ @_need_ipaddress def _subnet_of(network_a, network_b): - """ Test if a network is a subnet of another network """ + """Test if a network is a subnet of another network""" params = {"network_a": network_a, "network_b": network_b} _validate_args("subnet_of", DOCUMENTATION, params) @@ -92,7 +92,7 @@ def _subnet_of(network_a, network_b): class TestModule(object): - """ network jinja test """ + """network jinja test""" test_map = {"subnet_of": _subnet_of} diff --git a/plugins/test/supernet_of.py b/plugins/test/supernet_of.py index 9065a61..3318904 100644 --- a/plugins/test/supernet_of.py +++ b/plugins/test/supernet_of.py @@ -82,7 +82,7 @@ RETURN = """ @_need_ipaddress def _supernet_of(network_a, network_b): - """ Test if an network is a supernet of another network """ + """Test if an network is a supernet of another network""" params = {"network_a": network_a, "network_b": network_b} _validate_args("supernet_of", DOCUMENTATION, params) @@ -94,7 +94,7 @@ def _supernet_of(network_a, network_b): class TestModule(object): - """ network jinja test """ + """network jinja test""" test_map = {"supernet_of": _supernet_of} diff --git a/plugins/test/unspecified.py b/plugins/test/unspecified.py index 6e79e9e..f771679 100644 --- a/plugins/test/unspecified.py +++ b/plugins/test/unspecified.py @@ -101,7 +101,7 @@ RETURN = """ @_need_ipaddress def _unspecified(ip): - """ Test for an unspecified IP address """ + """Test for an unspecified IP address""" params = {"ip": ip} _validate_args("unspecified", DOCUMENTATION, params) @@ -113,7 
+113,7 @@ def _unspecified(ip): class TestModule(object): - """ network jinja test""" + """network jinja test""" test_map = {"unspecified": _unspecified} diff --git a/test-requirements.txt b/test-requirements.txt index 8958f9f..f97d3d1 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,4 @@ -black==19.3b0 ; python_version > '3.5' +black==22.3.0 ; python_version > '3.5' coverage==4.5.4 flake8 ipaddress ; python_version < '3.0' diff --git a/tests/unit/plugins/action/test_cli_parse.py b/tests/unit/plugins/action/test_cli_parse.py index 901033e..7573bb7 100644 --- a/tests/unit/plugins/action/test_cli_parse.py +++ b/tests/unit/plugins/action/test_cli_parse.py @@ -58,7 +58,7 @@ class TestCli_Parse(unittest.TestCase): @staticmethod def _load_fixture(filename): - """ Load a fixture from the filesystem + """Load a fixture from the filesystem :param filename: The name of the file to load :type filename: str @@ -72,23 +72,20 @@ class TestCli_Parse(unittest.TestCase): return fhand.read() def test_fn_debug(self): - """ Confirm debug doesn't fail and return None - """ + """Confirm debug doesn't fail and return None""" msg = "some message" result = self._plugin._debug(msg) self.assertEqual(result, None) def test_fn_ail_json(self): - """ Confirm fail json replaces basic.py in msg - """ + """Confirm fail json replaces basic.py in msg""" msg = "text (basic.py)" with self.assertRaises(Exception) as error: self._plugin._fail_json(msg) self.assertEqual("text cli_parse", str(error.exception)) def test_fn_check_argspec_pass(self): - """ Confirm a valid argspec passes - """ + """Confirm a valid argspec passes""" kwargs = { "text": "text", "parser": { @@ -102,8 +99,7 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(valid, True) def test_fn_check_argspec_fail_no_test_or_command(self): - """ Confirm failed argpsec w/o text or command - """ + """Confirm failed argpsec w/o text or command""" kwargs = { "parser": { "name": "ansible.utils.textfsm", @@ -122,8 
+118,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_check_argspec_fail_no_parser_name(self): - """ Confirm failed argspec no parser name - """ + """Confirm failed argspec no parser name""" kwargs = {"text": "anything", "parser": {"command": "show version"}} valid, result, updated_params = check_argspec( DOCUMENTATION, @@ -137,8 +132,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_extended_check_argspec_parser_name_not_coll(self): - """ Confirm failed argpsec parser not collection format - """ + """Confirm failed argpsec parser not collection format""" self._plugin._task.args = { "text": "anything", "parser": { @@ -151,7 +145,7 @@ class TestCli_Parse(unittest.TestCase): self.assertIn("including collection", self._plugin._result["msg"]) def test_fn_extended_check_argspec_missing_tpath_or_command(self): - """ Confirm failed argpsec missing template_path + """Confirm failed argpsec missing template_path or command when text provided """ self._plugin._task.args = { @@ -165,8 +159,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_load_parser_pass(self): - """ Confirm each each of the parsers loads from the filesystem - """ + """Confirm each each of the parsers loads from the filesystem""" parser_names = ["json", "textfsm", "ttp", "xml"] for parser_name in parser_names: self._plugin._task.args = { @@ -179,8 +172,7 @@ class TestCli_Parse(unittest.TestCase): self.assertTrue(callable(parser.parse)) def test_fn_load_parser_fail(self): - """ Confirm missing parser fails gracefully - """ + """Confirm missing parser fails gracefully""" self._plugin._task.args = { "text": "anything", "parser": {"name": "a.b.c"}, @@ -191,7 +183,7 @@ class TestCli_Parse(unittest.TestCase): self.assertIn("No module named", self._plugin._result["msg"]) def test_fn_set_parser_command_missing(self): - """ Confirm parser/command is set if missing + """Confirm parser/command is set if missing and command provided """ self._plugin._task.args = { @@ -204,8 +196,7 @@ class 
TestCli_Parse(unittest.TestCase): ) def test_fn_set_parser_command_present(self): - """ Confirm parser/command is not changed if provided - """ + """Confirm parser/command is not changed if provided""" self._plugin._task.args = { "command": "anything", "parser": {"command": "something", "name": "a.b.c"}, @@ -216,15 +207,13 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_set_parser_command_absent(self): - """ Confirm parser/command is not added - """ + """Confirm parser/command is not added""" self._plugin._task.args = {"parser": {}} self._plugin._set_parser_command() self.assertNotIn("command", self._plugin._task.args["parser"]) def test_fn_set_text_present(self): - """ Check task args text is set to stdout - """ + """Check task args text is set to stdout""" expected = "output" self._plugin._result["stdout"] = expected self._plugin._task.args = {} @@ -232,16 +221,14 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(self._plugin._task.args["text"], expected) def test_fn_set_text_absent(self): - """ Check task args text is set to stdout - """ + """Check task args text is set to stdout""" self._plugin._result["stdout"] = None self._plugin._task.args = {} self._plugin._set_text() self.assertNotIn("text", self._plugin._task.args) def test_fn_os_from_task_vars(self): - """ Confirm os is set based on task vars - """ + """Confirm os is set based on task vars""" checks = [ ("ansible_network_os", "cisco.nxos.nxos", "nxos"), ("ansible_network_os", "NXOS", "nxos"), @@ -254,7 +241,7 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(result, check[2]) def test_fn_update_template_path_not_exist(self): - """ Check the creation of the template_path if + """Check the creation of the template_path if it doesn't exist in the user provided data """ self._plugin._task.args = { @@ -269,7 +256,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_update_template_path_not_exist_os(self): - """ Check the creation of the template_path if + """Check the 
creation of the template_path if it doesn't exist in the user provided data name based on os provided in task """ @@ -284,7 +271,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_update_template_path_mock_find_needle(self): - """ Check the creation of the template_path + """Check the creation of the template_path mock the find needle fn so the template doesn't need to be in the default template folder """ @@ -302,8 +289,7 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_get_template_contents_pass(self): - """ Check the retrieval of the template contents - """ + """Check the retrieval of the template contents""" temp = tempfile.NamedTemporaryFile() contents = "abcdef" with open(temp.name, "w") as fileh: @@ -314,8 +300,7 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(result, contents) def test_fn_get_template_contents_missing(self): - """ Check the retrieval of the template contents - """ + """Check the retrieval of the template contents""" self._plugin._task.args = {"parser": {"template_path": "non-exist"}} with self.assertRaises(Exception) as error: self._plugin._get_template_contents() @@ -324,15 +309,13 @@ class TestCli_Parse(unittest.TestCase): ) def test_fn_get_template_contents_not_specified(self): - """ Check the none when template_path not specified - """ + """Check the none when template_path not specified""" self._plugin._task.args = {"parser": {}} result = self._plugin._get_template_contents() self.assertIsNone(result) def test_fn_prune_result_pass(self): - """ Test the removal of stdout and stdout_lines from the _result - """ + """Test the removal of stdout and stdout_lines from the _result""" self._plugin._result["stdout"] = "abc" self._plugin._result["stdout_lines"] = "abc" self._plugin._prune_result() @@ -340,15 +323,13 @@ class TestCli_Parse(unittest.TestCase): self.assertNotIn("stdout_lines", self._plugin._result) def test_fn_prune_result_not_exist(self): - """ Test the removal of stdout and stdout_lines from the 
_result - """ + """Test the removal of stdout and stdout_lines from the _result""" self._plugin._prune_result() self.assertNotIn("stdout", self._plugin._result) self.assertNotIn("stdout_lines", self._plugin._result) def test_fn_run_command_lx_rc0(self): - """ Check run command for non network - """ + """Check run command for non network""" response = "abc" self._plugin._connection.socket_path = None self._plugin._low_level_execute_command = MagicMock() @@ -363,8 +344,7 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(self._plugin._result["stdout_lines"], response) def test_fn_run_command_lx_rc1(self): - """ Check run command for non network - """ + """Check run command for non network""" response = "abc" self._plugin._connection.socket_path = None self._plugin._low_level_execute_command = MagicMock() @@ -381,8 +361,7 @@ class TestCli_Parse(unittest.TestCase): @patch("ansible.module_utils.connection.Connection.__rpc__") def test_fn_run_command_network(self, mock_rpc): - """ Check run command for network - """ + """Check run command for network""" expected = "abc" mock_rpc.return_value = expected self._plugin._connection.socket_path = ( @@ -394,16 +373,14 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(self._plugin._result["stdout_lines"], [expected]) def test_fn_run_command_not_specified(self): - """ Check run command for network - """ + """Check run command for network""" self._plugin._task.args = {"command": None} result = self._plugin._run_command() self.assertIsNone(result) @patch("ansible.module_utils.connection.Connection.__rpc__") def test_fn_run_pass_w_fact(self, mock_rpc): - """ Check full module run with valid params - """ + """Check full module run with valid params""" mock_out = self._load_fixture("nxos_show_version.txt") mock_rpc.return_value = mock_out self._plugin._connection.socket_path = ( @@ -431,8 +408,7 @@ class TestCli_Parse(unittest.TestCase): @patch("ansible.module_utils.connection.Connection.__rpc__") def 
test_fn_run_pass_wo_fact(self, mock_rpc): - """ Check full module run with valid params - """ + """Check full module run with valid params""" mock_out = self._load_fixture("nxos_show_version.txt") mock_rpc.return_value = mock_out self._plugin._connection.socket_path = ( @@ -456,8 +432,7 @@ class TestCli_Parse(unittest.TestCase): self.assertNotIn("ansible_facts", result) def test_fn_run_fail_argspec(self): - """ Check full module run with invalid params - """ + """Check full module run with invalid params""" self._plugin._task.args = { "text": "anything", "parser": { @@ -470,8 +445,7 @@ class TestCli_Parse(unittest.TestCase): self.assertIn("including collection", self._plugin._result["msg"]) def test_fn_run_fail_command(self): - """ Confirm clean fail with rc 1 - """ + """Confirm clean fail with rc 1""" self._plugin._connection.socket_path = None self._plugin._low_level_execute_command = MagicMock() self._plugin._low_level_execute_command.return_value = { @@ -495,8 +469,7 @@ class TestCli_Parse(unittest.TestCase): self.assertEqual(result, expected) def test_fn_run_fail_missing_parser(self): - """Confirm clean fail with missing parser - """ + """Confirm clean fail with missing parser""" self._plugin._task.args = {"text": None, "parser": {"name": "a.b.c"}} task_vars = {"inventory_hostname": "mockdevice"} result = self._plugin.run(task_vars=task_vars) @@ -505,7 +478,7 @@ class TestCli_Parse(unittest.TestCase): @patch("ansible.module_utils.connection.Connection.__rpc__") def test_fn_run_pass_missing_parser_constants(self, mock_rpc): - """ Check full module run using parser w/o + """Check full module run using parser w/o DEFAULT_TEMPLATE_EXTENSION or PROVIDE_TEMPLATE_CONTENTS defined in the parser """ @@ -542,7 +515,7 @@ class TestCli_Parse(unittest.TestCase): @patch("ansible.module_utils.connection.Connection.__rpc__") def test_fn_run_pass_missing_parser_in_parser(self, mock_rpc): - """ Check full module run using parser w/o + """Check full module run using parser w/o a 
parser function defined in the parser defined in the parser """ @@ -577,8 +550,7 @@ class TestCli_Parse(unittest.TestCase): @patch("ansible.module_utils.connection.Connection.__rpc__") def test_fn_run_net_device_error(self, mock_rpc): - """ Check full module run mock error from network device - """ + """Check full module run mock error from network device""" msg = "I was mocked" mock_rpc.side_effect = AnsibleConnectionError(msg) self._plugin._connection.socket_path = ( diff --git a/tests/unit/plugins/filter/test_ipmath.py b/tests/unit/plugins/filter/test_ipmath.py index a6248f6..80f6183 100644 --- a/tests/unit/plugins/filter/test_ipmath.py +++ b/tests/unit/plugins/filter/test_ipmath.py @@ -28,7 +28,7 @@ class TestIpAddr(unittest.TestCase): self.assertEqual(result, "192.168.1.10") def test_find_previous_fifth_address(self): - """Get the previous fifth address """ + """Get the previous fifth address""" args = ["", "192.168.1.5", -10] result = _ipmath(*args) diff --git a/tests/unit/plugins/sub_plugins/validate/test_config.py b/tests/unit/plugins/sub_plugins/validate/test_config.py index b524457..c4ca62e 100644 --- a/tests/unit/plugins/sub_plugins/validate/test_config.py +++ b/tests/unit/plugins/sub_plugins/validate/test_config.py @@ -71,8 +71,10 @@ def test_invalid_action(validator, test_rule): except AnsibleError as exc: error = to_text(exc) - expected_error = 'Action in criteria {item} is not one of "warn" or "fail"'.format( - item=original + expected_error = ( + 'Action in criteria {item} is not one of "warn" or "fail"'.format( + item=original + ) ) assert error == expected_error From f337f82d5997b06ba804d55387528cd9098fa86c Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 4 Apr 2022 20:02:43 +0530 Subject: [PATCH 03/23] docs and examples --- README.md | 5 +- docs/ansible.utils.consolidate_filter.rst | 1344 +++++++++++++++++++++ plugins/filter/consolidate.py | 1251 ++++++++++++++++--- 3 files changed, 2418 insertions(+), 182 deletions(-) create mode 100644 
docs/ansible.utils.consolidate_filter.rst diff --git a/README.md b/README.md index 0c1a552..a4b529c 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. @@ -24,6 +24,7 @@ PEP440 is the schema used to describe the versions of Ansible. Name | Description --- | --- [ansible.utils.cidr_merge](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.cidr_merge_filter.rst)|This filter can be used to merge subnets or individual addresses. +[ansible.utils.consolidate](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.consolidate_filter.rst)|Consolidate facts together on common attributes. [ansible.utils.from_xml](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.from_xml_filter.rst)|Convert given XML string to native python dictionary. 
[ansible.utils.get_path](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.get_path_filter.rst)|Retrieve the value in a variable using a path [ansible.utils.hwaddr](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.hwaddr_filter.rst)|HWaddr / MAC address filters diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst new file mode 100644 index 0000000..cba46fd --- /dev/null +++ b/docs/ansible.utils.consolidate_filter.rst @@ -0,0 +1,1344 @@ +.. _ansible.utils.consolidate_filter: + + +************************* +ansible.utils.consolidate +************************* + +**Consolidate facts together on common attributes.** + + +Version added: 2.5.2 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- This plugin presents a collective structured data including all supplied facts grouping on common attributes mentioned. +- All other boolean parameter defaults to False unless parameters is explicitly mentioned. +- Using the parameters below- ``data_source|ansible.utils.consolidate(fail_missing_match_key=False``)) + + + + +Parameters +---------- + +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ParameterChoices/DefaultsConfigurationComments
+
+ data_source + +
+ list + / elements=dictionary +
+
+ + +
This option represents a list of dictionaries to perform the operation on.
+
For example facts_source|ansible.utils.consolidate(fail_missing_match_key=False), in this case facts_source represents this option.
+
+
+ data + +
+ raw +
+
+ + +
Specify facts data that gets consolidated.
+
+
+ match_key + +
+ string +
+
+ + +
Specify key to match on.
+
+
+ prefix + +
+ string +
+
+ + +
Specify the prefix with which the result set will be created.
+
+
+ fail_duplicate + +
+ boolean +
+
+
    Choices: +
  • no
  • +
  • yes
  • +
+
+ +
Fail if duplicate values for any key are found.
+
+
+ fail_missing_match_key + +
+ boolean +
+
+
    Choices: +
  • no
  • +
  • yes
  • +
+
+ +
Fail if match_key is not found in a specific data set.
+
+
+ fail_missing_match_value + +
+ boolean +
+
+
    Choices: +
  • no
  • +
  • yes
  • +
+
+ +
Fail if the keys to match are not the same across all data sets.
+
+
+ + + + +Examples +-------- + +.. code-block:: yaml + + # Consolidated facts example + # ------------ + + ##facts.yml + interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q + l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 + - name: Loopback999 + + ##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + + ##Output + # ok: [localhost] => { + # "ansible_facts": { + # 
"data_source": [ + # { + # "data": [ + # { + # "duplex": "auto", + # "enabled": true, + # "name": "GigabitEthernet0/0", + # "note": [ + # "Connected green wire" + # ], + # "speed": "auto" + # }, + # { + # "description": "Configured by Ansible - Interface 1", + # "duplex": "auto", + # "enabled": true, + # "mtu": 1500, + # "name": "GigabitEthernet0/1", + # "note": [ + # "Connected blue wire", + # "Configured by Paul" + # ], + # "speed": "auto", + # "vifs": [ + # { + # "comment": "Needs reconfiguration", + # "description": "Eth1 - VIF 100", + # "enabled": true, + # "mtu": 400, + # "vlan_id": 100 + # }, + # { + # "description": "Eth1 - VIF 101", + # "enabled": true, + # "vlan_id": 101 + # } + # ] + # }, + # { + # "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + # "enabled": false, + # "mtu": 600, + # "name": "GigabitEthernet0/2" + # } + # ], + # "match_key": "name", + # "prefix": "interfaces" + # }, + # { + # "data": [ + # { + # "name": "GigabitEthernet0/0" + # }, + # { + # "mode": "access", + # "name": "GigabitEthernet0/1", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ] + # } + # }, + # { + # "mode": "trunk", + # "name": "GigabitEthernet0/2", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ], + # "encapsulation": "dot1q" + # } + # } + # ], + # "match_key": "name", + # "prefix": "l2_interfaces" + # }, + # { + # "data": [ + # { + # "ipv4": [ + # { + # "address": "192.168.0.2/24" + # } + # ], + # "name": "GigabitEthernet0/0" + # }, + # { + # "name": "GigabitEthernet0/1" + # }, + # { + # "name": "GigabitEthernet0/2" + # }, + # { + # "name": "Loopback888" + # }, + # { + # "name": "Loopback999" + # } + # ], + # 
"match_key": "name", + # "prefix": "l3_interfaces" + # } + # ] + # }, + # "changed": false + # } + # Read vars_file 'facts.yml' + + # TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + # ok: [localhost] => { + # "ansible_facts": { + # "combined": { + # "GigabitEthernet0/0": { + # "interfaces": { + # "duplex": "auto", + # "enabled": true, + # "name": "GigabitEthernet0/0", + # "note": [ + # "Connected green wire" + # ], + # "speed": "auto" + # }, + # "l2_interfaces": { + # "name": "GigabitEthernet0/0" + # }, + # "l3_interfaces": { + # "ipv4": [ + # { + # "address": "192.168.0.2/24" + # } + # ], + # "name": "GigabitEthernet0/0" + # } + # }, + # "GigabitEthernet0/1": { + # "interfaces": { + # "description": "Configured by Ansible - Interface 1", + # "duplex": "auto", + # "enabled": true, + # "mtu": 1500, + # "name": "GigabitEthernet0/1", + # "note": [ + # "Connected blue wire", + # "Configured by Paul" + # ], + # "speed": "auto", + # "vifs": [ + # { + # "comment": "Needs reconfiguration", + # "description": "Eth1 - VIF 100", + # "enabled": true, + # "mtu": 400, + # "vlan_id": 100 + # }, + # { + # "description": "Eth1 - VIF 101", + # "enabled": true, + # "vlan_id": 101 + # } + # ] + # }, + # "l2_interfaces": { + # "mode": "access", + # "name": "GigabitEthernet0/1", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ] + # } + # }, + # "l3_interfaces": { + # "name": "GigabitEthernet0/1" + # } + # }, + # "GigabitEthernet0/2": { + # "interfaces": { + # "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + # "enabled": false, + # "mtu": 600, + # "name": "GigabitEthernet0/2" + # }, + # "l2_interfaces": { + # "mode": "trunk", + # "name": "GigabitEthernet0/2", + # "trunk": { + # 
"allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ], + # "encapsulation": "dot1q" + # } + # }, + # "l3_interfaces": { + # "name": "GigabitEthernet0/2" + # } + # }, + # "Loopback888": { + # "interfaces": {}, + # "l2_interfaces": {}, + # "l3_interfaces": { + # "name": "Loopback888" + # } + # }, + # "Loopback999": { + # "interfaces": {}, + # "l2_interfaces": {}, + # "l3_interfaces": { + # "name": "Loopback999" + # } + # } + # } + # }, + # "changed": false + # } + + # Failing on missing match values + # ------------------------------- + + ##facts.yaml + interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q + l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 
+ - name: Loopback999 + + ##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + + ##Output + ok: [localhost] => { + "ansible_facts": { + "data_source": [ + { + "data": [ + { + "duplex": "auto", + "enabled": true, + "name": "GigabitEthernet0/0", + "note": [ + "Connected green wire" + ], + "speed": "auto" + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": true, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": [ + "Connected blue wire", + "Configured by Paul" + ], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": true, + "mtu": 400, + "vlan_id": 100 + }, + { + "description": "Eth1 - VIF 101", + "enabled": true, + "vlan_id": 101 + } + ] + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": false, + "mtu": 600, + "name": "GigabitEthernet0/2" + } + ], + "match_key": "name", + "prefix": "interfaces" + }, + { + "data": [ + { + "name": "GigabitEthernet0/0" + }, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ] + } + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ], + "encapsulation": "dot1q" + } + } + ], + 
"match_key": "name", + "prefix": "l2_interfaces" + }, + { + "data": [ + { + "ipv4": [ + { + "address": "192.168.0.2/24" + } + ], + "name": "GigabitEthernet0/0" + }, + { + "name": "GigabitEthernet0/1" + }, + { + "name": "GigabitEthernet0/2" + }, + { + "name": "Loopback888" + }, + { + "name": "Loopback999" + } + ], + "match_key": "name", + "prefix": "l3_interfaces" + } + ] + }, + "changed": false + } + Read vars_file 'facts.yml' + + TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + fatal: [localhost]: FAILED! => { + "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" + } + + # Failing on missing match keys + # ----------------------------- + + ##facts.yaml + interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - 
"418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q + l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + inft_name: GigabitEthernet0/0 + - inft_name: GigabitEthernet0/1 + - inft_name: GigabitEthernet0/2 + - inft_name: Loopback888 + - inft_name: Loopback999 + + ##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" + + ##Output + # ok: [localhost] => { + # "ansible_facts": { + # "data_source": [ + # { + # "data": [ + # { + # "duplex": "auto", + # "enabled": true, + # "name": "GigabitEthernet0/0", + # "note": [ + # "Connected green wire" + # ], + # "speed": "auto" + # }, + # { + # "description": "Configured by Ansible - Interface 1", + # "duplex": "auto", + # "enabled": true, + # "mtu": 1500, + # "name": "GigabitEthernet0/1", + # "note": [ + # "Connected blue wire", + # "Configured by Paul" + # ], + # "speed": "auto", + # "vifs": [ + # { + # "comment": "Needs reconfiguration", + # "description": "Eth1 - VIF 100", + # "enabled": true, + # "mtu": 400, + # "vlan_id": 100 + # }, + # { + # "description": "Eth1 - VIF 101", + # "enabled": true, + # "vlan_id": 101 + # } + # ] + # }, + # { + # "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + # "enabled": false, + # "mtu": 600, + # "name": "GigabitEthernet0/2" + # } + # ], + # "match_key": "name", + # "prefix": "interfaces" + # }, + # { + # "data": [ + # { + # "name": "GigabitEthernet0/0" + # }, + # { + # "mode": "access", + # "name": "GigabitEthernet0/1", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # 
"100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ] + # } + # }, + # { + # "mode": "trunk", + # "name": "GigabitEthernet0/2", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ], + # "encapsulation": "dot1q" + # } + # } + # ], + # "match_key": "name", + # "prefix": "l2_interfaces" + # }, + # { + # "data": [ + # { + # "inft_name": "GigabitEthernet0/0", + # "ipv4": [ + # { + # "address": "192.168.0.2/24" + # } + # ] + # }, + # { + # "inft_name": "GigabitEthernet0/1" + # }, + # { + # "inft_name": "GigabitEthernet0/2" + # }, + # { + # "inft_name": "Loopback888" + # }, + # { + # "inft_name": "Loopback999" + # } + # ], + # "match_key": "name", + # "prefix": "l3_interfaces" + # } + # ] + # }, + # "changed": false + # } + # Read vars_file 'facts.yml' + + # TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + # fatal: [localhost]: FAILED! 
=> { + # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0, Missing match key 'name' in data source 2 in list entry 1, Missing match key 'name' in data source 2 in list entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing match key 'name' in data source 2 in list entry 4" + # } + + # Failing on duplicate values in facts + # ------------------------------------ + + ##facts.yaml + interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false + l2_interfaces: + - name: GigabitEthernet0/0 + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q + l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 + - name: Loopback999 + + ##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: 
name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + + ##Output + ok: [localhost] => { + "ansible_facts": { + "data_source": [ + { + "data": [ + { + "duplex": "auto", + "enabled": true, + "name": "GigabitEthernet0/0", + "note": [ + "Connected green wire" + ], + "speed": "auto" + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": true, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": [ + "Connected blue wire", + "Configured by Paul" + ], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": true, + "mtu": 400, + "vlan_id": 100 + }, + { + "description": "Eth1 - VIF 101", + "enabled": true, + "vlan_id": 101 + } + ] + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": false, + "mtu": 600, + "name": "GigabitEthernet0/2" + } + ], + "match_key": "name", + "prefix": "interfaces" + }, + { + "data": [ + { + "name": "GigabitEthernet0/0" + }, + { + "name": "GigabitEthernet0/0" + }, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ] + } + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ], + "encapsulation": "dot1q" + } + } + ], + "match_key": "name", + "prefix": "l2_interfaces" + }, + { + "data": [ + { + "ipv4": [ + { + "address": "192.168.0.2/24" + } + ], + "name": "GigabitEthernet0/0" + 
}, + { + "name": "GigabitEthernet0/1" + }, + { + "name": "GigabitEthernet0/2" + }, + { + "name": "Loopback888" + }, + { + "name": "Loopback999" + } + ], + "match_key": "name", + "prefix": "l3_interfaces" + } + ] + }, + "changed": false + } + Read vars_file 'facts.yml' + + TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + fatal: [localhost]: FAILED! => { + "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" + } + + + + +Status +------ + + +Authors +~~~~~~~ + +- Sagar Paul (@KB-perByte) + + +.. hint:: + Configuration entries for each entry type have a low to high priority order. For example, a variable that is lower in the list will override a variable that is higher up. diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index da301e8..01bb170 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -15,51 +15,54 @@ __metaclass__ = type DOCUMENTATION = """ name: consolidate author: Sagar Paul (@KB-perByte) - version_added: "2.5.0" - short_description: Keep specific keys from a data recursively. + version_added: "2.5.2" + short_description: Consolidate facts together on common attributes. description: - - This plugin keep only specified keys from a provided data recursively. - - Matching parameter defaults to equals unless C(matching_parameter) is explicitly mentioned. - - Using the parameters below- C(data|ansible.utils.keep_keys(target([....]))) + - This plugin presents a collective structured data including all supplied facts grouping on common attributes mentioned. + - All other boolean parameter defaults to False unless parameters is explicitly mentioned. 
+ - Using the parameters below- C(data_source|ansible.utils.consolidate(fail_missing_match_key=False))) options: data_source: description: - - This option represents a list of dictionaries or a dictionary with any level of nesting data. - - For example C(config_data|ansible.utils.keep_keys(target([....]))), in this case C(config_data) represents this option. + - This option represents a list of dictionaries to perform the operation on. + - For example C(facts_source|ansible.utils.consolidate(fail_missing_match_key=False))), in this case C(facts_source) represents this option. type: list elements: dict suboptions: data: - description: Specify the target keys to keep in list format. + description: Specify facts data that gets consolidated. type: raw match_key: - description: Specify the target keys to keep in list format. + description: Specify key to match on. type: str prefix: - description: Specify the target keys to keep in list format. + description: Specify the prefix with which the result set be created. type: str fail_missing_match_key: - description: Specify the target keys to keep in list format. + description: Fail if match_key is not found in a specific data set. type: bool fail_missing_match_value: - description: Specify the target keys to keep in list format. + description: Fail if a keys to match in not same accross all data sets. type: bool fail_duplicate: - description: Specify the matching configuration of target keys and data attributes. + description: Fail if duplicate values for any key is found. 
type: bool """ EXAMPLES = r""" -##example.yaml +# Consolidated facts example +# ------------ + +##facts.yml interfaces: - - name: eth0 + - name: GigabitEthernet0/0 enabled: true duplex: auto speed: auto note: - Connected green wire - - name: eth1 + - name: GigabitEthernet0/1 description: Configured by Ansible - Interface 1 mtu: 1500 speed: auto @@ -69,128 +72,380 @@ interfaces: - Connected blue wire - Configured by Paul vifs: - - vlan_id: 100 - description: Eth1 - VIF 100 - mtu: 400 - enabled: true - comment: Needs reconfiguration - - vlan_id: 101 - description: Eth1 - VIF 101 - enabled: true - - name: eth2 + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 description: Configured by Ansible - Interface 2 (ADMIN DOWN) mtu: 600 enabled: false +l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q +l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 + - name: Loopback999 ##Playbook -vars_files: - - "example.yaml" -tasks: - - name: keep selective keys from dict/list of dict data - ansible.builtin.set_fact: - data: "{{ interfaces }}" + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: 
l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - debug: - msg: "{{ data|ansible.utils.keep_keys(target=['description', 'name', 'mtu', 'duplex', 'enabled', 'vifs', 'vlan_id']) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" ##Output -# TASK [keep selective keys from python dict/list of dict] **************************************************************************************** # ok: [localhost] => { # "ansible_facts": { -# "data": [ +# "data_source": [ # { -# "duplex": "auto", -# "enabled": true, -# "name": "eth0", -# "note": [ -# "Connected green wire" -# ], -# "speed": "auto" -# }, -# { -# "description": "Configured by Ansible - Interface 1", -# "duplex": "auto", -# "enabled": true, -# "mtu": 1500, -# "name": "eth1", -# "note": [ -# "Connected blue wire", -# "Configured by Paul" -# ], -# "speed": "auto", -# "vifs": [ +# "data": [ # { -# "comment": "Needs reconfiguration", -# "description": "Eth1 - VIF 100", +# "duplex": "auto", # "enabled": true, -# "mtu": 400, -# "vlan_id": 100 +# "name": "GigabitEthernet0/0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" # }, # { -# "description": "Eth1 - VIF 101", +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", # "enabled": true, -# "vlan_id": 101 +# "mtu": 1500, +# "name": "GigabitEthernet0/1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "GigabitEthernet0/2" # } -# ] +# ], +# "match_key": "name", +# "prefix": "interfaces" 
# }, # { -# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", -# "enabled": false, -# "mtu": 600, -# "name": "eth2" +# "data": [ +# { +# "name": "GigabitEthernet0/0" +# }, +# { +# "mode": "access", +# "name": "GigabitEthernet0/1", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ] +# } +# }, +# { +# "mode": "trunk", +# "name": "GigabitEthernet0/2", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ], +# "encapsulation": "dot1q" +# } +# } +# ], +# "match_key": "name", +# "prefix": "l2_interfaces" +# }, +# { +# "data": [ +# { +# "ipv4": [ +# { +# "address": "192.168.0.2/24" +# } +# ], +# "name": "GigabitEthernet0/0" +# }, +# { +# "name": "GigabitEthernet0/1" +# }, +# { +# "name": "GigabitEthernet0/2" +# }, +# { +# "name": "Loopback888" +# }, +# { +# "name": "Loopback999" +# } +# ], +# "match_key": "name", +# "prefix": "l3_interfaces" # } # ] # }, # "changed": false # } -# Read vars_file 'example.yaml' +# Read vars_file 'facts.yml' -# TASK [debug] ************************************************************************************************************* +# TASK [Combine all the facts based on match_keys] **************************************************************************************************************** # ok: [localhost] => { -# "msg": [ -# { -# "duplex": "auto", -# "enabled": true, -# "name": "eth0" -# }, -# { -# "description": "Configured by Ansible - Interface 1", -# "duplex": "auto", -# "enabled": true, -# "mtu": 1500, -# "name": "eth1", -# "vifs": [ -# { -# "description": "Eth1 - VIF 100", +# "ansible_facts": { +# "combined": { +# "GigabitEthernet0/0": { +# "interfaces": { +# "duplex": "auto", # "enabled": true, -# "mtu": 400, -# 
"vlan_id": 100 +# "name": "GigabitEthernet0/0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" # }, -# { -# "description": "Eth1 - VIF 101", -# "enabled": true, -# "vlan_id": 101 +# "l2_interfaces": { +# "name": "GigabitEthernet0/0" +# }, +# "l3_interfaces": { +# "ipv4": [ +# { +# "address": "192.168.0.2/24" +# } +# ], +# "name": "GigabitEthernet0/0" # } -# ] -# }, -# { -# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", -# "enabled": false, -# "mtu": 600, -# "name": "eth2" +# }, +# "GigabitEthernet0/1": { +# "interfaces": { +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "GigabitEthernet0/1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# "l2_interfaces": { +# "mode": "access", +# "name": "GigabitEthernet0/1", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ] +# } +# }, +# "l3_interfaces": { +# "name": "GigabitEthernet0/1" +# } +# }, +# "GigabitEthernet0/2": { +# "interfaces": { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "GigabitEthernet0/2" +# }, +# "l2_interfaces": { +# "mode": "trunk", +# "name": "GigabitEthernet0/2", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ], +# "encapsulation": "dot1q" +# } +# }, +# "l3_interfaces": { +# "name": "GigabitEthernet0/2" +# } +# }, +# "Loopback888": { +# 
"interfaces": {}, +# "l2_interfaces": {}, +# "l3_interfaces": { +# "name": "Loopback888" +# } +# }, +# "Loopback999": { +# "interfaces": {}, +# "l2_interfaces": {}, +# "l3_interfaces": { +# "name": "Loopback999" +# } +# } # } -# ] +# }, +# "changed": false # } -##example.yaml +# Failing on missing match values +# ------------------------------- + +##facts.yaml interfaces: - - name: eth0 + - name: GigabitEthernet0/0 enabled: true duplex: auto speed: auto note: - Connected green wire - - name: eth1 + - name: GigabitEthernet0/1 description: Configured by Ansible - Interface 1 mtu: 1500 speed: auto @@ -200,106 +455,744 @@ interfaces: - Connected blue wire - Configured by Paul vifs: - - vlan_id: 100 - description: Eth1 - VIF 100 - mtu: 400 - enabled: true - comment: Needs reconfiguration - - vlan_id: 101 - description: Eth1 - VIF 101 - enabled: true - - name: eth2 + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 description: Configured by Ansible - Interface 2 (ADMIN DOWN) mtu: 600 enabled: false +l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q +l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 + - name: Loopback999 ##Playbook -vars_files: - - "example.yaml" -tasks: - - name: keep selective keys from dict/list of dict data - ansible.builtin.set_fact: - 
data: "{{ interfaces }}" + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - debug: - msg: "{{ data|ansible.utils.keep_keys(target=['desc', 'name'], matching_parameter= 'starts_with') }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + +##Output +ok: [localhost] => { + "ansible_facts": { + "data_source": [ + { + "data": [ + { + "duplex": "auto", + "enabled": true, + "name": "GigabitEthernet0/0", + "note": [ + "Connected green wire" + ], + "speed": "auto" + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": true, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": [ + "Connected blue wire", + "Configured by Paul" + ], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": true, + "mtu": 400, + "vlan_id": 100 + }, + { + "description": "Eth1 - VIF 101", + "enabled": true, + "vlan_id": 101 + } + ] + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": false, + "mtu": 600, + "name": "GigabitEthernet0/2" + } + ], + "match_key": "name", + "prefix": "interfaces" + }, + { + "data": [ + { + "name": "GigabitEthernet0/0" + }, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ] + } + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + 
"418", + "982", + "986", + "988", + "993" + ], + "encapsulation": "dot1q" + } + } + ], + "match_key": "name", + "prefix": "l2_interfaces" + }, + { + "data": [ + { + "ipv4": [ + { + "address": "192.168.0.2/24" + } + ], + "name": "GigabitEthernet0/0" + }, + { + "name": "GigabitEthernet0/1" + }, + { + "name": "GigabitEthernet0/2" + }, + { + "name": "Loopback888" + }, + { + "name": "Loopback999" + } + ], + "match_key": "name", + "prefix": "l3_interfaces" + } + ] + }, + "changed": false +} +Read vars_file 'facts.yml' + +TASK [Combine all the facts based on match_keys] **************************************************************************************************************** +fatal: [localhost]: FAILED! => { + "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" +} + +# Failing on missing match keys +# ----------------------------- + +##facts.yaml +interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false +l2_interfaces: + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - 
"67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + encapsulation: dot1q +l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + inft_name: GigabitEthernet0/0 + - inft_name: GigabitEthernet0/1 + - inft_name: GigabitEthernet0/2 + - inft_name: Loopback888 + - inft_name: Loopback999 + +##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" ##Output -# TASK [keep selective keys from python dict/list of dict] ************************** # ok: [localhost] => { # "ansible_facts": { -# "data": [ +# "data_source": [ # { -# "duplex": "auto", -# "enabled": true, -# "name": "eth0", -# "note": [ -# "Connected green wire" -# ], -# "speed": "auto" -# }, -# { -# "description": "Configured by Ansible - Interface 1", -# "duplex": "auto", -# "enabled": true, -# "mtu": 1500, -# "name": "eth1", -# "note": [ -# "Connected blue wire", -# "Configured by Paul" -# ], -# "speed": "auto", -# "vifs": [ +# "data": [ # { -# "comment": "Needs reconfiguration", -# "description": "Eth1 - VIF 100", +# "duplex": "auto", # "enabled": true, -# "mtu": 400, -# "vlan_id": 100 +# "name": "GigabitEthernet0/0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" # }, # { -# "description": "Eth1 - VIF 101", +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", # "enabled": true, -# "vlan_id": 101 +# "mtu": 1500, +# "name": "GigabitEthernet0/1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs 
reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "GigabitEthernet0/2" # } -# ] +# ], +# "match_key": "name", +# "prefix": "interfaces" # }, # { -# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", -# "enabled": false, -# "mtu": 600, -# "name": "eth2" +# "data": [ +# { +# "name": "GigabitEthernet0/0" +# }, +# { +# "mode": "access", +# "name": "GigabitEthernet0/1", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ] +# } +# }, +# { +# "mode": "trunk", +# "name": "GigabitEthernet0/2", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ], +# "encapsulation": "dot1q" +# } +# } +# ], +# "match_key": "name", +# "prefix": "l2_interfaces" +# }, +# { +# "data": [ +# { +# "inft_name": "GigabitEthernet0/0", +# "ipv4": [ +# { +# "address": "192.168.0.2/24" +# } +# ] +# }, +# { +# "inft_name": "GigabitEthernet0/1" +# }, +# { +# "inft_name": "GigabitEthernet0/2" +# }, +# { +# "inft_name": "Loopback888" +# }, +# { +# "inft_name": "Loopback999" +# } +# ], +# "match_key": "name", +# "prefix": "l3_interfaces" # } # ] # }, # "changed": false # } -# Read vars_file 'example.yaml' +# Read vars_file 'facts.yml' -# TASK [debug] ********************************************************************************** -# ok: [localhost] => { -# "msg": [ -# { -# "name": "eth0" -# }, -# { -# "description": "Configured by Ansible - Interface 1", -# "name": "eth1", -# "vifs": [ -# { -# "description": "Eth1 - VIF 100" 
-# }, -# { -# "description": "Eth1 - VIF 101" -# } -# ] -# }, -# { -# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", -# "name": "eth2" -# } -# ] +# TASK [Combine all the facts based on match_keys] **************************************************************************************************************** +# fatal: [localhost]: FAILED! => { +# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0, Missing match key 'name' in data source 2 in list entry 1, Missing match key 'name' in data source 2 in list entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing match key 'name' in data source 2 in list entry 4" # } + +# Failing on duplicate values in facts +# ------------------------------------ + +##facts.yaml +interfaces: + - name: GigabitEthernet0/0 + enabled: true + duplex: auto + speed: auto + note: + - Connected green wire + - name: GigabitEthernet0/1 + description: Configured by Ansible - Interface 1 + mtu: 1500 + speed: auto + duplex: auto + enabled: true + note: + - Connected blue wire + - Configured by Paul + vifs: + - vlan_id: 100 + description: Eth1 - VIF 100 + mtu: 400 + enabled: true + comment: Needs reconfiguration + - vlan_id: 101 + description: Eth1 - VIF 101 + enabled: true + - name: GigabitEthernet0/2 + description: Configured by Ansible - Interface 2 (ADMIN DOWN) + mtu: 600 + enabled: false +l2_interfaces: + - name: GigabitEthernet0/0 + - name: GigabitEthernet0/0 + - mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + - mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - "11" + - "12" + - "59" + - "67" + - "75" + - "77" + - "81" + - "100" + - 400-408 + - 411-413 + - "415" + - "418" + - "982" + - "986" + - "988" + - "993" + 
encapsulation: dot1q +l3_interfaces: + - ipv4: + - address: 192.168.0.2/24 + name: GigabitEthernet0/0 + - name: GigabitEthernet0/1 + - name: GigabitEthernet0/2 + - name: Loopback888 + - name: Loopback999 + +##Playbook + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces + + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + +##Output +ok: [localhost] => { + "ansible_facts": { + "data_source": [ + { + "data": [ + { + "duplex": "auto", + "enabled": true, + "name": "GigabitEthernet0/0", + "note": [ + "Connected green wire" + ], + "speed": "auto" + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": true, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": [ + "Connected blue wire", + "Configured by Paul" + ], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": true, + "mtu": 400, + "vlan_id": 100 + }, + { + "description": "Eth1 - VIF 101", + "enabled": true, + "vlan_id": 101 + } + ] + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": false, + "mtu": 600, + "name": "GigabitEthernet0/2" + } + ], + "match_key": "name", + "prefix": "interfaces" + }, + { + "data": [ + { + "name": "GigabitEthernet0/0" + }, + { + "name": "GigabitEthernet0/0" + }, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ] + } + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + 
"allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993" + ], + "encapsulation": "dot1q" + } + } + ], + "match_key": "name", + "prefix": "l2_interfaces" + }, + { + "data": [ + { + "ipv4": [ + { + "address": "192.168.0.2/24" + } + ], + "name": "GigabitEthernet0/0" + }, + { + "name": "GigabitEthernet0/1" + }, + { + "name": "GigabitEthernet0/2" + }, + { + "name": "Loopback888" + }, + { + "name": "Loopback999" + } + ], + "match_key": "name", + "prefix": "l3_interfaces" + } + ] + }, + "changed": false +} +Read vars_file 'facts.yml' + +TASK [Combine all the facts based on match_keys] **************************************************************************************************************** +fatal: [localhost]: FAILED! => { + "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" +} """ from ansible.errors import AnsibleFilterError @@ -315,10 +1208,10 @@ try: except ImportError: from jinja2.filters import environmentfilter as pass_environment -import debugpy +# import debugpy -debugpy.listen(3000) -debugpy.wait_for_client() +# debugpy.listen(3000) +# debugpy.wait_for_client() @pass_environment @@ -333,9 +1226,7 @@ def _consolidate(*args, **kwargs): ] data = dict(zip(keys, args[1:])) data.update(kwargs) - aav = AnsibleArgSpecValidator( - data=data, schema=DOCUMENTATION, name="consolidate" - ) + aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate") valid, errors, updated_data = aav.validate() if not valid: raise AnsibleFilterError(errors) From 0cbb257293aaf8c73b1e7056db8315b1b0bd21aa Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 4 Apr 2022 20:56:07 +0530 Subject: [PATCH 04/23] update 1 --- plugins/filter/consolidate.py | 4 +- plugins/plugin_utils/consolidate.py | 78 ++++++++++++++++------------- 2 files changed, 46 insertions(+), 36 deletions(-) diff --git 
a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 01bb170..4cfa3c1 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -1,6 +1,6 @@ # # -*- coding: utf-8 -*- -# Copyright 2021 Red Hat +# Copyright 2022 Red Hat # GNU General Public License v3.0+ # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -1234,7 +1234,7 @@ def _consolidate(*args, **kwargs): class FilterModule(object): - """keep_keys""" + """consolidate""" def filters(self): diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index d4d3d7e..9d76142 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -1,12 +1,12 @@ # # -*- coding: utf-8 -*- -# Copyright 2021 Red Hat +# Copyright 2022 Red Hat # GNU General Public License v3.0+ # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # """ -The keep_keys plugin code +The consolidate plugin code """ from __future__ import absolute_import, division, print_function @@ -27,17 +27,15 @@ def _raise_error(filter, msg): def fail_on_filter(validator_func): + """decorator to fail on supplied filters""" + def update_err(*args, **kwargs): res, err = validator_func(*args, **kwargs) if err.get("match_key_err"): - _raise_error( - "fail_missing_match_key", ", ".join(err["match_key_err"]) - ) + _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"])) if err.get("match_val_err"): - _raise_error( - "fail_missing_match_value", ", ".join(err["match_val_err"]) - ) + _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"])) if err.get("duplicate_err"): _raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) return res @@ -49,13 +47,9 @@ def fail_on_filter(validator_func): def check_missing_match_key_duplicate( data_sources, fail_missing_match_key, fail_duplicate ): - """Validate the operation - :param operation: The operation - :type operation: str - :raises: AnsibleFilterError - """ + """Checks if the 
match_key specified is present in all the supplied data, + also checks for duplicate data accross all the data sources""" results, errors_match_key, errors_duplicate = [], [], [] - # Check for missing and duplicate match key for ds_idx, data_source in enumerate(data_sources): match_key = data_source["match_key"] ds_values = [] @@ -71,9 +65,7 @@ def check_missing_match_key_duplicate( continue if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: - errors_duplicate.append( - f"Duplicate values in data source {ds_idx}" - ) + errors_duplicate.append(f"Duplicate values in data source {ds_idx}") results.append(set(ds_values)) return results, { "match_key_err": errors_match_key, @@ -82,11 +74,19 @@ def check_missing_match_key_duplicate( @fail_on_filter -def check_missing_match_values(results, fail_missing_match_value): +def check_missing_match_values(matched_keys, fail_missing_match_value): + """Checks values to match be consistent over all the whole data source + + Args: + matched_keys (list): list of unique keys based on specified match_keys + fail_missing_match_value (bool): Fail if match_key value is missing in a data set + Returns: + set: set of unique values + """ errors_match_values = [] - all_values = set(itertools.chain.from_iterable(results)) + all_values = set(itertools.chain.from_iterable(matched_keys)) if fail_missing_match_value: - for ds_idx, ds_values in enumerate(results): + for ds_idx, ds_values in enumerate(matched_keys): missing_match = all_values - ds_values if missing_match: errors_match_values.append( @@ -96,13 +96,21 @@ def check_missing_match_values(results, fail_missing_match_value): def consolidate_facts(data_sources, all_values): + """Iterate over all the data sources and consolidate the data + + Args: + data_sources (list): supplied data sources + all_values (set): a set of keys to iterate over + + Returns: + list: list of consolidated data + """ + consolidated_facts = {} for data_source in data_sources: match_key = 
data_source["match_key"] source = data_source["prefix"] - data_dict = { - d[match_key]: d for d in data_source["data"] if match_key in d - } + data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d} for value in sorted(all_values): if value not in consolidated_facts: consolidated_facts[value] = {} @@ -116,19 +124,21 @@ def consolidate( fail_missing_match_value=False, fail_duplicate=False, ): - """keep selective keys recursively from a given data" - :param data: The data passed in (data|keep_keys(...)) - :type data: raw - :param target: List of keys on with operation is to be performed - :type data: list - :type elements: string - :param matching_parameter: matching type of the target keys with data keys - :type data: str + """Calls data validation and consolidation functions + + Args: + data_source (list): list of dicts as data sources + fail_missing_match_key (bool, optional): Fails if match_keys not present in data set. Defaults to False. + fail_missing_match_value (bool, optional): Fails if matching attribute missing in a data. Defaults to False. + fail_duplicate (bool, optional): Fails if duplicate data present in a data. Defaults to False. 
+ + Returns: + list: list of dicts of validated and consolidated data """ - # write code here + key_sets = check_missing_match_key_duplicate( data_source, fail_missing_match_key, fail_duplicate ) key_vals = check_missing_match_values(key_sets, fail_missing_match_value) - datapr = consolidate_facts(data_source, key_vals) - return datapr + consolidated_facts = consolidate_facts(data_source, key_vals) + return consolidated_facts From 1b34ebd896cefc0e4354a2bc28549d7151b76a34 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 4 Apr 2022 21:06:58 +0530 Subject: [PATCH 05/23] tox fixed and docstrings --- plugins/filter/consolidate.py | 15 ++++------ plugins/plugin_utils/consolidate.py | 45 +++++++++++++++++++++++------ 2 files changed, 42 insertions(+), 18 deletions(-) diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 4cfa3c1..0845021 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -1208,15 +1208,10 @@ try: except ImportError: from jinja2.filters import environmentfilter as pass_environment -# import debugpy - -# debugpy.listen(3000) -# debugpy.wait_for_client() - @pass_environment def _consolidate(*args, **kwargs): - """keep specific keys from a data recursively""" + """Consolidate facts together on common attributes""" keys = [ "data_source", @@ -1226,7 +1221,9 @@ def _consolidate(*args, **kwargs): ] data = dict(zip(keys, args[1:])) data.update(kwargs) - aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate") + aav = AnsibleArgSpecValidator( + data=data, schema=DOCUMENTATION, name="consolidate" + ) valid, errors, updated_data = aav.validate() if not valid: raise AnsibleFilterError(errors) @@ -1234,9 +1231,9 @@ def _consolidate(*args, **kwargs): class FilterModule(object): - """consolidate""" + """Consolidate""" def filters(self): - """a mapping of filter names to functions""" + """A mapping of filter names to functions""" return {"consolidate": _consolidate} diff --git 
a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index 9d76142..b549cba 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -18,24 +18,39 @@ import itertools def _raise_error(filter, msg): """Raise an error message, prepend with filter name - :param msg: The message - :type msg: str - :raises: AnsibleError + + Args: + filter (str): Filter name + msg (str): Message specific to filter supplied + + Raises: + AnsibleFilterError: AnsibleError with filter name and message """ error = f"Error when using plugin 'consolidate': '{filter}' reported {msg}" raise AnsibleFilterError(error) def fail_on_filter(validator_func): - """decorator to fail on supplied filters""" + """decorator to fail on supplied filters + + Args: + validator_func (func): Function that generates failure messages + + Returns: + raw: Value without errors if generated and not failed + """ def update_err(*args, **kwargs): res, err = validator_func(*args, **kwargs) if err.get("match_key_err"): - _raise_error("fail_missing_match_key", ", ".join(err["match_key_err"])) + _raise_error( + "fail_missing_match_key", ", ".join(err["match_key_err"]) + ) if err.get("match_val_err"): - _raise_error("fail_missing_match_value", ", ".join(err["match_val_err"])) + _raise_error( + "fail_missing_match_value", ", ".join(err["match_val_err"]) + ) if err.get("duplicate_err"): _raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) return res @@ -48,7 +63,15 @@ def check_missing_match_key_duplicate( data_sources, fail_missing_match_key, fail_duplicate ): """Checks if the match_key specified is present in all the supplied data, - also checks for duplicate data accross all the data sources""" + also checks for duplicate data accross all the data sources + + Args: + data_sources (list): list of dicts as data sources + fail_missing_match_key (bool): Fails if match_keys not present in data set + fail_duplicate (bool): Fails if duplicate data present in a data 
+ Returns: + list: list of unique keys based on specified match_keys + """ results, errors_match_key, errors_duplicate = [], [], [] for ds_idx, data_source in enumerate(data_sources): match_key = data_source["match_key"] @@ -65,7 +88,9 @@ def check_missing_match_key_duplicate( continue if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: - errors_duplicate.append(f"Duplicate values in data source {ds_idx}") + errors_duplicate.append( + f"Duplicate values in data source {ds_idx}" + ) results.append(set(ds_values)) return results, { "match_key_err": errors_match_key, @@ -110,7 +135,9 @@ def consolidate_facts(data_sources, all_values): for data_source in data_sources: match_key = data_source["match_key"] source = data_source["prefix"] - data_dict = {d[match_key]: d for d in data_source["data"] if match_key in d} + data_dict = { + d[match_key]: d for d in data_source["data"] if match_key in d + } for value in sorted(all_values): if value not in consolidated_facts: consolidated_facts[value] = {} From 38986edc8ed605c2ae2feb9ae88a85c386633ec4 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 4 Apr 2022 21:09:45 +0530 Subject: [PATCH 06/23] doc fix --- docs/ansible.utils.consolidate_filter.rst | 2 +- plugins/filter/consolidate.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index cba46fd..3b97abb 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -17,7 +17,7 @@ Version added: 2.5.2 Synopsis -------- -- This plugin presents a collective structured data including all supplied facts grouping on common attributes mentioned. +- This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned. - All other boolean parameter defaults to False unless parameters is explicitly mentioned. 
- Using the parameters below- ``data_source|ansible.utils.consolidate(fail_missing_match_key=False``)) diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 0845021..5ea6d94 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -18,7 +18,7 @@ DOCUMENTATION = """ version_added: "2.5.2" short_description: Consolidate facts together on common attributes. description: - - This plugin presents a collective structured data including all supplied facts grouping on common attributes mentioned. + - This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned. - All other boolean parameter defaults to False unless parameters is explicitly mentioned. - Using the parameters below- C(data_source|ansible.utils.consolidate(fail_missing_match_key=False))) options: @@ -1221,9 +1221,7 @@ def _consolidate(*args, **kwargs): ] data = dict(zip(keys, args[1:])) data.update(kwargs) - aav = AnsibleArgSpecValidator( - data=data, schema=DOCUMENTATION, name="consolidate" - ) + aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="consolidate") valid, errors, updated_data = aav.validate() if not valid: raise AnsibleFilterError(errors) From 02779ae85a4e027f39d0639153267d9ffcedee4f Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Tue, 5 Apr 2022 12:23:31 +0530 Subject: [PATCH 07/23] UTs added --- plugins/filter/consolidate.py | 4 +- tests/unit/plugins/filter/test_consolidate.py | 512 ++++++++++++++++++ 2 files changed, 515 insertions(+), 1 deletion(-) create mode 100644 tests/unit/plugins/filter/test_consolidate.py diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 5ea6d94..9e29cbd 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -1221,7 +1221,9 @@ def _consolidate(*args, **kwargs): ] data = dict(zip(keys, args[1:])) data.update(kwargs) - aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, 
name="consolidate") + aav = AnsibleArgSpecValidator( + data=data, schema=DOCUMENTATION, name="consolidate" + ) valid, errors, updated_data = aav.validate() if not valid: raise AnsibleFilterError(errors) diff --git a/tests/unit/plugins/filter/test_consolidate.py b/tests/unit/plugins/filter/test_consolidate.py new file mode 100644 index 0000000..b3213af --- /dev/null +++ b/tests/unit/plugins/filter/test_consolidate.py @@ -0,0 +1,512 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Red Hat +# GNU General Public License v3.0+ +# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import unittest +from ansible.errors import AnsibleFilterError +from ansible_collections.ansible.utils.plugins.filter.consolidate import ( + _consolidate, +) + + +class TestConsolidate(unittest.TestCase): + def setUp(self): + pass + + def test_consolidate_plugin(self): + data_source = [ + { + "data": [ + { + "duplex": "auto", + "enabled": True, + "name": "GigabitEthernet0/0", + "note": ["Connected green wire"], + "speed": "auto", + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": True, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": ["Connected blue wire", "Configured by Paul"], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": True, + "mtu": 400, + "vlan_id": 100, + }, + { + "description": "Eth1 - VIF 101", + "enabled": True, + "vlan_id": 101, + }, + ], + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": False, + "mtu": 600, + "name": "GigabitEthernet0/2", + }, + ], + "match_key": "name", + "prefix": "interfaces", + }, + { + "data": [ + {"name": "GigabitEthernet0/0"}, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", 
+ "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ] + }, + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + "encapsulation": "dot1q", + }, + }, + ], + "match_key": "name", + "prefix": "l2_interfaces", + }, + { + "data": [ + { + "ipv4": [{"address": "192.168.0.2/24"}], + "name": "GigabitEthernet0/0", + }, + {"name": "GigabitEthernet0/1"}, + {"name": "GigabitEthernet0/2"}, + {"name": "Loopback888"}, + {"name": "Loopback999"}, + ], + "match_key": "name", + "prefix": "l3_interfaces", + }, + ] + + output = { + "GigabitEthernet0/0": { + "interfaces": { + "duplex": "auto", + "enabled": True, + "name": "GigabitEthernet0/0", + "note": ["Connected green wire"], + "speed": "auto", + }, + "l2_interfaces": {"name": "GigabitEthernet0/0"}, + "l3_interfaces": { + "ipv4": [{"address": "192.168.0.2/24"}], + "name": "GigabitEthernet0/0", + }, + }, + "GigabitEthernet0/1": { + "interfaces": { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": True, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": ["Connected blue wire", "Configured by Paul"], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": True, + "mtu": 400, + "vlan_id": 100, + }, + { + "description": "Eth1 - VIF 101", + "enabled": True, + "vlan_id": 101, + }, + ], + }, + "l2_interfaces": { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ] + }, + }, + "l3_interfaces": {"name": "GigabitEthernet0/1"}, + }, + "GigabitEthernet0/2": { + "interfaces": { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + 
"enabled": False, + "mtu": 600, + "name": "GigabitEthernet0/2", + }, + "l2_interfaces": { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + "encapsulation": "dot1q", + }, + }, + "l3_interfaces": {"name": "GigabitEthernet0/2"}, + }, + "Loopback888": { + "interfaces": {}, + "l2_interfaces": {}, + "l3_interfaces": {"name": "Loopback888"}, + }, + "Loopback999": { + "interfaces": {}, + "l2_interfaces": {}, + "l3_interfaces": {"name": "Loopback999"}, + }, + } + args = ["", data_source] + + result = _consolidate(*args) + self.assertEqual(result, output) + + def test_fail_missing_match_key(self): + data_source = [ + { + "data": [ + { + "duplex": "auto", + "enabled": True, + "name": "GigabitEthernet0/0", + "note": ["Connected green wire"], + "speed": "auto", + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": True, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": ["Connected blue wire", "Configured by Paul"], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": True, + "mtu": 400, + "vlan_id": 100, + }, + { + "description": "Eth1 - VIF 101", + "enabled": True, + "vlan_id": 101, + }, + ], + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": False, + "mtu": 600, + "name": "GigabitEthernet0/2", + }, + ], + "match_key": "name", + "prefix": "interfaces", + }, + { + "data": [ + {"name": "GigabitEthernet0/0"}, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ] + }, + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": 
[ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + "encapsulation": "dot1q", + }, + }, + ], + "match_key": "name", + "prefix": "l2_interfaces", + }, + { + "data": [ + { + "ipv4": [{"address": "192.168.0.2/24"}], + "intf_name": "GigabitEthernet0/0", + }, + {"name": "GigabitEthernet0/1"}, + {"name": "GigabitEthernet0/2"}, + {"name": "Loopback888"}, + {"name": "Loopback999"}, + ], + "match_key": "name", + "prefix": "l3_interfaces", + }, + ] + + fail_missing_match_key = True + args = ["", data_source, fail_missing_match_key] + with self.assertRaises(AnsibleFilterError) as error: + _consolidate(*args) + self.assertIn( + "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0", + str(error.exception), + ) + + def test_fail_missing_match_value(self): + data_source = [ + { + "data": [ + { + "duplex": "auto", + "enabled": True, + "name": "GigabitEthernet0/0", + "note": ["Connected green wire"], + "speed": "auto", + }, + { + "description": "Configured by Ansible - Interface 1", + "duplex": "auto", + "enabled": True, + "mtu": 1500, + "name": "GigabitEthernet0/1", + "note": ["Connected blue wire", "Configured by Paul"], + "speed": "auto", + "vifs": [ + { + "comment": "Needs reconfiguration", + "description": "Eth1 - VIF 100", + "enabled": True, + "mtu": 400, + "vlan_id": 100, + }, + { + "description": "Eth1 - VIF 101", + "enabled": True, + "vlan_id": 101, + }, + ], + }, + { + "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + "enabled": False, + "mtu": 600, + "name": "GigabitEthernet0/2", + }, + ], + "match_key": "name", + "prefix": "interfaces", + }, + { + "data": [ + {"name": "GigabitEthernet0/0"}, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + 
"415", + "418", + "982", + "986", + "988", + "993", + ] + }, + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": { + "allowed_vlans": [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + "encapsulation": "dot1q", + }, + }, + ], + "match_key": "name", + "prefix": "l2_interfaces", + }, + { + "data": [ + { + "ipv4": [{"address": "192.168.0.2/24"}], + "name": "GigabitEthernet0/0", + }, + { + "ipv4": [{"address": "192.168.0.3/24"}], + "name": "GigabitEthernet0/0", + }, + {"name": "GigabitEthernet0/1"}, + {"name": "GigabitEthernet0/2"}, + {"name": "Loopback888"}, + {"name": "Loopback999"}, + ], + "match_key": "name", + "prefix": "l3_interfaces", + }, + ] + + fail_missing_match_value = False + fail_missing_match_key = False + fail_duplicate = True + args = [ + "", + data_source, + fail_missing_match_key, + fail_missing_match_value, + fail_duplicate, + ] + with self.assertRaises(AnsibleFilterError) as error: + _consolidate(*args) + self.assertIn( + "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 2", + str(error.exception), + ) From 234b50384269647e52fc531e6f71426a647ffacf Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Tue, 5 Apr 2022 13:47:16 +0530 Subject: [PATCH 08/23] update intg tests and changelog --- .../fragments/consolidate_filter_plugin.yml | 5 + .../targets/utils_consolidate/tasks/main.yaml | 14 ++ .../utils_consolidate/tasks/simple.yaml | 134 ++++++++++++++++++ .../targets/utils_consolidate/vars/main.yaml | 109 ++++++++++++++ 4 files changed, 262 insertions(+) create mode 100644 changelogs/fragments/consolidate_filter_plugin.yml create mode 100644 tests/integration/targets/utils_consolidate/tasks/main.yaml create mode 100644 tests/integration/targets/utils_consolidate/tasks/simple.yaml create mode 100644 tests/integration/targets/utils_consolidate/vars/main.yaml diff --git 
a/changelogs/fragments/consolidate_filter_plugin.yml b/changelogs/fragments/consolidate_filter_plugin.yml new file mode 100644 index 0000000..e745cda --- /dev/null +++ b/changelogs/fragments/consolidate_filter_plugin.yml @@ -0,0 +1,5 @@ +--- +minor_changes: + - "'consolidate' filter plugin added." +trivial: + - Fix sanity issues and update black vesion. diff --git a/tests/integration/targets/utils_consolidate/tasks/main.yaml b/tests/integration/targets/utils_consolidate/tasks/main.yaml new file mode 100644 index 0000000..eb1a94b --- /dev/null +++ b/tests/integration/targets/utils_consolidate/tasks/main.yaml @@ -0,0 +1,14 @@ +--- +- name: Recursively find all test files + find: + file_type: file + paths: "{{ role_path }}/tasks" + recurse: false + use_regex: true + patterns: + - '^(?!_|main).+$' + delegate_to: localhost + register: found + +- include: "{{ item.path }}" + loop: "{{ found.files }}" diff --git a/tests/integration/targets/utils_consolidate/tasks/simple.yaml b/tests/integration/targets/utils_consolidate/tasks/simple.yaml new file mode 100644 index 0000000..150a422 --- /dev/null +++ b/tests/integration/targets/utils_consolidate/tasks/simple.yaml @@ -0,0 +1,134 @@ +--- +- name: Build the data structure + ansible.builtin.set_fact: + data_source: + - data: + [ + { "name": "Loopback888" }, + { "name": "Loopback999" }, + { "name": "GigabitEthernet0/0" }, + { "name": "GigabitEthernet0/1" }, + { "name": "GigabitEthernet0/2" }, + ] + match_key: name + prefix: acl_interfaces + - data: + [ + { + "description": "Configured by Ansible Team", + "enabled": False, + "name": "Loopback888", + }, + { + "description": "Configured by Ansible Team", + "enabled": False, + "name": "Loopback888", + }, + { "enabled": True, "name": "Loopback999" }, + { + "description": "Configured and Managed By Ansible Team", + "enabled": True, + "name": "GigabitEthernet0/0", + }, + { + "description": "This is a user template", + "enabled": True, + "name": "GigabitEthernet0/1", + }, + { + 
"description": "This is a user template", + "enabled": True, + "name": "GigabitEthernet0/2", + }, + ] + match_key: name + prefix: interfaces + - data: + [ + { "name": "GigabitEthernet0/0" }, + { + "mode": "access", + "name": "GigabitEthernet0/1", + "trunk": + { + "allowed_vlans": + [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + }, + }, + { + "mode": "trunk", + "name": "GigabitEthernet0/2", + "trunk": + { + "allowed_vlans": + [ + "11", + "12", + "59", + "67", + "75", + "77", + "81", + "100", + "400-408", + "411-413", + "415", + "418", + "982", + "986", + "988", + "993", + ], + "encapsulation": "dot1q", + }, + }, + ] + match_key: name + prefix: l2_interfaces + - data: + [ + { + "ipv4": [{ "address": "192.168.0.76/24" }], + "name": "GigabitEthernet0/0", + }, + { "name": "GigabitEthernet0/1" }, + { "name": "GigabitEthernet0/2" }, + { "name": "Loopback888" }, + { "name": "Loopback999" }, + ] + match_key: name + prefix: l3_interfaces + - data: + [ + { "no_name": True }, + { "name": "GigabitEthernet0/1" }, + { "name": "GigabitEthernet100/100" }, + ] + match_key: name + prefix: ospf_interfaces + +- name: Combine all the dictionaries based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + +- name: Assert result dicts + assert: + that: + - combined == combined_facts diff --git a/tests/integration/targets/utils_consolidate/vars/main.yaml b/tests/integration/targets/utils_consolidate/vars/main.yaml new file mode 100644 index 0000000..239ab4d --- /dev/null +++ b/tests/integration/targets/utils_consolidate/vars/main.yaml @@ -0,0 +1,109 @@ +--- +combined_facts: + GigabitEthernet0/0: + acl_interfaces: + name: GigabitEthernet0/0 + interfaces: + description: Configured and Managed By Ansible Team + enabled: true + name: GigabitEthernet0/0 + l2_interfaces: + name: GigabitEthernet0/0 + l3_interfaces: + ipv4: + - 
address: 10.8.38.76/24 + name: GigabitEthernet0/0 + ospf_interfaces: {} + GigabitEthernet0/1: + acl_interfaces: + name: GigabitEthernet0/1 + interfaces: + description: This is a user template + enabled: true + name: GigabitEthernet0/1 + l2_interfaces: + mode: access + name: GigabitEthernet0/1 + trunk: + allowed_vlans: + - '11' + - '12' + - '59' + - '67' + - '75' + - '77' + - '81' + - '100' + - 400-408 + - 411-413 + - '415' + - '418' + - '982' + - '986' + - '988' + - '993' + l3_interfaces: + name: GigabitEthernet0/1 + ospf_interfaces: + name: GigabitEthernet0/1 + GigabitEthernet0/2: + acl_interfaces: + name: GigabitEthernet0/2 + interfaces: + description: This is a user template + enabled: true + name: GigabitEthernet0/2 + l2_interfaces: + mode: trunk + name: GigabitEthernet0/2 + trunk: + allowed_vlans: + - '11' + - '12' + - '59' + - '67' + - '75' + - '77' + - '81' + - '100' + - 400-408 + - 411-413 + - '415' + - '418' + - '982' + - '986' + - '988' + - '993' + encapsulation: dot1q + l3_interfaces: + name: GigabitEthernet0/2 + ospf_interfaces: {} + GigabitEthernet100/100: + acl_interfaces: {} + interfaces: {} + l2_interfaces: {} + l3_interfaces: {} + ospf_interfaces: + name: GigabitEthernet100/100 + Loopback888: + acl_interfaces: + name: Loopback888 + interfaces: + description: Configured by Ansible Team + enabled: false + name: Loopback888 + l2_interfaces: {} + l3_interfaces: + name: Loopback888 + ospf_interfaces: {} + Loopback999: + acl_interfaces: + name: Loopback999 + interfaces: + enabled: true + name: Loopback999 + l2_interfaces: {} + l3_interfaces: + name: Loopback999 + ospf_interfaces: {} + From 3f98e8ae7a57ad09b7c8e9792dd3075c34cb7ac6 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Tue, 5 Apr 2022 15:44:37 +0530 Subject: [PATCH 09/23] sanity and tox fixed --- docs/ansible.utils.consolidate_filter.rst | 736 +++++++++--------- plugins/filter/consolidate.py | 736 +++++++++--------- .../targets/utils_consolidate/vars/main.yaml | 1 - 3 files changed, 742 
insertions(+), 731 deletions(-) diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index 3b97abb..01622da 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -185,9 +185,10 @@ Examples .. code-block:: yaml # Consolidated facts example - # ------------ + # -------------------------- ##facts.yml + interfaces: - name: GigabitEthernet0/0 enabled: true @@ -270,25 +271,25 @@ Examples - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" ##Output # ok: [localhost] => { @@ -431,7 +432,7 @@ Examples # } # Read vars_file 'facts.yml' - # TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + # TASK [Combine all the facts based on match_keys] # ok: [localhost] => { # "ansible_facts": { # "combined": { @@ -653,171 +654,172 @@ Examples - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: 
- - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" ##Output - ok: [localhost] => { - "ansible_facts": { - "data_source": [ - { - "data": [ - { - "duplex": "auto", - "enabled": true, - "name": "GigabitEthernet0/0", - "note": [ - "Connected green wire" - ], - "speed": "auto" - }, - { - "description": "Configured by Ansible - Interface 1", - "duplex": "auto", - "enabled": true, - "mtu": 1500, - "name": "GigabitEthernet0/1", - "note": [ - "Connected blue wire", - "Configured by Paul" - ], - "speed": "auto", - "vifs": [ - { - "comment": "Needs reconfiguration", - "description": "Eth1 - VIF 100", - "enabled": true, - "mtu": 400, - "vlan_id": 100 - }, - { - "description": "Eth1 - VIF 101", - "enabled": true, - "vlan_id": 101 - } - ] - }, - { - "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", - "enabled": false, - "mtu": 600, - "name": "GigabitEthernet0/2" - } - ], - "match_key": "name", - "prefix": "interfaces" - }, - { - "data": [ - { - "name": "GigabitEthernet0/0" - }, - { - "mode": "access", - "name": "GigabitEthernet0/1", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - 
"986", - "988", - "993" - ] - } - }, - { - "mode": "trunk", - "name": "GigabitEthernet0/2", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ], - "encapsulation": "dot1q" - } - } - ], - "match_key": "name", - "prefix": "l2_interfaces" - }, - { - "data": [ - { - "ipv4": [ - { - "address": "192.168.0.2/24" - } - ], - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/1" - }, - { - "name": "GigabitEthernet0/2" - }, - { - "name": "Loopback888" - }, - { - "name": "Loopback999" - } - ], - "match_key": "name", - "prefix": "l3_interfaces" - } - ] - }, - "changed": false - } - Read vars_file 'facts.yml' + # ok: [localhost] => { + # "ansible_facts": { + # "data_source": [ + # { + # "data": [ + # { + # "duplex": "auto", + # "enabled": true, + # "name": "GigabitEthernet0/0", + # "note": [ + # "Connected green wire" + # ], + # "speed": "auto" + # }, + # { + # "description": "Configured by Ansible - Interface 1", + # "duplex": "auto", + # "enabled": true, + # "mtu": 1500, + # "name": "GigabitEthernet0/1", + # "note": [ + # "Connected blue wire", + # "Configured by Paul" + # ], + # "speed": "auto", + # "vifs": [ + # { + # "comment": "Needs reconfiguration", + # "description": "Eth1 - VIF 100", + # "enabled": true, + # "mtu": 400, + # "vlan_id": 100 + # }, + # { + # "description": "Eth1 - VIF 101", + # "enabled": true, + # "vlan_id": 101 + # } + # ] + # }, + # { + # "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + # "enabled": false, + # "mtu": 600, + # "name": "GigabitEthernet0/2" + # } + # ], + # "match_key": "name", + # "prefix": "interfaces" + # }, + # { + # "data": [ + # { + # "name": "GigabitEthernet0/0" + # }, + # { + # "mode": "access", + # "name": "GigabitEthernet0/1", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # 
"411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ] + # } + # }, + # { + # "mode": "trunk", + # "name": "GigabitEthernet0/2", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ], + # "encapsulation": "dot1q" + # } + # } + # ], + # "match_key": "name", + # "prefix": "l2_interfaces" + # }, + # { + # "data": [ + # { + # "ipv4": [ + # { + # "address": "192.168.0.2/24" + # } + # ], + # "name": "GigabitEthernet0/0" + # }, + # { + # "name": "GigabitEthernet0/1" + # }, + # { + # "name": "GigabitEthernet0/2" + # }, + # { + # "name": "Loopback888" + # }, + # { + # "name": "Loopback999" + # } + # ], + # "match_key": "name", + # "prefix": "l3_interfaces" + # } + # ] + # }, + # "changed": false + # } + # Read vars_file 'facts.yml' - TASK [Combine all the facts based on match_keys] **************************************************************************************************************** - fatal: [localhost]: FAILED! => { - "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" - } + # TASK [Combine all the facts based on match_keys] + # fatal: [localhost]: FAILED! 
=> { + # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, + # Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" + # } # Failing on missing match keys # ----------------------------- @@ -905,25 +907,25 @@ Examples - inft_name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" ##Output # ok: [localhost] => { @@ -1066,9 +1068,13 @@ Examples # } # Read vars_file 'facts.yml' - # TASK [Combine all the facts based on match_keys] **************************************************************************************************************** + # TASK [Combine all the facts based on match_keys] # fatal: [localhost]: FAILED! 
=> { - # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0, Missing match key 'name' in data source 2 in list entry 1, Missing match key 'name' in data source 2 in list entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing match key 'name' in data source 2 in list entry 4" + # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match + # key 'name' in data source 2 in list entry 0, Missing match key 'name' in data + # source 2 in list entry 1, Missing match key 'name' in data source 2 in list + # entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing + # match key 'name' in data source 2 in list entry 4" # } # Failing on duplicate values in facts @@ -1158,174 +1164,174 @@ Examples - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces + vars_files: + - "facts.yml" + tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" ##Output - ok: [localhost] => { - "ansible_facts": { - "data_source": [ - { - "data": [ - { - "duplex": "auto", - "enabled": true, - "name": "GigabitEthernet0/0", - "note": [ - 
"Connected green wire" - ], - "speed": "auto" - }, - { - "description": "Configured by Ansible - Interface 1", - "duplex": "auto", - "enabled": true, - "mtu": 1500, - "name": "GigabitEthernet0/1", - "note": [ - "Connected blue wire", - "Configured by Paul" - ], - "speed": "auto", - "vifs": [ - { - "comment": "Needs reconfiguration", - "description": "Eth1 - VIF 100", - "enabled": true, - "mtu": 400, - "vlan_id": 100 - }, - { - "description": "Eth1 - VIF 101", - "enabled": true, - "vlan_id": 101 - } - ] - }, - { - "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", - "enabled": false, - "mtu": 600, - "name": "GigabitEthernet0/2" - } - ], - "match_key": "name", - "prefix": "interfaces" - }, - { - "data": [ - { - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/0" - }, - { - "mode": "access", - "name": "GigabitEthernet0/1", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ] - } - }, - { - "mode": "trunk", - "name": "GigabitEthernet0/2", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ], - "encapsulation": "dot1q" - } - } - ], - "match_key": "name", - "prefix": "l2_interfaces" - }, - { - "data": [ - { - "ipv4": [ - { - "address": "192.168.0.2/24" - } - ], - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/1" - }, - { - "name": "GigabitEthernet0/2" - }, - { - "name": "Loopback888" - }, - { - "name": "Loopback999" - } - ], - "match_key": "name", - "prefix": "l3_interfaces" - } - ] - }, - "changed": false - } - Read vars_file 'facts.yml' + # ok: [localhost] => { + # "ansible_facts": { + # "data_source": [ + # { + # "data": [ + # { + # "duplex": "auto", + # "enabled": true, + # "name": "GigabitEthernet0/0", + # "note": [ + # "Connected green wire" + # ], + # "speed": 
"auto" + # }, + # { + # "description": "Configured by Ansible - Interface 1", + # "duplex": "auto", + # "enabled": true, + # "mtu": 1500, + # "name": "GigabitEthernet0/1", + # "note": [ + # "Connected blue wire", + # "Configured by Paul" + # ], + # "speed": "auto", + # "vifs": [ + # { + # "comment": "Needs reconfiguration", + # "description": "Eth1 - VIF 100", + # "enabled": true, + # "mtu": 400, + # "vlan_id": 100 + # }, + # { + # "description": "Eth1 - VIF 101", + # "enabled": true, + # "vlan_id": 101 + # } + # ] + # }, + # { + # "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", + # "enabled": false, + # "mtu": 600, + # "name": "GigabitEthernet0/2" + # } + # ], + # "match_key": "name", + # "prefix": "interfaces" + # }, + # { + # "data": [ + # { + # "name": "GigabitEthernet0/0" + # }, + # { + # "name": "GigabitEthernet0/0" + # }, + # { + # "mode": "access", + # "name": "GigabitEthernet0/1", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ] + # } + # }, + # { + # "mode": "trunk", + # "name": "GigabitEthernet0/2", + # "trunk": { + # "allowed_vlans": [ + # "11", + # "12", + # "59", + # "67", + # "75", + # "77", + # "81", + # "100", + # "400-408", + # "411-413", + # "415", + # "418", + # "982", + # "986", + # "988", + # "993" + # ], + # "encapsulation": "dot1q" + # } + # } + # ], + # "match_key": "name", + # "prefix": "l2_interfaces" + # }, + # { + # "data": [ + # { + # "ipv4": [ + # { + # "address": "192.168.0.2/24" + # } + # ], + # "name": "GigabitEthernet0/0" + # }, + # { + # "name": "GigabitEthernet0/1" + # }, + # { + # "name": "GigabitEthernet0/2" + # }, + # { + # "name": "Loopback888" + # }, + # { + # "name": "Loopback999" + # } + # ], + # "match_key": "name", + # "prefix": "l3_interfaces" + # } + # ] + # }, + # "changed": false + # } + # Read vars_file 'facts.yml' - TASK 
[Combine all the facts based on match_keys] **************************************************************************************************************** - fatal: [localhost]: FAILED! => { - "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" - } + # TASK [Combine all the facts based on match_keys] + # fatal: [localhost]: FAILED! => { + # "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" + # } diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 9e29cbd..975059f 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -52,9 +52,10 @@ DOCUMENTATION = """ EXAMPLES = r""" # Consolidated facts example -# ------------ +# -------------------------- ##facts.yml + interfaces: - name: GigabitEthernet0/0 enabled: true @@ -137,25 +138,25 @@ l3_interfaces: - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces +vars_files: + - "facts.yml" +tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" ##Output # ok: [localhost] => { @@ -298,7 +299,7 @@ l3_interfaces: # } # Read vars_file 'facts.yml' 
-# TASK [Combine all the facts based on match_keys] **************************************************************************************************************** +# TASK [Combine all the facts based on match_keys] # ok: [localhost] => { # "ansible_facts": { # "combined": { @@ -520,171 +521,172 @@ l3_interfaces: - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces +vars_files: + - "facts.yml" +tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" ##Output -ok: [localhost] => { - "ansible_facts": { - "data_source": [ - { - "data": [ - { - "duplex": "auto", - "enabled": true, - "name": "GigabitEthernet0/0", - "note": [ - "Connected green wire" - ], - "speed": "auto" - }, - { - "description": "Configured by Ansible - Interface 1", - "duplex": "auto", - "enabled": true, - "mtu": 1500, - "name": "GigabitEthernet0/1", - "note": [ - "Connected blue wire", - "Configured by Paul" - ], - "speed": "auto", - "vifs": [ - { - "comment": "Needs reconfiguration", - "description": "Eth1 - VIF 100", - "enabled": true, - "mtu": 400, - "vlan_id": 100 - }, - { - "description": "Eth1 - VIF 101", - "enabled": true, - "vlan_id": 101 - } - ] - }, - { - "description": 
"Configured by Ansible - Interface 2 (ADMIN DOWN)", - "enabled": false, - "mtu": 600, - "name": "GigabitEthernet0/2" - } - ], - "match_key": "name", - "prefix": "interfaces" - }, - { - "data": [ - { - "name": "GigabitEthernet0/0" - }, - { - "mode": "access", - "name": "GigabitEthernet0/1", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ] - } - }, - { - "mode": "trunk", - "name": "GigabitEthernet0/2", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ], - "encapsulation": "dot1q" - } - } - ], - "match_key": "name", - "prefix": "l2_interfaces" - }, - { - "data": [ - { - "ipv4": [ - { - "address": "192.168.0.2/24" - } - ], - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/1" - }, - { - "name": "GigabitEthernet0/2" - }, - { - "name": "Loopback888" - }, - { - "name": "Loopback999" - } - ], - "match_key": "name", - "prefix": "l3_interfaces" - } - ] - }, - "changed": false -} -Read vars_file 'facts.yml' +# ok: [localhost] => { +# "ansible_facts": { +# "data_source": [ +# { +# "data": [ +# { +# "duplex": "auto", +# "enabled": true, +# "name": "GigabitEthernet0/0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" +# }, +# { +# "description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "GigabitEthernet0/1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# 
"enabled": false, +# "mtu": 600, +# "name": "GigabitEthernet0/2" +# } +# ], +# "match_key": "name", +# "prefix": "interfaces" +# }, +# { +# "data": [ +# { +# "name": "GigabitEthernet0/0" +# }, +# { +# "mode": "access", +# "name": "GigabitEthernet0/1", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ] +# } +# }, +# { +# "mode": "trunk", +# "name": "GigabitEthernet0/2", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ], +# "encapsulation": "dot1q" +# } +# } +# ], +# "match_key": "name", +# "prefix": "l2_interfaces" +# }, +# { +# "data": [ +# { +# "ipv4": [ +# { +# "address": "192.168.0.2/24" +# } +# ], +# "name": "GigabitEthernet0/0" +# }, +# { +# "name": "GigabitEthernet0/1" +# }, +# { +# "name": "GigabitEthernet0/2" +# }, +# { +# "name": "Loopback888" +# }, +# { +# "name": "Loopback999" +# } +# ], +# "match_key": "name", +# "prefix": "l3_interfaces" +# } +# ] +# }, +# "changed": false +# } +# Read vars_file 'facts.yml' -TASK [Combine all the facts based on match_keys] **************************************************************************************************************** -fatal: [localhost]: FAILED! => { - "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" -} +# TASK [Combine all the facts based on match_keys] +# fatal: [localhost]: FAILED! 
=> { +# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' reported Missing match value Loopback999, +# Loopback888 in data source 0, Missing match value Loopback999, Loopback888 in data source 1" +# } # Failing on missing match keys # ----------------------------- @@ -772,25 +774,25 @@ l3_interfaces: - inft_name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces +vars_files: + - "facts.yml" +tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" ##Output # ok: [localhost] => { @@ -933,9 +935,13 @@ l3_interfaces: # } # Read vars_file 'facts.yml' -# TASK [Combine all the facts based on match_keys] **************************************************************************************************************** +# TASK [Combine all the facts based on match_keys] # fatal: [localhost]: FAILED! 
=> { -# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0, Missing match key 'name' in data source 2 in list entry 1, Missing match key 'name' in data source 2 in list entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing match key 'name' in data source 2 in list entry 4" +# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match +# key 'name' in data source 2 in list entry 0, Missing match key 'name' in data +# source 2 in list entry 1, Missing match key 'name' in data source 2 in list +# entry 2, Missing match key 'name' in data source 2 in list entry 3, Missing +# match key 'name' in data source 2 in list entry 4" # } # Failing on duplicate values in facts @@ -1025,174 +1031,174 @@ l3_interfaces: - name: Loopback999 ##Playbook - vars_files: - - "facts.yml" - tasks: - - name: Build the facts collection - set_fact: - data_source: - - data: "{{ interfaces }}" - match_key: name - prefix: interfaces - - data: "{{ l2_interfaces }}" - match_key: name - prefix: l2_interfaces - - data: "{{ l3_interfaces }}" - match_key: name - prefix: l3_interfaces +vars_files: + - "facts.yml" +tasks: + - name: Build the facts collection + set_fact: + data_source: + - data: "{{ interfaces }}" + match_key: name + prefix: interfaces + - data: "{{ l2_interfaces }}" + match_key: name + prefix: l2_interfaces + - data: "{{ l3_interfaces }}" + match_key: name + prefix: l3_interfaces - - name: Combine all the facts based on match_keys - set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + - name: Combine all the facts based on match_keys + set_fact: + combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" ##Output -ok: [localhost] => { - "ansible_facts": { - "data_source": [ - { - "data": [ - { - "duplex": "auto", - "enabled": true, - "name": "GigabitEthernet0/0", - "note": [ - "Connected 
green wire" - ], - "speed": "auto" - }, - { - "description": "Configured by Ansible - Interface 1", - "duplex": "auto", - "enabled": true, - "mtu": 1500, - "name": "GigabitEthernet0/1", - "note": [ - "Connected blue wire", - "Configured by Paul" - ], - "speed": "auto", - "vifs": [ - { - "comment": "Needs reconfiguration", - "description": "Eth1 - VIF 100", - "enabled": true, - "mtu": 400, - "vlan_id": 100 - }, - { - "description": "Eth1 - VIF 101", - "enabled": true, - "vlan_id": 101 - } - ] - }, - { - "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", - "enabled": false, - "mtu": 600, - "name": "GigabitEthernet0/2" - } - ], - "match_key": "name", - "prefix": "interfaces" - }, - { - "data": [ - { - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/0" - }, - { - "mode": "access", - "name": "GigabitEthernet0/1", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ] - } - }, - { - "mode": "trunk", - "name": "GigabitEthernet0/2", - "trunk": { - "allowed_vlans": [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993" - ], - "encapsulation": "dot1q" - } - } - ], - "match_key": "name", - "prefix": "l2_interfaces" - }, - { - "data": [ - { - "ipv4": [ - { - "address": "192.168.0.2/24" - } - ], - "name": "GigabitEthernet0/0" - }, - { - "name": "GigabitEthernet0/1" - }, - { - "name": "GigabitEthernet0/2" - }, - { - "name": "Loopback888" - }, - { - "name": "Loopback999" - } - ], - "match_key": "name", - "prefix": "l3_interfaces" - } - ] - }, - "changed": false -} -Read vars_file 'facts.yml' +# ok: [localhost] => { +# "ansible_facts": { +# "data_source": [ +# { +# "data": [ +# { +# "duplex": "auto", +# "enabled": true, +# "name": "GigabitEthernet0/0", +# "note": [ +# "Connected green wire" +# ], +# "speed": "auto" +# }, +# { +# 
"description": "Configured by Ansible - Interface 1", +# "duplex": "auto", +# "enabled": true, +# "mtu": 1500, +# "name": "GigabitEthernet0/1", +# "note": [ +# "Connected blue wire", +# "Configured by Paul" +# ], +# "speed": "auto", +# "vifs": [ +# { +# "comment": "Needs reconfiguration", +# "description": "Eth1 - VIF 100", +# "enabled": true, +# "mtu": 400, +# "vlan_id": 100 +# }, +# { +# "description": "Eth1 - VIF 101", +# "enabled": true, +# "vlan_id": 101 +# } +# ] +# }, +# { +# "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)", +# "enabled": false, +# "mtu": 600, +# "name": "GigabitEthernet0/2" +# } +# ], +# "match_key": "name", +# "prefix": "interfaces" +# }, +# { +# "data": [ +# { +# "name": "GigabitEthernet0/0" +# }, +# { +# "name": "GigabitEthernet0/0" +# }, +# { +# "mode": "access", +# "name": "GigabitEthernet0/1", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ] +# } +# }, +# { +# "mode": "trunk", +# "name": "GigabitEthernet0/2", +# "trunk": { +# "allowed_vlans": [ +# "11", +# "12", +# "59", +# "67", +# "75", +# "77", +# "81", +# "100", +# "400-408", +# "411-413", +# "415", +# "418", +# "982", +# "986", +# "988", +# "993" +# ], +# "encapsulation": "dot1q" +# } +# } +# ], +# "match_key": "name", +# "prefix": "l2_interfaces" +# }, +# { +# "data": [ +# { +# "ipv4": [ +# { +# "address": "192.168.0.2/24" +# } +# ], +# "name": "GigabitEthernet0/0" +# }, +# { +# "name": "GigabitEthernet0/1" +# }, +# { +# "name": "GigabitEthernet0/2" +# }, +# { +# "name": "Loopback888" +# }, +# { +# "name": "Loopback999" +# } +# ], +# "match_key": "name", +# "prefix": "l3_interfaces" +# } +# ] +# }, +# "changed": false +# } +# Read vars_file 'facts.yml' -TASK [Combine all the facts based on match_keys] **************************************************************************************************************** 
-fatal: [localhost]: FAILED! => { - "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" -} +# TASK [Combine all the facts based on match_keys] +# fatal: [localhost]: FAILED! => { +# "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 1" +# } """ from ansible.errors import AnsibleFilterError diff --git a/tests/integration/targets/utils_consolidate/vars/main.yaml b/tests/integration/targets/utils_consolidate/vars/main.yaml index 239ab4d..79a684b 100644 --- a/tests/integration/targets/utils_consolidate/vars/main.yaml +++ b/tests/integration/targets/utils_consolidate/vars/main.yaml @@ -106,4 +106,3 @@ combined_facts: l3_interfaces: name: Loopback999 ospf_interfaces: {} - From 39ab9aeecd2b7a1077df23c4adc57e2d70268756 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Tue, 5 Apr 2022 15:45:50 +0530 Subject: [PATCH 10/23] changelog spelling corrected --- changelogs/fragments/consolidate_filter_plugin.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/consolidate_filter_plugin.yml b/changelogs/fragments/consolidate_filter_plugin.yml index e745cda..e57e811 100644 --- a/changelogs/fragments/consolidate_filter_plugin.yml +++ b/changelogs/fragments/consolidate_filter_plugin.yml @@ -2,4 +2,4 @@ minor_changes: - "'consolidate' filter plugin added." trivial: - - Fix sanity issues and update black vesion. + - Fix sanity issues and update black version. 
From ae4bfa421c6069c4bee89e38d648fb004e9cf63e Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Wed, 6 Apr 2022 11:20:16 +0530 Subject: [PATCH 11/23] remove strings --- plugins/plugin_utils/consolidate.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index b549cba..30ea170 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -26,7 +26,9 @@ def _raise_error(filter, msg): Raises: AnsibleFilterError: AnsibleError with filter name and message """ - error = f"Error when using plugin 'consolidate': '{filter}' reported {msg}" + error = "Error when using plugin 'consolidate': '{filter}' reported {msg}".format( + filter=filter, msg=msg + ) raise AnsibleFilterError(error) @@ -83,13 +85,17 @@ def check_missing_match_key_duplicate( except KeyError: if fail_missing_match_key: errors_match_key.append( - f"Missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}" + "Missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format( + match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx + ) ) continue if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: errors_duplicate.append( - f"Duplicate values in data source {ds_idx}" + "Duplicate values in data source {ds_idx}".format( + ds_idx=ds_idx + ) ) results.append(set(ds_values)) return results, { @@ -114,8 +120,11 @@ def check_missing_match_values(matched_keys, fail_missing_match_value): for ds_idx, ds_values in enumerate(matched_keys): missing_match = all_values - ds_values if missing_match: + m_matches = ", ".join(missing_match) errors_match_values.append( - f"Missing match value {', '.join(missing_match)} in data source {ds_idx}" + "Missing match value {m_matches} in data source {ds_idx}".format( + ds_idx=ds_idx, m_matches=m_matches + ) ) return all_values, {"match_val_err": errors_match_values} From 
e7d50b8ce607dc61bea79a2cab762760b7e34a2c Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Wed, 6 Apr 2022 13:48:47 +0530 Subject: [PATCH 12/23] fix intergration tests --- .../utils_consolidate/tasks/simple.yaml | 101 +----------------- .../targets/utils_consolidate/vars/main.yaml | 92 ---------------- 2 files changed, 1 insertion(+), 192 deletions(-) diff --git a/tests/integration/targets/utils_consolidate/tasks/simple.yaml b/tests/integration/targets/utils_consolidate/tasks/simple.yaml index 150a422..1bb0b2f 100644 --- a/tests/integration/targets/utils_consolidate/tasks/simple.yaml +++ b/tests/integration/targets/utils_consolidate/tasks/simple.yaml @@ -4,9 +4,6 @@ data_source: - data: [ - { "name": "Loopback888" }, - { "name": "Loopback999" }, - { "name": "GigabitEthernet0/0" }, { "name": "GigabitEthernet0/1" }, { "name": "GigabitEthernet0/2" }, ] @@ -14,22 +11,6 @@ prefix: acl_interfaces - data: [ - { - "description": "Configured by Ansible Team", - "enabled": False, - "name": "Loopback888", - }, - { - "description": "Configured by Ansible Team", - "enabled": False, - "name": "Loopback888", - }, - { "enabled": True, "name": "Loopback999" }, - { - "description": "Configured and Managed By Ansible Team", - "enabled": True, - "name": "GigabitEthernet0/0", - }, { "description": "This is a user template", "enabled": True, @@ -43,89 +24,9 @@ ] match_key: name prefix: interfaces - - data: - [ - { "name": "GigabitEthernet0/0" }, - { - "mode": "access", - "name": "GigabitEthernet0/1", - "trunk": - { - "allowed_vlans": - [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993", - ], - }, - }, - { - "mode": "trunk", - "name": "GigabitEthernet0/2", - "trunk": - { - "allowed_vlans": - [ - "11", - "12", - "59", - "67", - "75", - "77", - "81", - "100", - "400-408", - "411-413", - "415", - "418", - "982", - "986", - "988", - "993", - ], - "encapsulation": "dot1q", - }, - }, - ] - match_key: 
name - prefix: l2_interfaces - - data: - [ - { - "ipv4": [{ "address": "192.168.0.76/24" }], - "name": "GigabitEthernet0/0", - }, - { "name": "GigabitEthernet0/1" }, - { "name": "GigabitEthernet0/2" }, - { "name": "Loopback888" }, - { "name": "Loopback999" }, - ] - match_key: name - prefix: l3_interfaces - - data: - [ - { "no_name": True }, - { "name": "GigabitEthernet0/1" }, - { "name": "GigabitEthernet100/100" }, - ] - match_key: name - prefix: ospf_interfaces - name: Combine all the dictionaries based on match_keys - set_fact: + ansible.builtin.set_fact: combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" - name: Assert result dicts diff --git a/tests/integration/targets/utils_consolidate/vars/main.yaml b/tests/integration/targets/utils_consolidate/vars/main.yaml index 79a684b..6952326 100644 --- a/tests/integration/targets/utils_consolidate/vars/main.yaml +++ b/tests/integration/targets/utils_consolidate/vars/main.yaml @@ -1,19 +1,5 @@ --- combined_facts: - GigabitEthernet0/0: - acl_interfaces: - name: GigabitEthernet0/0 - interfaces: - description: Configured and Managed By Ansible Team - enabled: true - name: GigabitEthernet0/0 - l2_interfaces: - name: GigabitEthernet0/0 - l3_interfaces: - ipv4: - - address: 10.8.38.76/24 - name: GigabitEthernet0/0 - ospf_interfaces: {} GigabitEthernet0/1: acl_interfaces: name: GigabitEthernet0/1 @@ -21,31 +7,6 @@ combined_facts: description: This is a user template enabled: true name: GigabitEthernet0/1 - l2_interfaces: - mode: access - name: GigabitEthernet0/1 - trunk: - allowed_vlans: - - '11' - - '12' - - '59' - - '67' - - '75' - - '77' - - '81' - - '100' - - 400-408 - - 411-413 - - '415' - - '418' - - '982' - - '986' - - '988' - - '993' - l3_interfaces: - name: GigabitEthernet0/1 - ospf_interfaces: - name: GigabitEthernet0/1 GigabitEthernet0/2: acl_interfaces: name: GigabitEthernet0/2 @@ -53,56 +14,3 @@ combined_facts: description: This is a user template enabled: true name: 
GigabitEthernet0/2 - l2_interfaces: - mode: trunk - name: GigabitEthernet0/2 - trunk: - allowed_vlans: - - '11' - - '12' - - '59' - - '67' - - '75' - - '77' - - '81' - - '100' - - 400-408 - - 411-413 - - '415' - - '418' - - '982' - - '986' - - '988' - - '993' - encapsulation: dot1q - l3_interfaces: - name: GigabitEthernet0/2 - ospf_interfaces: {} - GigabitEthernet100/100: - acl_interfaces: {} - interfaces: {} - l2_interfaces: {} - l3_interfaces: {} - ospf_interfaces: - name: GigabitEthernet100/100 - Loopback888: - acl_interfaces: - name: Loopback888 - interfaces: - description: Configured by Ansible Team - enabled: false - name: Loopback888 - l2_interfaces: {} - l3_interfaces: - name: Loopback888 - ospf_interfaces: {} - Loopback999: - acl_interfaces: - name: Loopback999 - interfaces: - enabled: true - name: Loopback999 - l2_interfaces: {} - l3_interfaces: - name: Loopback999 - ospf_interfaces: {} From f3e851d54408739ba99929ce84d9974b046841a4 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Wed, 6 Apr 2022 19:19:48 +0530 Subject: [PATCH 13/23] update review 1 --- docs/ansible.utils.consolidate_filter.rst | 4 ++++ plugins/filter/consolidate.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index 01622da..709ca51 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -44,6 +44,7 @@ Parameters
list / elements=dictionary + / required
@@ -63,6 +64,7 @@ Parameters
raw + / required
@@ -81,6 +83,7 @@ Parameters
string + / required
@@ -99,6 +102,7 @@ Parameters
string + / required
diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 975059f..7c6c929 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -28,16 +28,20 @@ DOCUMENTATION = """ - For example C(facts_source|ansible.utils.consolidate(fail_missing_match_key=False))), in this case C(facts_source) represents this option. type: list elements: dict + required: True suboptions: data: description: Specify facts data that gets consolidated. type: raw + required: True match_key: description: Specify key to match on. type: str + required: True prefix: description: Specify the prefix with which the result set be created. type: str + required: True fail_missing_match_key: description: Fail if match_key is not found in a specific data set. type: bool From 89efc29ab4ce199305d3ca692ead6f04a6194fb0 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Wed, 6 Apr 2022 19:25:56 +0530 Subject: [PATCH 14/23] update added version --- docs/ansible.utils.consolidate_filter.rst | 2 +- plugins/filter/consolidate.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index 709ca51..d1a588a 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -8,7 +8,7 @@ ansible.utils.consolidate **Consolidate facts together on common attributes.** -Version added: 2.5.2 +Version added: 2.6.0 .. contents:: :local: diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 7c6c929..1219b18 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -15,7 +15,7 @@ __metaclass__ = type DOCUMENTATION = """ name: consolidate author: Sagar Paul (@KB-perByte) - version_added: "2.5.2" + version_added: "2.6.0" short_description: Consolidate facts together on common attributes. 
description: - This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned. From 14282909b4f6c54d916cd92ea818ce93e10e9527 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 7 Apr 2022 21:28:56 +0000 Subject: [PATCH 15/23] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a4b529c..aa4e7d1 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. 
From 595c9ee38fb58feb0902eb13b630e49a66e12b35 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Fri, 8 Apr 2022 23:31:45 +0530 Subject: [PATCH 16/23] review pt 1 --- README.md | 4 +- .../fragments/consolidate_filter_plugin.yml | 2 - docs/ansible.utils.consolidate_filter.rst | 90 +++++++++---------- plugins/filter/consolidate.py | 89 +++++++++--------- plugins/plugin_utils/consolidate.py | 24 ++--- .../utils_consolidate/tasks/simple.yaml | 8 +- tests/unit/plugins/filter/test_consolidate.py | 34 +++---- 7 files changed, 128 insertions(+), 123 deletions(-) diff --git a/README.md b/README.md index aa4e7d1..a4b529c 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. diff --git a/changelogs/fragments/consolidate_filter_plugin.yml b/changelogs/fragments/consolidate_filter_plugin.yml index e57e811..66e016c 100644 --- a/changelogs/fragments/consolidate_filter_plugin.yml +++ b/changelogs/fragments/consolidate_filter_plugin.yml @@ -1,5 +1,3 @@ --- minor_changes: - "'consolidate' filter plugin added." -trivial: - - Fix sanity issues and update black version. 
diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index d1a588a..1b4eb22 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -19,7 +19,7 @@ Synopsis -------- - This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned. - All other boolean parameter defaults to False unless parameters is explicitly mentioned. -- Using the parameters below- ``data_source|ansible.utils.consolidate(fail_missing_match_key=False``)) +- Using the parameters below- ``data_sources|ansible.utils.consolidate(fail_missing_match_key=False``)) @@ -39,7 +39,7 @@ Parameters
- data_source + data_sources
list @@ -98,7 +98,7 @@ Parameters
- prefix + name
string @@ -110,7 +110,7 @@ Parameters -
Specify the prefix with which the result set be created.
+
Specify the name with which the result set will be created.
@@ -126,13 +126,13 @@ Parameters
    Choices:
  • no
  • -
  • yes
  • +
  • yes ←
-
Fail if duplicate values for any key is found.
+
Fail if the match key's value exists more than once in a given data set.
@@ -147,7 +147,7 @@ Parameters
    Choices:
  • no
  • -
  • yes
  • +
  • yes ←
@@ -168,13 +168,13 @@ Parameters
    Choices:
  • no
  • -
  • yes
  • +
  • yes ←
-
Fail if a keys to match in not same accross all data sets.
+
Fail if the match key's value is not found in every data source.
@@ -280,25 +280,25 @@ Examples tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=False) }}" ##Output # ok: [localhost] => { # "ansible_facts": { - # "data_source": [ + # "data_sources": [ # { # "data": [ # { @@ -344,7 +344,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "interfaces" + # "name": "interfaces" # }, # { # "data": [ @@ -402,7 +402,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l2_interfaces" + # "name": "l2_interfaces" # }, # { # "data": [ @@ -428,7 +428,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l3_interfaces" + # "name": "l3_interfaces" # } # ] # }, @@ -663,25 +663,25 @@ Examples tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { - # "data_source": [ + # "data_sources": [ # { # "data": [ # { @@ -727,7 +727,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "interfaces" + # "name": "interfaces" # }, # 
{ # "data": [ @@ -785,7 +785,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l2_interfaces" + # "name": "l2_interfaces" # }, # { # "data": [ @@ -811,7 +811,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l3_interfaces" + # "name": "l3_interfaces" # } # ] # }, @@ -916,25 +916,25 @@ Examples tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { - # "data_source": [ + # "data_sources": [ # { # "data": [ # { @@ -980,7 +980,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "interfaces" + # "name": "interfaces" # }, # { # "data": [ @@ -1038,7 +1038,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l2_interfaces" + # "name": "l2_interfaces" # }, # { # "data": [ @@ -1064,7 +1064,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l3_interfaces" + # "name": "l3_interfaces" # } # ] # }, @@ -1173,25 +1173,25 @@ Examples tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + combined: "{{ 
data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { - # "data_source": [ + # "data_sources": [ # { # "data": [ # { @@ -1237,7 +1237,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "interfaces" + # "name": "interfaces" # }, # { # "data": [ @@ -1298,7 +1298,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l2_interfaces" + # "name": "l2_interfaces" # }, # { # "data": [ @@ -1324,7 +1324,7 @@ Examples # } # ], # "match_key": "name", - # "prefix": "l3_interfaces" + # "name": "l3_interfaces" # } # ] # }, diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index 1219b18..cb6f8e8 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -20,9 +20,9 @@ DOCUMENTATION = """ description: - This plugin presents collective structured data including all supplied facts grouping on common attributes mentioned. - All other boolean parameter defaults to False unless parameters is explicitly mentioned. - - Using the parameters below- C(data_source|ansible.utils.consolidate(fail_missing_match_key=False))) + - Using the parameters below- C(data_sources|ansible.utils.consolidate(fail_missing_match_key=False))) options: - data_source: + data_sources: description: - This option represents a list of dictionaries to perform the operation on. - For example C(facts_source|ansible.utils.consolidate(fail_missing_match_key=False))), in this case C(facts_source) represents this option. @@ -38,19 +38,22 @@ DOCUMENTATION = """ description: Specify key to match on. type: str required: True - prefix: - description: Specify the prefix with which the result set be created. + name: + description: Specify the name with which the result set be created. type: str required: True fail_missing_match_key: description: Fail if match_key is not found in a specific data set. 
type: bool + default: True fail_missing_match_value: - description: Fail if a keys to match in not same accross all data sets. + description: Fail if the match key's value is not found in every data source. type: bool + default: True fail_duplicate: - description: Fail if duplicate values for any key is found. + description: Fail if the match key's value exists more than once in a given data set. type: bool + default: True """ EXAMPLES = r""" @@ -147,25 +150,25 @@ vars_files: tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=False) }}" ##Output # ok: [localhost] => { # "ansible_facts": { -# "data_source": [ +# "data_sources": [ # { # "data": [ # { @@ -211,7 +214,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "interfaces" +# "name": "interfaces" # }, # { # "data": [ @@ -269,7 +272,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l2_interfaces" +# "name": "l2_interfaces" # }, # { # "data": [ @@ -295,7 +298,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l3_interfaces" +# "name": "l3_interfaces" # } # ] # }, @@ -530,25 +533,25 @@ vars_files: tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based 
on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=True) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { -# "data_source": [ +# "data_sources": [ # { # "data": [ # { @@ -594,7 +597,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "interfaces" +# "name": "interfaces" # }, # { # "data": [ @@ -652,7 +655,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l2_interfaces" +# "name": "l2_interfaces" # }, # { # "data": [ @@ -678,7 +681,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l3_interfaces" +# "name": "l3_interfaces" # } # ] # }, @@ -783,25 +786,25 @@ vars_files: tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name - prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_key=True) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { -# "data_source": [ +# "data_sources": [ # { # "data": [ # { @@ -847,7 +850,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "interfaces" +# "name": "interfaces" # }, # { # "data": [ @@ -905,7 +908,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l2_interfaces" +# "name": "l2_interfaces" # }, # { # "data": [ @@ -931,7 +934,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l3_interfaces" +# "name": "l3_interfaces" # } # ] # }, @@ -1040,25 +1043,25 @@ vars_files: tasks: - name: Build the facts collection set_fact: - data_source: + data_sources: - data: "{{ interfaces }}" match_key: name 
- prefix: interfaces + name: interfaces - data: "{{ l2_interfaces }}" match_key: name - prefix: l2_interfaces + name: l2_interfaces - data: "{{ l3_interfaces }}" match_key: name - prefix: l3_interfaces + name: l3_interfaces - name: Combine all the facts based on match_keys set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_duplicate=True) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" ##Output # ok: [localhost] => { # "ansible_facts": { -# "data_source": [ +# "data_sources": [ # { # "data": [ # { @@ -1104,7 +1107,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "interfaces" +# "name": "interfaces" # }, # { # "data": [ @@ -1165,7 +1168,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l2_interfaces" +# "name": "l2_interfaces" # }, # { # "data": [ @@ -1191,7 +1194,7 @@ tasks: # } # ], # "match_key": "name", -# "prefix": "l3_interfaces" +# "name": "l3_interfaces" # } # ] # }, @@ -1224,7 +1227,7 @@ def _consolidate(*args, **kwargs): """Consolidate facts together on common attributes""" keys = [ - "data_source", + "data_sources", "fail_missing_match_key", "fail_missing_match_value", "fail_duplicate", diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index 30ea170..e77de83 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -33,7 +33,7 @@ def _raise_error(filter, msg): def fail_on_filter(validator_func): - """decorator to fail on supplied filters + """Decorator to fail on supplied filters Args: validator_func (func): Function that generates failure messages @@ -43,7 +43,11 @@ def fail_on_filter(validator_func): """ def update_err(*args, **kwargs): + """Filters return value or raises error as per supplied parameters + Returns: + any: Return value to the function call + """ res, err = validator_func(*args, **kwargs) if err.get("match_key_err"): _raise_error( @@ -64,8 +68,8 @@ def fail_on_filter(validator_func): def 
check_missing_match_key_duplicate( data_sources, fail_missing_match_key, fail_duplicate ): - """Checks if the match_key specified is present in all the supplied data, - also checks for duplicate data accross all the data sources + """Check if the match_key specified is present in all the supplied data, + also check for duplicate data accross all the data sources Args: data_sources (list): list of dicts as data sources @@ -75,11 +79,11 @@ def check_missing_match_key_duplicate( list: list of unique keys based on specified match_keys """ results, errors_match_key, errors_duplicate = [], [], [] - for ds_idx, data_source in enumerate(data_sources): + for ds_idx, data_source in enumerate(data_sources, start=1): match_key = data_source["match_key"] ds_values = [] - for dd_idx, data_dict in enumerate(data_source["data"]): + for dd_idx, data_dict in enumerate(data_source["data"], start=1): try: ds_values.append(data_dict[match_key]) except KeyError: @@ -117,7 +121,7 @@ def check_missing_match_values(matched_keys, fail_missing_match_value): errors_match_values = [] all_values = set(itertools.chain.from_iterable(matched_keys)) if fail_missing_match_value: - for ds_idx, ds_values in enumerate(matched_keys): + for ds_idx, ds_values in enumerate(matched_keys, start=1): missing_match = all_values - ds_values if missing_match: m_matches = ", ".join(missing_match) @@ -143,7 +147,7 @@ def consolidate_facts(data_sources, all_values): consolidated_facts = {} for data_source in data_sources: match_key = data_source["match_key"] - source = data_source["prefix"] + source = data_source["name"] data_dict = { d[match_key]: d for d in data_source["data"] if match_key in d } @@ -155,7 +159,7 @@ def consolidate_facts(data_sources, all_values): def consolidate( - data_source, + data_sources, fail_missing_match_key=False, fail_missing_match_value=False, fail_duplicate=False, @@ -173,8 +177,8 @@ def consolidate( """ key_sets = check_missing_match_key_duplicate( - data_source, 
fail_missing_match_key, fail_duplicate + data_sources, fail_missing_match_key, fail_duplicate ) key_vals = check_missing_match_values(key_sets, fail_missing_match_value) - consolidated_facts = consolidate_facts(data_source, key_vals) + consolidated_facts = consolidate_facts(data_sources, key_vals) return consolidated_facts diff --git a/tests/integration/targets/utils_consolidate/tasks/simple.yaml b/tests/integration/targets/utils_consolidate/tasks/simple.yaml index 1bb0b2f..19081e7 100644 --- a/tests/integration/targets/utils_consolidate/tasks/simple.yaml +++ b/tests/integration/targets/utils_consolidate/tasks/simple.yaml @@ -1,14 +1,14 @@ --- - name: Build the data structure ansible.builtin.set_fact: - data_source: + data_sources: - data: [ { "name": "GigabitEthernet0/1" }, { "name": "GigabitEthernet0/2" }, ] match_key: name - prefix: acl_interfaces + name: acl_interfaces - data: [ { @@ -23,11 +23,11 @@ }, ] match_key: name - prefix: interfaces + name: interfaces - name: Combine all the dictionaries based on match_keys ansible.builtin.set_fact: - combined: "{{ data_source|ansible.utils.consolidate(fail_missing_match_value=False) }}" + combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=False) }}" - name: Assert result dicts assert: diff --git a/tests/unit/plugins/filter/test_consolidate.py b/tests/unit/plugins/filter/test_consolidate.py index b3213af..876d0ea 100644 --- a/tests/unit/plugins/filter/test_consolidate.py +++ b/tests/unit/plugins/filter/test_consolidate.py @@ -19,7 +19,7 @@ class TestConsolidate(unittest.TestCase): pass def test_consolidate_plugin(self): - data_source = [ + data_sources = [ { "data": [ { @@ -60,7 +60,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "interfaces", + "name": "interfaces", }, { "data": [ @@ -116,7 +116,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "l2_interfaces", + "name": "l2_interfaces", }, { "data": [ @@ -130,7 +130,7 
@@ class TestConsolidate(unittest.TestCase): {"name": "Loopback999"}, ], "match_key": "name", - "prefix": "l3_interfaces", + "name": "l3_interfaces", }, ] @@ -244,13 +244,13 @@ class TestConsolidate(unittest.TestCase): "l3_interfaces": {"name": "Loopback999"}, }, } - args = ["", data_source] + args = ["", data_sources] result = _consolidate(*args) self.assertEqual(result, output) def test_fail_missing_match_key(self): - data_source = [ + data_sources = [ { "data": [ { @@ -291,7 +291,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "interfaces", + "name": "interfaces", }, { "data": [ @@ -347,7 +347,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "l2_interfaces", + "name": "l2_interfaces", }, { "data": [ @@ -361,21 +361,21 @@ class TestConsolidate(unittest.TestCase): {"name": "Loopback999"}, ], "match_key": "name", - "prefix": "l3_interfaces", + "name": "l3_interfaces", }, ] fail_missing_match_key = True - args = ["", data_source, fail_missing_match_key] + args = ["", data_sources, fail_missing_match_key] with self.assertRaises(AnsibleFilterError) as error: _consolidate(*args) self.assertIn( - "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 2 in list entry 0", + "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 3 in list entry 1", str(error.exception), ) def test_fail_missing_match_value(self): - data_source = [ + data_sources = [ { "data": [ { @@ -416,7 +416,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "interfaces", + "name": "interfaces", }, { "data": [ @@ -472,7 +472,7 @@ class TestConsolidate(unittest.TestCase): }, ], "match_key": "name", - "prefix": "l2_interfaces", + "name": "l2_interfaces", }, { "data": [ @@ -490,7 +490,7 @@ class TestConsolidate(unittest.TestCase): {"name": "Loopback999"}, ], "match_key": "name", - 
"prefix": "l3_interfaces", + "name": "l3_interfaces", }, ] @@ -499,7 +499,7 @@ class TestConsolidate(unittest.TestCase): fail_duplicate = True args = [ "", - data_source, + data_sources, fail_missing_match_key, fail_missing_match_value, fail_duplicate, @@ -507,6 +507,6 @@ class TestConsolidate(unittest.TestCase): with self.assertRaises(AnsibleFilterError) as error: _consolidate(*args) self.assertIn( - "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 2", + "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 3", str(error.exception), ) From 4f4db18d41c22bac42b38c68d125c6bd72590096 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 8 Apr 2022 18:02:04 +0000 Subject: [PATCH 17/23] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a4b529c..aa4e7d1 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. 
From 1ce17fab54ce904f88d8fde1faaba14352688ae0 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Sat, 9 Apr 2022 11:28:35 +0530 Subject: [PATCH 18/23] update tests --- plugins/filter/consolidate.py | 4 ++++ plugins/plugin_utils/consolidate.py | 6 +++--- tests/unit/plugins/filter/test_consolidate.py | 15 ++++++++++++--- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index cb6f8e8..bd1ee83 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -1216,6 +1216,10 @@ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_valid AnsibleArgSpecValidator, ) +# import debugpy + +# debugpy.listen(3000) +# debugpy.wait_for_client() try: from jinja2.filters import pass_environment except ImportError: diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index e77de83..ceacca1 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -160,9 +160,9 @@ def consolidate_facts(data_sources, all_values): def consolidate( data_sources, - fail_missing_match_key=False, - fail_missing_match_value=False, - fail_duplicate=False, + fail_missing_match_key, + fail_missing_match_value, + fail_duplicate, ): """Calls data validation and consolidation functions diff --git a/tests/unit/plugins/filter/test_consolidate.py b/tests/unit/plugins/filter/test_consolidate.py index 876d0ea..222e729 100644 --- a/tests/unit/plugins/filter/test_consolidate.py +++ b/tests/unit/plugins/filter/test_consolidate.py @@ -244,7 +244,16 @@ class TestConsolidate(unittest.TestCase): "l3_interfaces": {"name": "Loopback999"}, }, } - args = ["", data_sources] + fail_missing_match_value = False + fail_missing_match_key = False + fail_duplicate = False + args = [ + "", + data_sources, + fail_missing_match_key, + fail_missing_match_value, + fail_duplicate, + ] result = _consolidate(*args) self.assertEqual(result, output) @@ -366,7 +375,7 @@ 
class TestConsolidate(unittest.TestCase): ] fail_missing_match_key = True - args = ["", data_sources, fail_missing_match_key] + args = ["", data_sources, fail_missing_match_key, False, False] with self.assertRaises(AnsibleFilterError) as error: _consolidate(*args) self.assertIn( @@ -374,7 +383,7 @@ class TestConsolidate(unittest.TestCase): str(error.exception), ) - def test_fail_missing_match_value(self): + def test_fail_duplicate(self): data_sources = [ { "data": [ From 7801c67a5c599c75f6198aa65984423777888916 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 11 Apr 2022 14:57:00 +0530 Subject: [PATCH 19/23] update err code --- plugins/plugin_utils/consolidate.py | 55 ++++++++++++++--------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index ceacca1..2303c5a 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -16,7 +16,7 @@ from ansible.errors import AnsibleFilterError import itertools -def _raise_error(filter, msg): +def _raise_error(err): """Raise an error message, prepend with filter name Args: @@ -26,9 +26,15 @@ def _raise_error(filter, msg): Raises: AnsibleFilterError: AnsibleError with filter name and message """ - error = "Error when using plugin 'consolidate': '{filter}' reported {msg}".format( - filter=filter, msg=msg + tmp_err = [] + tmplt_err = ( + "Error when using plugin 'consolidate': '{filter}' reported {msg}" ) + for filter in list(err.keys()): + if err.get(filter): + msg = ", ".join(err.get(filter)) + tmp_err.append(tmplt_err.format(filter=filter, msg=msg)) + error = "; ".join(tmp_err) raise AnsibleFilterError(error) @@ -49,16 +55,8 @@ def fail_on_filter(validator_func): any: Return value to the function call """ res, err = validator_func(*args, **kwargs) - if err.get("match_key_err"): - _raise_error( - "fail_missing_match_key", ", ".join(err["match_key_err"]) - ) - if err.get("match_val_err"): - 
_raise_error( - "fail_missing_match_value", ", ".join(err["match_val_err"]) - ) - if err.get("duplicate_err"): - _raise_error("fail_duplicate", ", ".join(err["duplicate_err"])) + if err: + _raise_error(err) return res return update_err @@ -89,7 +87,7 @@ def check_missing_match_key_duplicate( except KeyError: if fail_missing_match_key: errors_match_key.append( - "Missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format( + "missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format( match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx ) ) @@ -97,14 +95,14 @@ def check_missing_match_key_duplicate( if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate: errors_duplicate.append( - "Duplicate values in data source {ds_idx}".format( + "duplicate values in data source {ds_idx}".format( ds_idx=ds_idx ) ) results.append(set(ds_values)) return results, { - "match_key_err": errors_match_key, - "duplicate_err": errors_duplicate, + "fail_missing_match_key": errors_match_key, + "fail_duplicate": errors_duplicate, } @@ -118,19 +116,20 @@ def check_missing_match_values(matched_keys, fail_missing_match_value): Returns: set: set of unique values """ - errors_match_values = [] all_values = set(itertools.chain.from_iterable(matched_keys)) - if fail_missing_match_value: - for ds_idx, ds_values in enumerate(matched_keys, start=1): - missing_match = all_values - ds_values - if missing_match: - m_matches = ", ".join(missing_match) - errors_match_values.append( - "Missing match value {m_matches} in data source {ds_idx}".format( - ds_idx=ds_idx, m_matches=m_matches - ) + if not fail_missing_match_value: + return all_values, {} + errors_match_values = [] + for ds_idx, ds_values in enumerate(matched_keys, start=1): + missing_match = all_values - ds_values + if missing_match: + m_matches = ", ".join(missing_match) + errors_match_values.append( + "missing match value {m_matches} in data source {ds_idx}".format( + ds_idx=ds_idx, 
m_matches=m_matches ) - return all_values, {"match_val_err": errors_match_values} + ) + return all_values, {"fail_missing_match_value": errors_match_values} def consolidate_facts(data_sources, all_values): From ca31a4e474d68df2e675b9e23489d616e2074973 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 11 Apr 2022 16:13:46 +0530 Subject: [PATCH 20/23] update docs --- README.md | 4 +- docs/ansible.utils.consolidate_filter.rst | 238 ++++++++++++++++- plugins/filter/consolidate.py | 242 +++++++++++++++++- plugins/plugin_utils/consolidate.py | 8 +- tests/integration/inventory | 2 + tests/unit/plugins/filter/test_consolidate.py | 4 +- 6 files changed, 485 insertions(+), 13 deletions(-) create mode 100644 tests/integration/inventory diff --git a/README.md b/README.md index aa4e7d1..a4b529c 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index 1b4eb22..0c75662 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -188,8 +188,242 @@ Examples .. 
code-block:: yaml - # Consolidated facts example - # -------------------------- + # Consolidated filter plugin example + # ---------------------------------- + + ##play.yml + tasks: + - name: Define some test data + ansible.builtin.set_fact: + values: + - name: a + value: 1 + - name: b + value: 2 + - name: c + value: 3 + colors: + - name: a + color: red + - name: b + color: green + - name: c + color: blue + + - name: Define some test data + ansible.builtin.set_fact: + base_data: + - data: "{{ values }}" + match_key: name + name: values + - data: "{{ colors }}" + match_key: name + name: colors + + - name: Consolidate the data source using the name key + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" + vars: + sizes: + - name: a + size: small + - name: b + size: medium + - name: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + + ##Output + + # ok: [localhost] => { + # "ansible_facts": { + # "consolidated": { + # "a": { + # "colors": { + # "color": "red", + # "name": "a" + # }, + # "sizes": { + # "name": "a", + # "size": "small" + # }, + # "values": { + # "name": "a", + # "value": 1 + # } + # }, + # "b": { + # "colors": { + # "color": "green", + # "name": "b" + # }, + # "sizes": { + # "name": "b", + # "size": "medium" + # }, + # "values": { + # "name": "b", + # "value": 2 + # } + # }, + # "c": { + # "colors": { + # "color": "blue", + # "name": "c" + # }, + # "sizes": { + # "name": "c", + # "size": "large" + # }, + # "values": { + # "name": "c", + # "value": 3 + # } + # } + # } + # }, + # "changed": false + # } + + - name: Consolidate the data source using different keys + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" + vars: + sizes: + - title: a + size: small + - title: b + size: medium + - title: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: 
title + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + + ##Output + + # ok: [localhost] => { + # "ansible_facts": { + # "consolidated": { + # "a": { + # "colors": { + # "color": "red", + # "name": "a" + # }, + # "sizes": { + # "size": "small", + # "title": "a" + # }, + # "values": { + # "name": "a", + # "value": 1 + # } + # }, + # "b": { + # "colors": { + # "color": "green", + # "name": "b" + # }, + # "sizes": { + # "size": "medium", + # "title": "b" + # }, + # "values": { + # "name": "b", + # "value": 2 + # } + # }, + # "c": { + # "colors": { + # "color": "blue", + # "name": "c" + # }, + # "sizes": { + # "size": "large", + # "title": "c" + # }, + # "values": { + # "name": "c", + # "value": 3 + # } + # } + # } + # }, + # "changed": false + # } + + - name: Consolidate the data source using the name key (fail_missing_match_key) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" + ignore_errors: true + vars: + vars: + sizes: + - size: small + - size: medium + - size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + + ##Output + + # fatal: [localhost]: FAILED! 
=> { + # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' + # reported missing match key 'name' in data source 3 in list entry 1, + # missing match key 'name' in data source 3 in list entry 2, + # missing match key 'name' in data source 3 in list entry 3" + # } + + - name: Consolidate the data source using the name key (fail_missing_match_value) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: b + size: medium + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + + # fatal: [localhost]: FAILED! => { + # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' + # reported missing match value c in data source 3" + # } + + - name: Consolidate the data source using the name key (fail_duplicate) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: a + size: small + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + + # fatal: [localhost]: FAILED! 
=> { + # "msg": "Error when using plugin 'consolidate': 'fail_duplicate' + # reported duplicate values in data source 3" + # } ##facts.yml diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index bd1ee83..d6f42ec 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -58,8 +58,242 @@ DOCUMENTATION = """ EXAMPLES = r""" -# Consolidated facts example -# -------------------------- +# Consolidated filter plugin example +# ---------------------------------- + +##play.yml +tasks: + - name: Define some test data + ansible.builtin.set_fact: + values: + - name: a + value: 1 + - name: b + value: 2 + - name: c + value: 3 + colors: + - name: a + color: red + - name: b + color: green + - name: c + color: blue + + - name: Define some test data + ansible.builtin.set_fact: + base_data: + - data: "{{ values }}" + match_key: name + name: values + - data: "{{ colors }}" + match_key: name + name: colors + + - name: Consolidate the data source using the name key + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" + vars: + sizes: + - name: a + size: small + - name: b + size: medium + - name: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + +##Output + +# ok: [localhost] => { +# "ansible_facts": { +# "consolidated": { +# "a": { +# "colors": { +# "color": "red", +# "name": "a" +# }, +# "sizes": { +# "name": "a", +# "size": "small" +# }, +# "values": { +# "name": "a", +# "value": 1 +# } +# }, +# "b": { +# "colors": { +# "color": "green", +# "name": "b" +# }, +# "sizes": { +# "name": "b", +# "size": "medium" +# }, +# "values": { +# "name": "b", +# "value": 2 +# } +# }, +# "c": { +# "colors": { +# "color": "blue", +# "name": "c" +# }, +# "sizes": { +# "name": "c", +# "size": "large" +# }, +# "values": { +# "name": "c", +# "value": 3 +# } +# } +# } +# }, +# "changed": false +# } + +- name: 
Consolidate the data source using different keys + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" + vars: + sizes: + - title: a + size: small + - title: b + size: medium + - title: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: title + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + +##Output + +# ok: [localhost] => { +# "ansible_facts": { +# "consolidated": { +# "a": { +# "colors": { +# "color": "red", +# "name": "a" +# }, +# "sizes": { +# "size": "small", +# "title": "a" +# }, +# "values": { +# "name": "a", +# "value": 1 +# } +# }, +# "b": { +# "colors": { +# "color": "green", +# "name": "b" +# }, +# "sizes": { +# "size": "medium", +# "title": "b" +# }, +# "values": { +# "name": "b", +# "value": 2 +# } +# }, +# "c": { +# "colors": { +# "color": "blue", +# "name": "c" +# }, +# "sizes": { +# "size": "large", +# "title": "c" +# }, +# "values": { +# "name": "c", +# "value": 3 +# } +# } +# } +# }, +# "changed": false +# } + +- name: Consolidate the data source using the name key (fail_missing_match_key) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" + ignore_errors: true + vars: + vars: + sizes: + - size: small + - size: medium + - size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + +##Output + +# fatal: [localhost]: FAILED! 
=> { +# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key' +# reported missing match key 'name' in data source 3 in list entry 1, +# missing match key 'name' in data source 3 in list entry 2, +# missing match key 'name' in data source 3 in list entry 3" +# } + +- name: Consolidate the data source using the name key (fail_missing_match_value) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: b + size: medium + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + +# fatal: [localhost]: FAILED! => { +# "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' +# reported missing match value c in data source 3" +# } + +- name: Consolidate the data source using the name key (fail_duplicate) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: a + size: small + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" + +# fatal: [localhost]: FAILED! 
=> { +# "msg": "Error when using plugin 'consolidate': 'fail_duplicate' +# reported duplicate values in data source 3" +# } ##facts.yml @@ -1216,10 +1450,6 @@ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_valid AnsibleArgSpecValidator, ) -# import debugpy - -# debugpy.listen(3000) -# debugpy.wait_for_client() try: from jinja2.filters import pass_environment except ImportError: diff --git a/plugins/plugin_utils/consolidate.py b/plugins/plugin_utils/consolidate.py index 2303c5a..242b2d4 100644 --- a/plugins/plugin_utils/consolidate.py +++ b/plugins/plugin_utils/consolidate.py @@ -55,7 +55,13 @@ def fail_on_filter(validator_func): any: Return value to the function call """ res, err = validator_func(*args, **kwargs) - if err: + if any( + [ + err.get("fail_missing_match_key"), + err.get("fail_duplicate"), + err.get("fail_missing_match_value"), + ] + ): _raise_error(err) return res diff --git a/tests/integration/inventory b/tests/integration/inventory new file mode 100644 index 0000000..646a212 --- /dev/null +++ b/tests/integration/inventory @@ -0,0 +1,2 @@ +[testgroup] +testhost ansible_connection="local" ansible_pipelining="yes" ansible_python_interpreter="/home/sagpaul/MyVnvs/dev39A212/bin/python" \ No newline at end of file diff --git a/tests/unit/plugins/filter/test_consolidate.py b/tests/unit/plugins/filter/test_consolidate.py index 222e729..8b7a4c8 100644 --- a/tests/unit/plugins/filter/test_consolidate.py +++ b/tests/unit/plugins/filter/test_consolidate.py @@ -379,7 +379,7 @@ class TestConsolidate(unittest.TestCase): with self.assertRaises(AnsibleFilterError) as error: _consolidate(*args) self.assertIn( - "Error when using plugin 'consolidate': 'fail_missing_match_key' reported Missing match key 'name' in data source 3 in list entry 1", + "Error when using plugin 'consolidate': 'fail_missing_match_key' reported missing match key 'name' in data source 3 in list entry 1", str(error.exception), ) @@ -516,6 +516,6 @@ class 
TestConsolidate(unittest.TestCase): with self.assertRaises(AnsibleFilterError) as error: _consolidate(*args) self.assertIn( - "Error when using plugin 'consolidate': 'fail_duplicate' reported Duplicate values in data source 3", + "Error when using plugin 'consolidate': 'fail_duplicate' reported duplicate values in data source 3", str(error.exception), ) From ceb7db1cd3ecd44d15b198833c57e924e6c1e1e9 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 11 Apr 2022 16:15:08 +0530 Subject: [PATCH 21/23] del extra --- tests/integration/inventory | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 tests/integration/inventory diff --git a/tests/integration/inventory b/tests/integration/inventory deleted file mode 100644 index 646a212..0000000 --- a/tests/integration/inventory +++ /dev/null @@ -1,2 +0,0 @@ -[testgroup] -testhost ansible_connection="local" ansible_pipelining="yes" ansible_python_interpreter="/home/sagpaul/MyVnvs/dev39A212/bin/python" \ No newline at end of file From 23d80fd8cfecdb5cdc9ee6e776e2c6cd084e49b2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 10:45:48 +0000 Subject: [PATCH 22/23] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a4b529c..aa4e7d1 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The Ansible ``ansible.utils`` collection includes a variety of plugins that aid This collection has been tested against following Ansible versions: **>=2.9.10**. -For collections that support Ansible 2.9, please ensure you update your `network_os` to use the -fully qualified collection name (for example, `cisco.ios.ios`). +For collections that support Ansible 2.9, please ensure you update your `network_os` to use the +fully qualified collection name (for example, `cisco.ios.ios`). 
Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. PEP440 is the schema used to describe the versions of Ansible. From 6110081f19ce2271880928ad9d2565d247baa029 Mon Sep 17 00:00:00 2001 From: KB-perByte Date: Mon, 11 Apr 2022 17:20:49 +0530 Subject: [PATCH 23/23] sanity fix --- docs/ansible.utils.consolidate_filter.rst | 120 +++++++++++----------- plugins/filter/consolidate.py | 120 +++++++++++----------- 2 files changed, 120 insertions(+), 120 deletions(-) diff --git a/docs/ansible.utils.consolidate_filter.rst b/docs/ansible.utils.consolidate_filter.rst index 0c75662..e200312 100644 --- a/docs/ansible.utils.consolidate_filter.rst +++ b/docs/ansible.utils.consolidate_filter.rst @@ -289,22 +289,22 @@ Examples # "changed": false # } - - name: Consolidate the data source using different keys - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate }}" - vars: - sizes: - - title: a - size: small - - title: b - size: medium - - title: c - size: large - additional_data_source: - - data: "{{ sizes }}" - match_key: title - name: sizes - data_sources: "{{ base_data + additional_data_source }}" + name: Consolidate the data source using different keys + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" + vars: + sizes: + - title: a + size: small + - title: b + size: medium + - title: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: title + name: sizes + data_sources: "{{ base_data + additional_data_source }}" ##Output @@ -358,21 +358,21 @@ Examples # "changed": false # } - - name: Consolidate the data source using the name key (fail_missing_match_key) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" - ignore_errors: true + name: Consolidate the data source using the name key (fail_missing_match_key) + 
ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" + ignore_errors: true + vars: vars: - vars: - sizes: - - size: small - - size: medium - - size: large - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" + sizes: + - size: small + - size: medium + - size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" ##Output @@ -383,42 +383,42 @@ Examples # missing match key 'name' in data source 3 in list entry 3" # } - - name: Consolidate the data source using the name key (fail_missing_match_value) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" - ignore_errors: true - vars: - sizes: - - name: a - size: small - - name: b - size: medium - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" + name: Consolidate the data source using the name key (fail_missing_match_value) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: b + size: medium + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" # fatal: [localhost]: FAILED! 
=> { # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' # reported missing match value c in data source 3" # } - - name: Consolidate the data source using the name key (fail_duplicate) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" - ignore_errors: true - vars: - sizes: - - name: a - size: small - - name: a - size: small - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" + name: Consolidate the data source using the name key (fail_duplicate) + ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" + ignore_errors: true + vars: + sizes: + - name: a + size: small + - name: a + size: small + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" # fatal: [localhost]: FAILED! => { # "msg": "Error when using plugin 'consolidate': 'fail_duplicate' diff --git a/plugins/filter/consolidate.py b/plugins/filter/consolidate.py index d6f42ec..329b887 100644 --- a/plugins/filter/consolidate.py +++ b/plugins/filter/consolidate.py @@ -159,22 +159,22 @@ tasks: # "changed": false # } -- name: Consolidate the data source using different keys - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate }}" - vars: - sizes: - - title: a - size: small - - title: b - size: medium - - title: c - size: large - additional_data_source: - - data: "{{ sizes }}" - match_key: title - name: sizes - data_sources: "{{ base_data + additional_data_source }}" +name: Consolidate the data source using different keys +ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate }}" +vars: + sizes: + - title: a + size: small + - title: b + size: medium + - title: c + size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: 
title + name: sizes + data_sources: "{{ base_data + additional_data_source }}" ##Output @@ -228,21 +228,21 @@ tasks: # "changed": false # } -- name: Consolidate the data source using the name key (fail_missing_match_key) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" - ignore_errors: true +name: Consolidate the data source using the name key (fail_missing_match_key) +ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}" +ignore_errors: true +vars: vars: - vars: - sizes: - - size: small - - size: medium - - size: large - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" + sizes: + - size: small + - size: medium + - size: large + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" ##Output @@ -253,42 +253,42 @@ tasks: # missing match key 'name' in data source 3 in list entry 3" # } -- name: Consolidate the data source using the name key (fail_missing_match_value) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" - ignore_errors: true - vars: - sizes: - - name: a - size: small - - name: b - size: medium - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" +name: Consolidate the data source using the name key (fail_missing_match_value) +ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}" +ignore_errors: true +vars: + sizes: + - name: a + size: small + - name: b + size: medium + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" # fatal: [localhost]: 
FAILED! => { # "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value' # reported missing match value c in data source 3" # } -- name: Consolidate the data source using the name key (fail_duplicate) - ansible.builtin.set_fact: - consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" - ignore_errors: true - vars: - sizes: - - name: a - size: small - - name: a - size: small - additional_data_source: - - data: "{{ sizes }}" - match_key: name - name: sizes - data_sources: "{{ base_data + additional_data_source }}" +name: Consolidate the data source using the name key (fail_duplicate) +ansible.builtin.set_fact: + consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}" +ignore_errors: true +vars: + sizes: + - name: a + size: small + - name: a + size: small + additional_data_source: + - data: "{{ sizes }}" + match_key: name + name: sizes + data_sources: "{{ base_data + additional_data_source }}" # fatal: [localhost]: FAILED! => { # "msg": "Error when using plugin 'consolidate': 'fail_duplicate'