Merge pull request #156 from KB-perByte/consolidate

Consolidate filter plugin

SUMMARY

Consolidate filter plugin
This plugin consolidates all of the supplied facts into a single structured dictionary, grouping the entries of each data source on the common attribute named by its match_key.

ISSUE TYPE


New Module Pull Request

COMPONENT NAME

ansible.utils.consolidate
ADDITIONAL INFORMATION



- hosts: localhost
  gather_facts: false
  tasks:
    - name: Define some test data
      ansible.builtin.set_fact:
        values:
          - name: a
            value: 1
          - name: b
            value: 2
          - name: c
            value: 3
        colors:
          - name: a
            color: red
          - name: b
            color: green
          - name: c
            color: blue

    - name: Define the base data sources
      ansible.builtin.set_fact:
        base_data:
          - data: "{{ values }}"
            match_key: name
            name: values
          - data: "{{ colors }}"
            match_key: name
            name: colors

    - name: Consolidate the data source using the name key
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate }}"
      vars:
        sizes:
          - name: a
            size: small
          - name: b
            size: medium
          - name: c
            size: large
        additional_data_source:
          - data: "{{ sizes }}"
            match_key: name
            name: sizes
        data_sources: "{{ base_data + additional_data_source }}"

    # consolidated:
    #   a:
    #     colors:
    #       color: red
    #       name: a
    #     sizes:
    #       name: a
    #       size: small
    #     values:
    #       name: a
    #       value: 1
    #   b:
    #     colors:
    #       color: green
    #       name: b
    #     sizes:
    #       name: b
    #       size: medium
    #     values:
    #       name: b
    #       value: 2
    #   c:
    #     colors:
    #       color: blue
    #       name: c
    #     sizes:
    #       name: c
    #       size: large
    #     values:
    #       name: c
    #       value: 3

    - name: Consolidate the data source using different keys
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate }}"
      vars:
        sizes:
          - title: a
            size: small
          - title: b
            size: medium
          - title: c
            size: large
        additional_data_source:
          - data: "{{ sizes }}"
            match_key: title
            name: sizes
        data_sources: "{{ base_data + additional_data_source }}"

    # consolidated:
    #   a:
    #     colors:
    #       color: red
    #       name: a
    #     sizes:
    #       size: small
    #       title: a
    #     values:
    #       name: a
    #       value: 1
    #   b:
    #     colors:
    #       color: green
    #       name: b
    #     sizes:
    #       size: medium
    #       title: b
    #     values:
    #       name: b
    #       value: 2
    #   c:
    #     colors:
    #       color: blue
    #       name: c
    #     sizes:
    #       size: large
    #       title: c
    #     values:
    #       name: c
    #       value: 3

    - name: Consolidate the data source using the name key (fail_missing_match_key)
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True) }}"
      ignore_errors: true
      vars:
        sizes:
          - size: small
          - size: medium
          - size: large
        additional_data_source:
          - data: "{{ sizes }}"
            match_key: name
            name: sizes
        data_sources: "{{ base_data + additional_data_source }}"

    # fatal: [localhost]: FAILED! => {
    #     "msg": "Error when using plugin 'consolidate': 'fail_missing_match_key'
    #               reported missing match key 'name' in data source 3 in list entry 1,
    #                        missing match key 'name' in data source 3 in list entry 2,
    #                        missing match key 'name' in data source 3 in list entry 3"
    # }

    - name: Consolidate the data source using the name key (fail_missing_match_value)
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=True) }}"
      ignore_errors: true
      vars:
        sizes:
          - name: a
            size: small
          - name: b
            size: medium
        additional_data_source:
          - data: "{{ sizes }}"
            match_key: name
            name: sizes
        data_sources: "{{ base_data + additional_data_source }}"

    # fatal: [localhost]: FAILED! => {
    #     "msg": "Error when using plugin 'consolidate': 'fail_missing_match_value'
    #               reported missing match value c in data source 3"
    # }

    - name: Consolidate the data source using the name key (fail_duplicate)
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate(fail_duplicate=True) }}"
      ignore_errors: true
      vars:
        sizes:
          - name: a
            size: small
          - name: a
            size: small
        additional_data_source:
          - data: "{{ sizes }}"
            match_key: name
            name: sizes
        data_sources: "{{ base_data + additional_data_source }}"

    # fatal: [localhost]: FAILED! => {
    #     "msg": "Error when using plugin 'consolidate': 'fail_duplicate' reported duplicate values in data source 3"
    # }
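
    # The three failure-handling flags are independent and can be combined in
    # a single call; a minimal sketch (not part of this PR's examples), using
    # only the two base sources, which satisfy every check:
    - name: Consolidate with all safety checks enabled
      ansible.builtin.set_fact:
        consolidated: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_key=True, fail_missing_match_value=True, fail_duplicate=True) }}"
      vars:
        data_sources: "{{ base_data }}"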

Reviewed-by: Ashwini Mhatre <mashu97@gmail.com>
Reviewed-by: Sagar Paul <sagpaul@redhat.com>
Reviewed-by: Bradley A. Thornton <bthornto@redhat.com>
softwarefactory-project-zuul[bot] 2022-04-11 12:56:51 +00:00 committed by GitHub
commit b9efd5727d
9 changed files with 3853 additions and 0 deletions


@@ -24,6 +24,7 @@ PEP440 is the schema used to describe the versions of Ansible.
Name | Description
--- | ---
[ansible.utils.cidr_merge](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.cidr_merge_filter.rst)|This filter can be used to merge subnets or individual addresses.
[ansible.utils.consolidate](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.consolidate_filter.rst)|Consolidate facts together on common attributes.
[ansible.utils.from_xml](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.from_xml_filter.rst)|Convert given XML string to native python dictionary.
[ansible.utils.get_path](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.get_path_filter.rst)|Retrieve the value in a variable using a path
[ansible.utils.hwaddr](https://github.com/ansible-collections/ansible.utils/blob/main/docs/ansible.utils.hwaddr_filter.rst)|HWaddr / MAC address filters


@@ -0,0 +1,3 @@
---
minor_changes:
- "'consolidate' filter plugin added."

File diff suppressed because it is too large

File diff suppressed because it is too large
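
The suppressed diffs likely include the filter entry point (the unit tests below import it from plugins/filter/consolidate.py). As a rough, hedged sketch of how such an entry point typically exposes the plugin_utils implementation shown below — the wrapper name and argument handling here are assumptions, not the suppressed code:

from ansible_collections.ansible.utils.plugins.plugin_utils.consolidate import (
    consolidate,
)


def _consolidate(*args, **kwargs):
    # Hypothetical wrapper: the unit tests below pass a leading placeholder
    # argument (the Jinja environment slot), so it is dropped before mapping
    # the remaining positional arguments onto the implementation's parameters.
    keys = [
        "data_sources",
        "fail_missing_match_key",
        "fail_missing_match_value",
        "fail_duplicate",
    ]
    data = dict(zip(keys, args[1:]))
    data.update(kwargs)
    return consolidate(**data)


class FilterModule(object):
    """consolidate filter"""

    def filters(self):
        """A mapping of filter names to functions"""
        return {"consolidate": _consolidate}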


@@ -0,0 +1,189 @@
#
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
"""
The consolidate plugin code
"""
from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible.errors import AnsibleFilterError
import itertools


def _raise_error(err):
    """Raise an AnsibleFilterError, prefixed with the plugin name

    Args:
        err (dict): Lists of failure messages, keyed by the option that collected them

    Raises:
        AnsibleFilterError: AnsibleError with the option name and its messages
    """
    tmp_err = []
    tmplt_err = (
        "Error when using plugin 'consolidate': '{filter}' reported {msg}"
    )
    for filter in list(err.keys()):
        if err.get(filter):
            msg = ", ".join(err.get(filter))
            tmp_err.append(tmplt_err.format(filter=filter, msg=msg))
    error = "; ".join(tmp_err)
    raise AnsibleFilterError(error)


def fail_on_filter(validator_func):
    """Decorator to fail on supplied filters

    Args:
        validator_func (func): Function that generates failure messages

    Returns:
        raw: Value without errors if generated and not failed
    """

    def update_err(*args, **kwargs):
        """Returns the validator's result or raises an error, as per the supplied parameters

        Returns:
            any: Return value to the function call
        """
        res, err = validator_func(*args, **kwargs)
        if any(
            [
                err.get("fail_missing_match_key"),
                err.get("fail_duplicate"),
                err.get("fail_missing_match_value"),
            ]
        ):
            _raise_error(err)
        return res

    return update_err
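

# Example (sketch): each decorated validator below returns a (result, errors)
# tuple; update_err raises via _raise_error when any of the three error lists
# is populated, and otherwise unwraps the tuple and returns only the result,
# so callers never see the errors dict.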
@fail_on_filter
def check_missing_match_key_duplicate(
    data_sources, fail_missing_match_key, fail_duplicate
):
    """Check that the specified match_key is present in all the supplied data,
    and check for duplicate data across all the data sources

    Args:
        data_sources (list): list of dicts as data sources
        fail_missing_match_key (bool): fail if the match_key is not present in a data set
        fail_duplicate (bool): fail if duplicate data is present in a data set

    Returns:
        list: list of sets of unique match_key values, one per data source
    """
    results, errors_match_key, errors_duplicate = [], [], []
    for ds_idx, data_source in enumerate(data_sources, start=1):
        match_key = data_source["match_key"]
        ds_values = []
        for dd_idx, data_dict in enumerate(data_source["data"], start=1):
            try:
                ds_values.append(data_dict[match_key])
            except KeyError:
                if fail_missing_match_key:
                    errors_match_key.append(
                        "missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format(
                            match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx
                        )
                    )
                continue
        if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
            errors_duplicate.append(
                "duplicate values in data source {ds_idx}".format(
                    ds_idx=ds_idx
                )
            )
        results.append(set(ds_values))
    return results, {
        "fail_missing_match_key": errors_match_key,
        "fail_duplicate": errors_duplicate,
    }
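

# Example (sketch): for the values/colors/sizes play in the PR summary, the
# decorated call returns [{"a", "b", "c"}, {"a", "b", "c"}, {"a", "b", "c"}]
# and both error lists stay empty, so no exception is raised.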
@fail_on_filter
def check_missing_match_values(matched_keys, fail_missing_match_value):
    """Check that the match_key values are consistent across all of the data sources

    Args:
        matched_keys (list): list of sets of unique match_key values, one per data source
        fail_missing_match_value (bool): fail if a match_key value is missing from a data set

    Returns:
        set: set of unique values
    """
    all_values = set(itertools.chain.from_iterable(matched_keys))
    if not fail_missing_match_value:
        return all_values, {}
    errors_match_values = []
    for ds_idx, ds_values in enumerate(matched_keys, start=1):
        missing_match = all_values - ds_values
        if missing_match:
            m_matches = ", ".join(missing_match)
            errors_match_values.append(
                "missing match value {m_matches} in data source {ds_idx}".format(
                    ds_idx=ds_idx, m_matches=m_matches
                )
            )
    return all_values, {"fail_missing_match_value": errors_match_values}
def consolidate_facts(data_sources, all_values):
    """Iterate over all the data sources and consolidate the data

    Args:
        data_sources (list): supplied data sources
        all_values (set): a set of keys to iterate over

    Returns:
        dict: dict of consolidated data, keyed on the match_key values
    """
    consolidated_facts = {}
    for data_source in data_sources:
        match_key = data_source["match_key"]
        source = data_source["name"]
        data_dict = {
            d[match_key]: d for d in data_source["data"] if match_key in d
        }
        for value in sorted(all_values):
            if value not in consolidated_facts:
                consolidated_facts[value] = {}
            consolidated_facts[value][source] = data_dict.get(value, {})
    return consolidated_facts
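

# Example (sketch): with the summary play's data sources and
# all_values == {"a", "b", "c"}, this builds
# {"a": {"values": {...}, "colors": {...}, "sizes": {...}}, "b": ..., "c": ...},
# filling in {} for any source that lacks a given match value.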
def consolidate(
    data_sources,
    fail_missing_match_key,
    fail_missing_match_value,
    fail_duplicate,
):
    """Calls data validation and consolidation functions

    Args:
        data_sources (list): list of dicts as data sources
        fail_missing_match_key (bool): fail if the match_key is not present in a data set
        fail_missing_match_value (bool): fail if a matching attribute is missing from a data set
        fail_duplicate (bool): fail if duplicate data is present in a data set

    Returns:
        dict: dict of validated and consolidated data
    """
    key_sets = check_missing_match_key_duplicate(
        data_sources, fail_missing_match_key, fail_duplicate
    )
    key_vals = check_missing_match_values(key_sets, fail_missing_match_value)
    consolidated_facts = consolidate_facts(data_sources, key_vals)
    return consolidated_facts


@@ -0,0 +1,14 @@
---
- name: Recursively find all test files
  find:
    file_type: file
    paths: "{{ role_path }}/tasks"
    recurse: false
    use_regex: true
    patterns:
      - '^(?!_|main).+$'
  delegate_to: localhost
  register: found

- include: "{{ item.path }}"
  loop: "{{ found.files }}"


@@ -0,0 +1,35 @@
---
- name: Build the data structure
  ansible.builtin.set_fact:
    data_sources:
      - data:
          [
            { "name": "GigabitEthernet0/1" },
            { "name": "GigabitEthernet0/2" },
          ]
        match_key: name
        name: acl_interfaces
      - data:
          [
            {
              "description": "This is a user template",
              "enabled": True,
              "name": "GigabitEthernet0/1",
            },
            {
              "description": "This is a user template",
              "enabled": True,
              "name": "GigabitEthernet0/2",
            },
          ]
        match_key: name
        name: interfaces

- name: Combine all the dictionaries based on match_keys
  ansible.builtin.set_fact:
    combined: "{{ data_sources|ansible.utils.consolidate(fail_missing_match_value=False) }}"

- name: Assert result dicts
  assert:
    that:
      - combined == combined_facts


@@ -0,0 +1,16 @@
---
combined_facts:
  GigabitEthernet0/1:
    acl_interfaces:
      name: GigabitEthernet0/1
    interfaces:
      description: This is a user template
      enabled: true
      name: GigabitEthernet0/1
  GigabitEthernet0/2:
    acl_interfaces:
      name: GigabitEthernet0/2
    interfaces:
      description: This is a user template
      enabled: true
      name: GigabitEthernet0/2
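
For reference, the same expectation can be reproduced outside Ansible with the consolidate function shown above; a minimal sketch, assuming the collection is installed and the implementation lives at the collection's plugin_utils path:

from ansible_collections.ansible.utils.plugins.plugin_utils.consolidate import (
    consolidate,
)

# The two data sources from the integration test task above.
data_sources = [
    {
        "data": [
            {"name": "GigabitEthernet0/1"},
            {"name": "GigabitEthernet0/2"},
        ],
        "match_key": "name",
        "name": "acl_interfaces",
    },
    {
        "data": [
            {
                "description": "This is a user template",
                "enabled": True,
                "name": "GigabitEthernet0/1",
            },
            {
                "description": "This is a user template",
                "enabled": True,
                "name": "GigabitEthernet0/2",
            },
        ],
        "match_key": "name",
        "name": "interfaces",
    },
]

# All three failure flags off, matching the integration test's defaults.
combined = consolidate(data_sources, False, False, False)
assert combined["GigabitEthernet0/1"]["interfaces"]["enabled"] is True
assert combined["GigabitEthernet0/2"]["acl_interfaces"] == {"name": "GigabitEthernet0/2"}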


@@ -0,0 +1,521 @@
# -*- coding: utf-8 -*-
# Copyright 2021 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function

__metaclass__ = type

import unittest

from ansible.errors import AnsibleFilterError
from ansible_collections.ansible.utils.plugins.filter.consolidate import (
    _consolidate,
)


class TestConsolidate(unittest.TestCase):
    def setUp(self):
        pass

    def test_consolidate_plugin(self):
        data_sources = [
            {
                "data": [
                    {
                        "duplex": "auto",
                        "enabled": True,
                        "name": "GigabitEthernet0/0",
                        "note": ["Connected green wire"],
                        "speed": "auto",
                    },
                    {
                        "description": "Configured by Ansible - Interface 1",
                        "duplex": "auto",
                        "enabled": True,
                        "mtu": 1500,
                        "name": "GigabitEthernet0/1",
                        "note": ["Connected blue wire", "Configured by Paul"],
                        "speed": "auto",
                        "vifs": [
                            {
                                "comment": "Needs reconfiguration",
                                "description": "Eth1 - VIF 100",
                                "enabled": True,
                                "mtu": 400,
                                "vlan_id": 100,
                            },
                            {
                                "description": "Eth1 - VIF 101",
                                "enabled": True,
                                "vlan_id": 101,
                            },
                        ],
                    },
                    {
                        "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)",
                        "enabled": False,
                        "mtu": 600,
                        "name": "GigabitEthernet0/2",
                    },
                ],
                "match_key": "name",
                "name": "interfaces",
            },
            {
                "data": [
                    {"name": "GigabitEthernet0/0"},
                    {
                        "mode": "access",
                        "name": "GigabitEthernet0/1",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ]
                        },
                    },
                    {
                        "mode": "trunk",
                        "name": "GigabitEthernet0/2",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ],
                            "encapsulation": "dot1q",
                        },
                    },
                ],
                "match_key": "name",
                "name": "l2_interfaces",
            },
            {
                "data": [
                    {
                        "ipv4": [{"address": "192.168.0.2/24"}],
                        "name": "GigabitEthernet0/0",
                    },
                    {"name": "GigabitEthernet0/1"},
                    {"name": "GigabitEthernet0/2"},
                    {"name": "Loopback888"},
                    {"name": "Loopback999"},
                ],
                "match_key": "name",
                "name": "l3_interfaces",
            },
        ]
        output = {
            "GigabitEthernet0/0": {
                "interfaces": {
                    "duplex": "auto",
                    "enabled": True,
                    "name": "GigabitEthernet0/0",
                    "note": ["Connected green wire"],
                    "speed": "auto",
                },
                "l2_interfaces": {"name": "GigabitEthernet0/0"},
                "l3_interfaces": {
                    "ipv4": [{"address": "192.168.0.2/24"}],
                    "name": "GigabitEthernet0/0",
                },
            },
            "GigabitEthernet0/1": {
                "interfaces": {
                    "description": "Configured by Ansible - Interface 1",
                    "duplex": "auto",
                    "enabled": True,
                    "mtu": 1500,
                    "name": "GigabitEthernet0/1",
                    "note": ["Connected blue wire", "Configured by Paul"],
                    "speed": "auto",
                    "vifs": [
                        {
                            "comment": "Needs reconfiguration",
                            "description": "Eth1 - VIF 100",
                            "enabled": True,
                            "mtu": 400,
                            "vlan_id": 100,
                        },
                        {
                            "description": "Eth1 - VIF 101",
                            "enabled": True,
                            "vlan_id": 101,
                        },
                    ],
                },
                "l2_interfaces": {
                    "mode": "access",
                    "name": "GigabitEthernet0/1",
                    "trunk": {
                        "allowed_vlans": [
                            "11", "12", "59", "67", "75", "77", "81", "100",
                            "400-408", "411-413", "415", "418", "982",
                            "986", "988", "993",
                        ]
                    },
                },
                "l3_interfaces": {"name": "GigabitEthernet0/1"},
            },
            "GigabitEthernet0/2": {
                "interfaces": {
                    "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)",
                    "enabled": False,
                    "mtu": 600,
                    "name": "GigabitEthernet0/2",
                },
                "l2_interfaces": {
                    "mode": "trunk",
                    "name": "GigabitEthernet0/2",
                    "trunk": {
                        "allowed_vlans": [
                            "11", "12", "59", "67", "75", "77", "81", "100",
                            "400-408", "411-413", "415", "418", "982",
                            "986", "988", "993",
                        ],
                        "encapsulation": "dot1q",
                    },
                },
                "l3_interfaces": {"name": "GigabitEthernet0/2"},
            },
            "Loopback888": {
                "interfaces": {},
                "l2_interfaces": {},
                "l3_interfaces": {"name": "Loopback888"},
            },
            "Loopback999": {
                "interfaces": {},
                "l2_interfaces": {},
                "l3_interfaces": {"name": "Loopback999"},
            },
        }
        fail_missing_match_value = False
        fail_missing_match_key = False
        fail_duplicate = False
        args = [
            "",
            data_sources,
            fail_missing_match_key,
            fail_missing_match_value,
            fail_duplicate,
        ]
        result = _consolidate(*args)
        self.assertEqual(result, output)

    def test_fail_missing_match_key(self):
        data_sources = [
            {
                "data": [
                    {
                        "duplex": "auto",
                        "enabled": True,
                        "name": "GigabitEthernet0/0",
                        "note": ["Connected green wire"],
                        "speed": "auto",
                    },
                    {
                        "description": "Configured by Ansible - Interface 1",
                        "duplex": "auto",
                        "enabled": True,
                        "mtu": 1500,
                        "name": "GigabitEthernet0/1",
                        "note": ["Connected blue wire", "Configured by Paul"],
                        "speed": "auto",
                        "vifs": [
                            {
                                "comment": "Needs reconfiguration",
                                "description": "Eth1 - VIF 100",
                                "enabled": True,
                                "mtu": 400,
                                "vlan_id": 100,
                            },
                            {
                                "description": "Eth1 - VIF 101",
                                "enabled": True,
                                "vlan_id": 101,
                            },
                        ],
                    },
                    {
                        "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)",
                        "enabled": False,
                        "mtu": 600,
                        "name": "GigabitEthernet0/2",
                    },
                ],
                "match_key": "name",
                "name": "interfaces",
            },
            {
                "data": [
                    {"name": "GigabitEthernet0/0"},
                    {
                        "mode": "access",
                        "name": "GigabitEthernet0/1",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ]
                        },
                    },
                    {
                        "mode": "trunk",
                        "name": "GigabitEthernet0/2",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ],
                            "encapsulation": "dot1q",
                        },
                    },
                ],
                "match_key": "name",
                "name": "l2_interfaces",
            },
            {
                "data": [
                    {
                        "ipv4": [{"address": "192.168.0.2/24"}],
                        "intf_name": "GigabitEthernet0/0",
                    },
                    {"name": "GigabitEthernet0/1"},
                    {"name": "GigabitEthernet0/2"},
                    {"name": "Loopback888"},
                    {"name": "Loopback999"},
                ],
                "match_key": "name",
                "name": "l3_interfaces",
            },
        ]
        fail_missing_match_key = True
        args = ["", data_sources, fail_missing_match_key, False, False]
        with self.assertRaises(AnsibleFilterError) as error:
            _consolidate(*args)
        self.assertIn(
            "Error when using plugin 'consolidate': 'fail_missing_match_key' reported missing match key 'name' in data source 3 in list entry 1",
            str(error.exception),
        )

    def test_fail_duplicate(self):
        data_sources = [
            {
                "data": [
                    {
                        "duplex": "auto",
                        "enabled": True,
                        "name": "GigabitEthernet0/0",
                        "note": ["Connected green wire"],
                        "speed": "auto",
                    },
                    {
                        "description": "Configured by Ansible - Interface 1",
                        "duplex": "auto",
                        "enabled": True,
                        "mtu": 1500,
                        "name": "GigabitEthernet0/1",
                        "note": ["Connected blue wire", "Configured by Paul"],
                        "speed": "auto",
                        "vifs": [
                            {
                                "comment": "Needs reconfiguration",
                                "description": "Eth1 - VIF 100",
                                "enabled": True,
                                "mtu": 400,
                                "vlan_id": 100,
                            },
                            {
                                "description": "Eth1 - VIF 101",
                                "enabled": True,
                                "vlan_id": 101,
                            },
                        ],
                    },
                    {
                        "description": "Configured by Ansible - Interface 2 (ADMIN DOWN)",
                        "enabled": False,
                        "mtu": 600,
                        "name": "GigabitEthernet0/2",
                    },
                ],
                "match_key": "name",
                "name": "interfaces",
            },
            {
                "data": [
                    {"name": "GigabitEthernet0/0"},
                    {
                        "mode": "access",
                        "name": "GigabitEthernet0/1",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ]
                        },
                    },
                    {
                        "mode": "trunk",
                        "name": "GigabitEthernet0/2",
                        "trunk": {
                            "allowed_vlans": [
                                "11", "12", "59", "67", "75", "77", "81", "100",
                                "400-408", "411-413", "415", "418", "982",
                                "986", "988", "993",
                            ],
                            "encapsulation": "dot1q",
                        },
                    },
                ],
                "match_key": "name",
                "name": "l2_interfaces",
            },
            {
                "data": [
                    {
                        "ipv4": [{"address": "192.168.0.2/24"}],
                        "name": "GigabitEthernet0/0",
                    },
                    {
                        "ipv4": [{"address": "192.168.0.3/24"}],
                        "name": "GigabitEthernet0/0",
                    },
                    {"name": "GigabitEthernet0/1"},
                    {"name": "GigabitEthernet0/2"},
                    {"name": "Loopback888"},
                    {"name": "Loopback999"},
                ],
                "match_key": "name",
                "name": "l3_interfaces",
            },
        ]
        fail_missing_match_value = False
        fail_missing_match_key = False
        fail_duplicate = True
        args = [
            "",
            data_sources,
            fail_missing_match_key,
            fail_missing_match_value,
            fail_duplicate,
        ]
        with self.assertRaises(AnsibleFilterError) as error:
            _consolidate(*args)
        self.assertIn(
            "Error when using plugin 'consolidate': 'fail_duplicate' reported duplicate values in data source 3",
            str(error.exception),
        )