Add trailing comma support to pre-commit (#190)
parent 2541cb3f6a
commit a0622d06ce
@@ -10,6 +10,11 @@ repos:
         args: [--branch, main]
       - id: trailing-whitespace
 
+  - repo: https://github.com/asottile/add-trailing-comma
+    rev: v2.2.3
+    hooks:
+      - id: add-trailing-comma
+
   - repo: https://github.com/pre-commit/mirrors-prettier
     rev: "v2.6.2"
     hooks:
@@ -0,0 +1,3 @@
+---
+trivial:
+  - Add trailing comma support to pre-commit
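The hook added above is what drives every Python change in the hunks that follow: add-trailing-comma rewrites multi-line calls, definitions, and literals so the closing element carries a trailing comma, and when it has to rewrite a call it also puts each argument on its own line; single-line constructs are left alone. A minimal sketch of the transformation (the function and argument names here are illustrative, not taken from this commit):

    # before the hook runs
    result = format_message(
        host=host, msg=msg
    )

    # after running: pre-commit run add-trailing-comma --all-files
    result = format_message(
        host=host,
        msg=msg,
    )

That one-argument-per-line normalization is why most hunks below both add commas and unwrap argument lists.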
@@ -186,8 +186,8 @@ class ActionModule(ActionBase):
             oper_sys = self._task_vars.get(hvar, "").split(".")[-1]
             self._debug(
                 "OS set to {os}, derived from ansible_network_os".format(
-                    os=oper_sys.lower()
-                )
+                    os=oper_sys.lower(),
+                ),
             )
         else:
             oper_sys = self._task_vars.get(hvar)

@@ -225,15 +225,17 @@ class ActionModule(ActionBase):
             with open(template_path, "rb") as file_handler:
                 try:
                     template_contents = to_text(
-                        file_handler.read(), errors="surrogate_or_strict"
+                        file_handler.read(),
+                        errors="surrogate_or_strict",
                     )
                 except UnicodeError:
                     raise AnsibleActionFail("Template source files must be utf-8 encoded")
         except FileNotFoundError as exc:
             raise AnsibleActionFail(
                 "Failed to open template '{tpath}'. Error: {err}".format(
-                    tpath=template_path, err=to_native(exc)
-                )
+                    tpath=template_path,
+                    err=to_native(exc),
+                ),
             )
         return template_contents

@@ -330,8 +332,9 @@ class ActionModule(ActionBase):
         except Exception as exc:
             raise AnsibleActionFail(
                 "Unhandled exception from parser '{parser}'. Error: {err}".format(
-                    parser=self._parser_name, err=to_native(exc)
-                )
+                    parser=self._parser_name,
+                    err=to_native(exc),
+                ),
             )
 
         if result.get("errors"):

@@ -48,7 +48,9 @@ class ActionModule(ActionBase):
         :type msg: str
         """
         msg = "<{phost}> [fact_diff][{plugin}] {msg}".format(
-            phost=self._playhost, plugin=self._plugin, msg=msg
+            phost=self._playhost,
+            plugin=self._plugin,
+            msg=msg,
         )
         self._display.vvvv(msg)

@@ -87,7 +89,8 @@ class ActionModule(ActionBase):
         except Exception as exc:
             self._result["failed"] = True
             self._result["msg"] = "Error loading plugin '{plugin}': {err}".format(
-                plugin=plugin, err=to_native(exc)
+                plugin=plugin,
+                err=to_native(exc),
             )
             return None

@@ -101,7 +104,8 @@ class ActionModule(ActionBase):
 
         except Exception as exc:
             msg = "Unhandled exception from plugin '{plugin}'. Error: {err}".format(
-                plugin=self._task.args["plugin"]["name"], err=to_native(exc)
+                plugin=self._task.args["plugin"]["name"],
+                err=to_native(exc),
             )
             self._result["failed"] = True
             self._result["msg"] = msg

@@ -134,6 +138,6 @@ class ActionModule(ActionBase):
                 "changed": bool(result["diff"]),
                 "diff_lines": diff_text.splitlines(),
                 "diff_text": diff_text,
-            }
+            },
         )
         return self._result
@@ -35,7 +35,9 @@ class ActionModule(ActionBase):
 
     def _check_argspec(self):
         aav = AnsibleArgSpecValidator(
-            data=self._task.args, schema=DOCUMENTATION, name=self._task.action
+            data=self._task.args,
+            schema=DOCUMENTATION,
+            name=self._task.action,
         )
         valid, errors, self._task.args = aav.validate()
         if not valid:

@@ -117,7 +119,8 @@ class ActionModule(ActionBase):
                 new_obj = obj[first]
             except (KeyError, TypeError):
                 msg = "Error: the key '{first}' was not found " "in {obj}.".format(
-                    obj=obj, first=first
+                    obj=obj,
+                    first=first,
                 )
                 raise AnsibleActionFail(msg)
             self.set_value(new_obj, rest, val)

@@ -135,7 +138,8 @@ class ActionModule(ActionBase):
                 raise AnsibleActionFail(msg)
             if first > len(obj):
                 msg = "Error: {obj} not long enough for item #{first} to be set.".format(
-                    obj=obj, first=first
+                    obj=obj,
+                    first=first,
                 )
                 raise AnsibleActionFail(msg)
             if first == len(obj):

@@ -84,7 +84,7 @@ class ActionModule(ActionBase):
                 "Unhandled exception from validator '{validator}'. Error: {err}".format(
                     validator=self._validator_engine,
                     err=to_text(exc, errors="surrogate_then_replace"),
-                )
+                ),
             )
 
         self._result["msg"] = ""
@@ -177,7 +177,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "cidr_merge": _cidr_merge
+        "cidr_merge": _cidr_merge,
     }
 
     def filters(self):

@@ -118,7 +118,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "hwaddr": _hwaddr
+        "hwaddr": _hwaddr,
     }
 
     def filters(self):

@@ -119,7 +119,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ip4_hex": _ip4_hex
+        "ip4_hex": _ip4_hex,
     }
 
     def filters(self):

@@ -263,13 +263,14 @@ def _ipaddr(*args, **kwargs):
         else:
             raise AnsibleError(
                 "Unrecognized type <{0}> for ipaddr filter <{1}>".format(
-                    type(data["value"]), "value"
-                )
+                    type(data["value"]),
+                    "value",
+                ),
             )
 
     except (TypeError, ValueError):
         raise AnsibleError(
-            "Unrecognized type <{0}> for ipaddr filter <{1}>".format(type(data["value"]), "value")
+            "Unrecognized type <{0}> for ipaddr filter <{1}>".format(type(data["value"]), "value"),
         )
 
     aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipaddr")

@@ -284,7 +285,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ipaddr": _ipaddr
+        "ipaddr": _ipaddr,
     }
 
     def filters(self):

@@ -160,7 +160,7 @@ def ipmath(value, amount):
 
     if not isinstance(amount, int):
         msg = ("You must pass an integer for arithmetic; " "{0} is not a valid integer").format(
-            amount
+            amount,
         )
         raise AnsibleFilterError(msg)
 

@@ -172,7 +172,7 @@ class FilterModule(object):
 
     filter_map = {
         # This filter is designed to do simple IP math/arithmetic
-        "ipmath": _ipmath
+        "ipmath": _ipmath,
     }
 
     def filters(self):

@@ -327,7 +327,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ipsubnet": _ipsubnet
+        "ipsubnet": _ipsubnet,
     }
 
     def filters(self):
@@ -140,12 +140,15 @@ def _ipv4(*args, **kwargs):
             pass
         else:
             raise AnsibleError(
-                "Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value")
+                "Unrecognized type <{0}> for ipv4 filter <{1}>".format(
+                    type(data["value"]),
+                    "value",
+                ),
             )
 
     except (TypeError, ValueError):
         raise AnsibleError(
-            "Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value")
+            "Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value"),
         )
     aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipv4")
     valid, errors, updated_data = aav.validate()

@@ -163,7 +166,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ipv4": _ipv4
+        "ipv4": _ipv4,
     }
 
     def filters(self):

@@ -158,12 +158,15 @@ def _ipv6(*args, **kwargs):
             pass
         else:
             raise AnsibleError(
-                "Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value")
+                "Unrecognized type <{0}> for ipv6 filter <{1}>".format(
+                    type(data["value"]),
+                    "value",
+                ),
             )
 
     except (TypeError, ValueError):
         raise AnsibleError(
-            "Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value")
+            "Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value"),
         )
     aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipv6")
     valid, errors, updated_data = aav.validate()

@@ -181,7 +184,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ipv6": _ipv6
+        "ipv6": _ipv6,
     }
 
     def filters(self):

@@ -162,13 +162,14 @@ def _ipwrap(*args, **kwargs):
         else:
             raise AnsibleError(
                 "Unrecognized type <{0}> for ipwrap filter <{1}>".format(
-                    type(data["value"]), "value"
-                )
+                    type(data["value"]),
+                    "value",
+                ),
             )
 
     except (TypeError, ValueError):
         raise AnsibleError(
-            "Unrecognized type <{0}> for ipwrap filter <{1}>".format(type(data["value"]), "value")
+            "Unrecognized type <{0}> for ipwrap filter <{1}>".format(type(data["value"]), "value"),
         )
     aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipwrap")
     valid, errors, updated_data = aav.validate()

@@ -204,7 +205,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "ipwrap": _ipwrap
+        "ipwrap": _ipwrap,
     }
 
     def filters(self):
@@ -119,7 +119,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "macaddr": _macaddr
+        "macaddr": _macaddr,
     }
 
     def filters(self):

@@ -151,7 +151,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "network_in_network": _network_in_network
+        "network_in_network": _network_in_network,
     }
 
     def filters(self):

@@ -153,7 +153,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "network_in_usable": _network_in_usable
+        "network_in_usable": _network_in_usable,
     }
 
     def filters(self):

@@ -143,7 +143,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "next_nth_usable": _next_nth_usable
+        "next_nth_usable": _next_nth_usable,
     }
 
     def filters(self):

@@ -143,7 +143,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "nthhost": _nthhost
+        "nthhost": _nthhost,
     }
 
     def filters(self):
@@ -172,7 +172,7 @@ def param_list_compare(*args, **kwargs):
     if len(data) < 2:
         raise AnsibleFilterError(
             "Missing either 'base' or 'other value in filter input,"
-            "refer 'ansible.utils.param_list_compare' filter plugin documentation for details"
+            "refer 'ansible.utils.param_list_compare' filter plugin documentation for details",
         )
 
     valid, argspec_result, updated_params = check_argspec(

@@ -183,7 +183,7 @@ def param_list_compare(*args, **kwargs):
             "{argspec_result} with errors: {argspec_errors}".format(
                 argspec_result=argspec_result.get("msg"),
                 argspec_errors=argspec_result.get("errors"),
-            )
+            ),
         )
     base = data["base"]
     other = data["target"]

@@ -142,7 +142,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "previous_nth_usable": _previous_nth_usable
+        "previous_nth_usable": _previous_nth_usable,
     }
 
     def filters(self):

@@ -141,7 +141,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "reduce_on_network": _reduce_on_network
+        "reduce_on_network": _reduce_on_network,
     }
 
     def filters(self):

@@ -135,7 +135,7 @@ class FilterModule(object):
 
     filter_map = {
         # IP addresses and networks
-        "slaac": _slaac
+        "slaac": _slaac,
     }
 
     def filters(self):
@@ -173,7 +173,7 @@ def _usable_range(ip):
 
     except Exception as e:
         raise AnsibleFilterError(
-            "Error while using plugin 'usable_range': {msg}".format(msg=to_text(e))
+            "Error while using plugin 'usable_range': {msg}".format(msg=to_text(e)),
         )
 
     return {"usable_ips": ips, "number_of_ips": no_of_ips}

@@ -86,7 +86,7 @@ def validate(*args, **kwargs):
     if len(args) < 2:
         raise AnsibleFilterError(
             "Missing either 'data' or 'criteria' value in filter input,"
-            " refer 'ansible.utils.validate' filter plugin documentation for details"
+            " refer 'ansible.utils.validate' filter plugin documentation for details",
         )
 
     params = {"data": args[0], "criteria": args[1]}

@@ -101,7 +101,7 @@ def validate(*args, **kwargs):
             "{argspec_result} with errors: {argspec_errors}".format(
                 argspec_result=argspec_result.get("msg"),
                 argspec_errors=argspec_result.get("errors"),
-            )
+            ),
         )
 
     validator_engine, validator_result = _load_validator(

@@ -113,8 +113,8 @@ def validate(*args, **kwargs):
     if validator_result.get("failed"):
         raise AnsibleFilterError(
             "validate lookup plugin failed with errors: {msg}".format(
-                msg=validator_result.get("msg")
-            )
+                msg=validator_result.get("msg"),
+            ),
         )
 
     try:

@@ -126,7 +126,7 @@ def validate(*args, **kwargs):
             "Unhandled exception from validator '{validator}'. Error: {err}".format(
                 validator=updated_params["engine"],
                 err=to_text(exc, errors="surrogate_then_replace"),
-            )
+            ),
         )
 
     return to_list(result.get("errors", []))
@@ -100,7 +100,7 @@ class LookupModule(LookupBase):
         if len(terms) < 2:
             raise AnsibleLookupError(
                 "missing either 'data' or 'criteria' value in lookup input,"
-                " refer ansible.utils.validate lookup plugin documentation for details"
+                " refer ansible.utils.validate lookup plugin documentation for details",
             )
 
         params = {"data": terms[0], "criteria": terms[1]}

@@ -119,7 +119,7 @@ class LookupModule(LookupBase):
                 "{argspec_result} with errors: {argspec_errors}".format(
                     argspec_result=argspec_result.get("msg"),
                     argspec_errors=argspec_result.get("errors"),
-                )
+                ),
             )
 
         validator_engine, validator_result = _load_validator(

@@ -132,8 +132,8 @@ class LookupModule(LookupBase):
         if validator_result.get("failed"):
             raise AnsibleLookupError(
                 "validate lookup plugin failed with errors: {validator_result}".format(
-                    validator_result=validator_result.get("msg")
-                )
+                    validator_result=validator_result.get("msg"),
+                ),
             )
 
         try:

@@ -145,7 +145,7 @@ class LookupModule(LookupBase):
                 "Unhandled exception from validator '{validator}'. Error: {err}".format(
                     validator=updated_params["engine"],
                     err=to_text(exc, errors="surrogate_then_replace"),
-                )
+                ),
             )
 
         return to_list(result.get("errors", []))
@@ -216,7 +216,7 @@ class AnsibleArgSpecValidator:
         if invalid_keys:
             valid = False
             errors = "Invalid schema. Invalid keys found: {ikeys}".format(
-                ikeys=",".join(invalid_keys)
+                ikeys=",".join(invalid_keys),
             )
             updated_data = {}
         else:

@@ -239,8 +239,8 @@ class AnsibleArgSpecValidator:
             valid = False
             errors = [
                 "Invalid schema. Invalid keys found: {ikeys}".format(
-                    ikeys=",".join(invalid_keys)
-                )
+                    ikeys=",".join(invalid_keys),
+                ),
             ]
             updated_data = {}
         return valid, errors, updated_data

@@ -169,18 +169,22 @@ def index_of(
                 res.append(idx)
             elif fail_on_missing:
                 msg = ("'{key}' was not found in '{dyct}' at [{index}]").format(
-                    key=key, dyct=dyct, index=idx
+                    key=key,
+                    dyct=dyct,
+                    index=idx,
                 )
                 errors.append(msg)
         if errors:
             _raise_error(
                 ("{errors}. fail_on_missing={fom}").format(
-                    errors=_list_to_and_str(errors), fom=str(fail_on_missing)
-                )
+                    errors=_list_to_and_str(errors),
+                    fom=str(fail_on_missing),
+                ),
             )
     else:
         msg = "Unknown key type, key ({key}) was a {type}. ".format(
-            key=key, type=type(_to_well_known_type(key)).__name__
+            key=key,
+            type=type(_to_well_known_type(key)).__name__,
         )
         _raise_error(msg)
     if len(res) == 1 and not wantlist:
@@ -301,7 +301,7 @@ def _public_query(v, value):
                 not v_ip.is_loopback(),
                 not v_ip.is_netmask(),
                 not v_ip.is_hostmask(),
-            ]
+            ],
         ):
             return value
 

@@ -462,7 +462,7 @@ def ipaddr(value, query="", version=False, alias="ipaddr"):
         # TODO: and raise exception commented out below
         display.warning(
             "The value '%s' is not a valid IP address or network, passing this value to ipaddr filter"
-            " might result in breaking change in future." % value
+            " might result in breaking change in future." % value,
         )
         return False
 

@@ -600,7 +600,8 @@ def _need_netaddr(f_name, *args, **kwargs):
     verify python's netaddr for these filters to work
     """
     raise AnsibleFilterError(
-        "The %s filter requires python's netaddr be " "installed on the ansible controller" % f_name
+        "The %s filter requires python's netaddr be "
+        "installed on the ansible controller" % f_name,
     )
 

@@ -82,11 +82,12 @@ def _validate_args(plugin, doc, params):
             "{argspec_result} with errors: {argspec_errors}".format(
                 argspec_result=argspec_result.get("msg"),
                 argspec_errors=argspec_result.get("errors"),
-            )
+            ),
         )
 
 
 def _need_netaddr(f_name, *args, **kwargs):
     raise errors.AnsibleFilterError(
-        "The %s filter requires python's netaddr be " "installed on the ansible controller" % f_name
+        "The %s filter requires python's netaddr be "
+        "installed on the ansible controller" % f_name,
     )
@@ -29,5 +29,5 @@ def _validate_args(plugin, doc, params):
             "{argspec_result} with errors: {argspec_errors}".format(
                 argspec_result=argspec_result.get("msg"),
                 argspec_errors=argspec_result.get("errors"),
-            )
+            ),
         )

@@ -67,7 +67,7 @@ class ValidateBase(object):
                     err=to_text(exc, errors="surrogate_or_strict"),
                     engine=self._engine,
                     argspec=doc,
-                )
+                ),
             )
         options = argspec_obj.get("options", {})
 

@@ -98,7 +98,7 @@ class ValidateBase(object):
                     " should to be type dict".format(
                         var_name_type=type(var_name_entry),
                         var_name_entry=var_name_entry,
-                    )
+                    ),
                 )
             var_name = var_name_entry.get("name")
             if var_name and var_name in self._plugin_vars:

@@ -116,7 +116,7 @@ class ValidateBase(object):
                     " should to be type dict".format(
                         env_name_entry_type=type(env_name_entry),
                         env_name_entry=env_name_entry,
-                    )
+                    ),
                 )
             env_name = env_name_entry.get("name")
             if env_name in os.environ:

@@ -131,7 +131,7 @@ class ValidateBase(object):
                 "{argspec_result} with errors: {argspec_errors}".format(
                     argspec_result=argspec_result.get("msg"),
                     argspec_errors=argspec_result.get("errors"),
-                )
+                ),
             )
 
         if updated_params:

@@ -183,6 +183,7 @@ def _load_validator(engine, data, criteria, plugin_vars=None, cls_name="Validate
         result[
             "msg"
         ] = "For engine '{engine}' error loading the corresponding validate plugin: {err}".format(
-            engine=engine, err=to_native(exc)
+            engine=engine,
+            err=to_native(exc),
         )
         return None, result
@@ -60,7 +60,7 @@ def fail_on_filter(validator_func):
                 err.get("fail_missing_match_key"),
                 err.get("fail_duplicate"),
                 err.get("fail_missing_match_value"),
-            ]
+            ],
         ):
             _raise_error(err)
         return res

@@ -92,14 +92,16 @@ def check_missing_match_key_duplicate(data_sources, fail_missing_match_key, fail
                 if fail_missing_match_key:
                     errors_match_key.append(
                         "missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format(
-                            match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx
-                        )
+                            match_key=match_key,
+                            ds_idx=ds_idx,
+                            dd_idx=dd_idx,
+                        ),
                     )
                     continue
 
         if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
             errors_duplicate.append(
-                "duplicate values in data source {ds_idx}".format(ds_idx=ds_idx)
+                "duplicate values in data source {ds_idx}".format(ds_idx=ds_idx),
             )
         results.append(set(ds_values))
     return results, {

@@ -128,8 +130,9 @@ def check_missing_match_values(matched_keys, fail_missing_match_value):
             m_matches = ", ".join(missing_match)
             errors_match_values.append(
                 "missing match value {m_matches} in data source {ds_idx}".format(
-                    ds_idx=ds_idx, m_matches=m_matches
-                )
+                    ds_idx=ds_idx,
+                    m_matches=m_matches,
+                ),
             )
     return all_values, {"fail_missing_match_value": errors_match_values}
 

@@ -176,7 +179,9 @@ def consolidate(
     """
 
     key_sets = check_missing_match_key_duplicate(
-        data_sources, fail_missing_match_key, fail_duplicate
+        data_sources,
+        fail_missing_match_key,
+        fail_duplicate,
     )
     key_vals = check_missing_match_values(key_sets, fail_missing_match_value)
     consolidated_facts = consolidate_facts(data_sources, key_vals)
@@ -95,7 +95,9 @@ class CliParser(CliParserBase):
         template_path = self._task_args.get("parser").get("template_path")
         if template_path and not os.path.isfile(template_path):
             return {
-                "errors": "error while reading template_path file {file}".format(file=template_path)
+                "errors": "error while reading template_path file {file}".format(
+                    file=template_path,
+                ),
             }
         try:
             template = open(self._task_args.get("parser").get("template_path"))

@@ -98,7 +98,9 @@ class CliParser(CliParserBase):
         )
         if template_path and not os.path.isfile(template_path):
             return {
-                "errors": "error while reading template_path file {file}".format(file=template_path)
+                "errors": "error while reading template_path file {file}".format(
+                    file=template_path,
+                ),
             }
 
         try:

@@ -88,7 +88,7 @@ class CliParser(CliParserBase):
         cli_output = self._task_args.get("text")
 
         network_os = self._task_args.get("parser").get("os") or self._task_vars.get(
-            "ansible_network_os"
+            "ansible_network_os",
         )
         # the nxos | xml includes a odd garbage line at the end, so remove it
         if not network_os:

@@ -44,7 +44,8 @@ class FactDiff(FactDiffBase):
                 self._skip_lines[idx] = re.compile(regex)
             except re.error as exc:
                 msg = "The regex '{regex}', is not valid. The error was {err}.".format(
-                    regex=regex, err=str(exc)
+                    regex=regex,
+                    err=str(exc),
                 )
                 self._errors.append(msg)
@@ -92,7 +92,7 @@ class Validate(ValidateBase):
             msg = (
                 "'criteria' option value is invalid, value should be valid YAML."
                 " Failed to read with error '{err}'".format(
-                    err=to_text(exc, errors="surrogate_then_replace")
+                    err=to_text(exc, errors="surrogate_then_replace"),
                 )
             )
             raise AnsibleError(msg)

@@ -105,7 +105,7 @@ class Validate(ValidateBase):
                 issues.append('Criteria {item} missing "action" key'.format(item=item))
             elif item["action"] not in ("warn", "fail"):
                 issues.append(
-                    'Action in criteria {item} is not one of "warn" or "fail"'.format(item=item)
+                    'Action in criteria {item} is not one of "warn" or "fail"'.format(item=item),
                 )
             if "rule" not in item:
                 issues.append('Criteria {item} missing "rule" key'.format(item=item))

@@ -114,7 +114,10 @@ class Validate(ValidateBase):
                 item["rule"] = re.compile(item["rule"])
             except re.error as exc:
                 issues.append(
-                    'Failed to compile regex "{rule}": {exc}'.format(rule=item["rule"], exc=exc)
+                    'Failed to compile regex "{rule}": {exc}'.format(
+                        rule=item["rule"],
+                        exc=exc,
+                    ),
                 )
 
         if issues:

@@ -103,7 +103,7 @@ class Validate(ValidateBase):
             msg = (
                 "'data' option value is invalid, value should a valid JSON."
                 " Failed to read with error '{err}'".format(
-                    err=to_text(exe, errors="surrogate_then_replace")
+                    err=to_text(exe, errors="surrogate_then_replace"),
                 )
             )
             raise AnsibleError(msg)

@@ -121,7 +121,7 @@ class Validate(ValidateBase):
             msg = (
                 "'criteria' option value is invalid, value should a valid JSON."
                 " Failed to read with error '{err}'".format(
-                    err=to_text(exe, errors="surrogate_then_replace")
+                    err=to_text(exe, errors="surrogate_then_replace"),
                 )
             )
             raise AnsibleError(msg)

@@ -158,19 +158,23 @@ class Validate(ValidateBase):
         for criteria in self._criteria:
             if draft == "draft3":
                 validator = jsonschema.Draft3Validator(
-                    criteria, format_checker=jsonschema.draft3_format_checker
+                    criteria,
+                    format_checker=jsonschema.draft3_format_checker,
                 )
             elif draft == "draft4":
                 validator = jsonschema.Draft4Validator(
-                    criteria, format_checker=jsonschema.draft4_format_checker
+                    criteria,
+                    format_checker=jsonschema.draft4_format_checker,
                 )
             elif draft == "draft6":
                 validator = jsonschema.Draft6Validator(
-                    criteria, format_checker=jsonschema.draft6_format_checker
+                    criteria,
+                    format_checker=jsonschema.draft6_format_checker,
                 )
             else:
                 validator = jsonschema.Draft7Validator(
-                    criteria, format_checker=jsonschema.draft7_format_checker
+                    criteria,
+                    format_checker=jsonschema.draft7_format_checker,
                 )
 
             validation_errors = sorted(validator.iter_errors(self._data), key=lambda e: e.path)
@@ -90,7 +90,7 @@ def validate(*args, **kwargs):
     if not len(args):
         raise AnsibleError(
             "Missing either 'data' value in test plugin input,"
-            "refer ansible.utils.validate test plugin documentation for details"
+            "refer ansible.utils.validate test plugin documentation for details",
         )
 
     params = {"data": args[0]}

@@ -107,7 +107,7 @@ def validate(*args, **kwargs):
             "{argspec_result} with errors: {argspec_errors}".format(
                 argspec_result=argspec_result.get("msg"),
                 argspec_errors=argspec_result.get("errors"),
-            )
+            ),
         )
 
     validator_engine, validator_result = _load_validator(

@@ -118,7 +118,7 @@ def validate(*args, **kwargs):
     )
     if validator_result.get("failed"):
         raise AnsibleError(
-            "validate lookup plugin failed with errors: %s" % validator_result.get("msg")
+            "validate lookup plugin failed with errors: %s" % validator_result.get("msg"),
        )
 
    try:

@@ -130,7 +130,7 @@ def validate(*args, **kwargs):
             "Unhandled exception from validator '{validator}'. Error: {err}".format(
                 validator=updated_params["engine"],
                 err=to_text(exc, errors="surrogate_then_replace"),
-            )
+            ),
         )
 
     errors = to_list(result.get("errors", []))
@@ -117,10 +117,14 @@ class YamlTestUtils(object):
             yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
         else:
             yaml_string_obj_from_stream = yaml.dump(
-                obj_from_stream, Dumper=AnsibleDumper, encoding=None
+                obj_from_stream,
+                Dumper=AnsibleDumper,
+                encoding=None,
             )
             yaml_string_obj_from_string = yaml.dump(
-                obj_from_string, Dumper=AnsibleDumper, encoding=None
+                obj_from_string,
+                Dumper=AnsibleDumper,
+                encoding=None,
             )
 
         assert yaml_string == yaml_string_obj_from_stream
@@ -91,7 +91,7 @@ class TestCli_Parse(unittest.TestCase):
             "parser": {
                 "name": "ansible.utils.textfsm",
                 "command": "show version",
-            }
+            },
         }
         valid, result, updated_params = check_argspec(
             DOCUMENTATION, "cli_parse module", schema_conditionals=ARGSPEC_CONDITIONALS, **kwargs

@@ -232,7 +232,7 @@ class TestCli_Parse(unittest.TestCase):
         name based on os provided in task
         """
         self._plugin._task.args = {
-            "parser": {"command": "a command", "name": "a.b.c", "os": "myos"}
+            "parser": {"command": "a command", "name": "a.b.c", "os": "myos"},
         }
         with self.assertRaises(Exception) as error:
             self._plugin._update_template_path("yaml")

@@ -247,7 +247,9 @@ class TestCli_Parse(unittest.TestCase):
         need to be in the default template folder
         """
         template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_show_version.yaml"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_show_version.yaml",
         )
         self._plugin._find_needle = MagicMock()
         self._plugin._find_needle.return_value = template_path

@@ -348,7 +350,9 @@ class TestCli_Parse(unittest.TestCase):
         mock_rpc.return_value = mock_out
         self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
         template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_show_version.textfsm",
         )
         self._plugin._task.args = {
             "command": "show version",

@@ -372,7 +376,9 @@ class TestCli_Parse(unittest.TestCase):
         mock_rpc.return_value = mock_out
         self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
         template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_show_version.textfsm",
         )
         self._plugin._task.args = {
             "command": "show version",

@@ -453,7 +459,9 @@ class TestCli_Parse(unittest.TestCase):
 
         self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
         template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_empty_parser.textfsm"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_empty_parser.textfsm",
        )
        self._plugin._task.args = {
            "command": "show version",

@@ -487,7 +495,9 @@ class TestCli_Parse(unittest.TestCase):
 
         self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
         template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_empty_parser.textfsm"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_empty_parser.textfsm",
        )
        self._plugin._task.args = {
            "command": "show version",
@@ -28,7 +28,7 @@ VALID_DATA = {
     "a": {
         "b": {"4.4": [{"1": {5: {"foo": 123}}}], 5.5: "float5.5"},
         "127.0.0.1": "localhost",
-    }
+    },
 }
 
 VALID_TESTS = [

@@ -159,7 +159,7 @@ class TestUpdate_Fact(unittest.TestCase):
                 {"path": "a.b1.3", "value": 4},
                 {"path": "a.b2.c", "value": 456},
                 {"path": "a.b2.d", "value": True},
-            ]
+            ],
         }
         result = self._plugin.run(task_vars=task_vars)
         self.assertEqual(result, expected)
@@ -62,9 +62,9 @@ CRITERIA_CRC_ERROR_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {
-                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
+                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
             },
-        }
+        },
     },
 }

@@ -79,7 +79,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {"oper_status": {"type": "string", "pattern": "up"}},
-        }
+        },
     },
 }

@@ -91,11 +91,11 @@ CRITERIA_IN_RATE_CHECK = {
             "properties": {
                 "counters": {
                     "properties": {
-                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
-                    }
-                }
+                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
+                    },
+                },
             },
-        }
+        },
     },
 }
@@ -87,7 +87,7 @@ class TestConsolidate(unittest.TestCase):
                             "986",
                             "988",
                             "993",
-                        ]
+                        ],
                     },
                 },
                 {

@@ -195,7 +195,7 @@ class TestConsolidate(unittest.TestCase):
                             "986",
                             "988",
                             "993",
-                        ]
+                        ],
                     },
                 },
                 "l3_interfaces": {"name": "GigabitEthernet0/1"},

@@ -327,7 +327,7 @@ class TestConsolidate(unittest.TestCase):
                             "986",
                             "988",
                             "993",
-                        ]
+                        ],
                     },
                 },
                 {

@@ -452,7 +452,7 @@ class TestConsolidate(unittest.TestCase):
                             "986",
                             "988",
                             "993",
-                        ]
+                        ],
                     },
                 },
                 {
@@ -18,7 +18,7 @@ from ansible_collections.ansible.utils.plugins.filter.to_xml import _to_xml
 INVALID_DATA = '<netconf-state xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">'
 
 VALID_DATA = {
-    "interface-configurations": {"@xmlns": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"}
+    "interface-configurations": {"@xmlns": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"},
 }
 
 OUTPUT = """<?xml version="1.0" encoding="utf-8"?>
@@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {
-                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
+                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
             },
-        }
+        },
     },
 }

@@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {"oper_status": {"type": "string", "pattern": "up"}},
-        }
+        },
     },
 }

@@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
             "properties": {
                 "counters": {
                     "properties": {
-                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
-                    }
-                }
+                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
+                    },
+                },
             },
-        }
+        },
     },
 }
@@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {
-                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
+                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
             },
-        }
+        },
     },
 }

@@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {"oper_status": {"type": "string", "pattern": "up"}},
-        }
+        },
     },
 }

@@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
             "properties": {
                 "counters": {
                     "properties": {
-                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
-                    }
-                }
+                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
+                    },
+                },
             },
-        }
+        },
     },
 }
@@ -23,7 +23,9 @@ class TestTextfsmParser(unittest.TestCase):
     def test_textfsm_parser(self):
         nxos_cfg_path = os.path.join(os.path.dirname(__file__), "fixtures", "nxos_show_version.cfg")
         nxos_template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_show_version.textfsm",
         )
 
         with open(nxos_cfg_path) as fhand:

@@ -47,7 +49,7 @@ class TestTextfsmParser(unittest.TestCase):
                 "OS": "7.0(3)I7(1)",
                 "PLATFORM": "9000v",
                 "UPTIME": "12 day(s), 23 hour(s), 48 minute(s), 10 second(s)",
-            }
+            },
         ]
         self.assertEqual(result, {"parsed": parsed_output})

@@ -21,7 +21,9 @@ class TestTextfsmParser(unittest.TestCase):
     def test_ttp_parser(self):
         nxos_cfg_path = os.path.join(os.path.dirname(__file__), "fixtures", "nxos_show_version.cfg")
         nxos_template_path = os.path.join(
-            os.path.dirname(__file__), "fixtures", "nxos_show_version.ttp"
+            os.path.dirname(__file__),
+            "fixtures",
+            "nxos_show_version.ttp",
        )
 
        with open(nxos_cfg_path) as fhand:

@@ -45,7 +47,7 @@ class TestTextfsmParser(unittest.TestCase):
                 "os": "7.0(3)I7(1)",
                 "platform": "9000v",
                 "uptime": "12 day(s), 23 hour(s), 48 minute(s), 10 second(s)",
-            }
+            },
         ]
         self.assertEqual(result["parsed"][0][0], parsed_output)
@@ -21,7 +21,7 @@ class TestXmlParser(unittest.TestCase):
     def test_valid_xml(self):
         xml = "<tag1><tag2 arg='foo'>text</tag2></tag1>"
         xml_dict = OrderedDict(
-            tag1=OrderedDict(tag2=OrderedDict([("@arg", "foo"), ("#text", "text")]))
+            tag1=OrderedDict(tag2=OrderedDict([("@arg", "foo"), ("#text", "text")])),
         )
         task_args = {"text": xml, "parser": {"os": "none"}}
         parser = CliParser(task_args=task_args, task_vars=[], debug=False)

@@ -67,7 +67,7 @@ def test_invalid_action(validator, test_rule):
     error = to_text(exc)
 
     expected_error = 'Action in criteria {item} is not one of "warn" or "fail"'.format(
-        item=original
+        item=original,
     )
     assert error == expected_error
@@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {
-                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
+                "counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
             },
-        }
+        },
     },
 }

@@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
         "^.*": {
             "type": "object",
             "properties": {"oper_status": {"type": "string", "pattern": "up"}},
-        }
+        },
     },
 }

@@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
             "properties": {
                 "counters": {
                     "properties": {
-                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
-                    }
-                }
+                        "rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
+                    },
+                },
             },
-        }
+        },
     },
 }