Add trailing comma support to pre-commit (#190)

Branch: pull/194/head
Author: Bradley A. Thornton, 2022-06-01 05:35:10 -07:00 (committed by GitHub)
parent 2541cb3f6a
commit a0622d06ce
54 changed files with 217 additions and 148 deletions


@ -10,6 +10,11 @@ repos:
args: [--branch, main]
- id: trailing-whitespace
- repo: https://github.com/asottile/add-trailing-comma
rev: v2.2.3
hooks:
- id: add-trailing-comma
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v2.6.2"
hooks:
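
For readers unfamiliar with the hook: add-trailing-comma rewrites multi-line Python calls, definitions, and literals so that the final element carries a trailing comma, which is what produces the one-argument-per-line churn in the hunks below. A minimal sketch of the transformation (the greet function is a made-up example, not code from this collection):

def greet(name, excited=False):
    """Toy function used only to illustrate the rewrite."""
    return "Hello, {name}{punct}".format(name=name, punct="!" if excited else ".")

# Before the hook runs: the last argument of a multi-line call has no comma.
greet(
    name="ada",
    excited=True
)

# After the hook runs: the last argument gains a trailing comma, so later
# adding or removing an argument changes exactly one line in a diff.
greet(
    name="ada",
    excited=True,
)

With the hook pinned at rev v2.2.3 above, the rewrite can be applied across the repository with "pre-commit run add-trailing-comma --all-files", which is presumably how the mechanical changes in the remaining files were produced.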


@ -0,0 +1,3 @@
---
trivial:
- Add trailing comma support to pre-commit
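
For context: files under changelogs/fragments feed the collection's release notes; a fragment in the "trivial" section is recorded for the release tooling (antsibull-changelog) but omitted from the rendered changelog.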


@ -186,8 +186,8 @@ class ActionModule(ActionBase):
oper_sys = self._task_vars.get(hvar, "").split(".")[-1]
self._debug(
"OS set to {os}, derived from ansible_network_os".format(
os=oper_sys.lower()
)
os=oper_sys.lower(),
),
)
else:
oper_sys = self._task_vars.get(hvar)
@ -225,15 +225,17 @@ class ActionModule(ActionBase):
with open(template_path, "rb") as file_handler:
try:
template_contents = to_text(
file_handler.read(), errors="surrogate_or_strict"
file_handler.read(),
errors="surrogate_or_strict",
)
except UnicodeError:
raise AnsibleActionFail("Template source files must be utf-8 encoded")
except FileNotFoundError as exc:
raise AnsibleActionFail(
"Failed to open template '{tpath}'. Error: {err}".format(
tpath=template_path, err=to_native(exc)
)
tpath=template_path,
err=to_native(exc),
),
)
return template_contents
@ -330,8 +332,9 @@ class ActionModule(ActionBase):
except Exception as exc:
raise AnsibleActionFail(
"Unhandled exception from parser '{parser}'. Error: {err}".format(
parser=self._parser_name, err=to_native(exc)
)
parser=self._parser_name,
err=to_native(exc),
),
)
if result.get("errors"):


@ -48,7 +48,9 @@ class ActionModule(ActionBase):
:type msg: str
"""
msg = "<{phost}> [fact_diff][{plugin}] {msg}".format(
phost=self._playhost, plugin=self._plugin, msg=msg
phost=self._playhost,
plugin=self._plugin,
msg=msg,
)
self._display.vvvv(msg)
@ -87,7 +89,8 @@ class ActionModule(ActionBase):
except Exception as exc:
self._result["failed"] = True
self._result["msg"] = "Error loading plugin '{plugin}': {err}".format(
plugin=plugin, err=to_native(exc)
plugin=plugin,
err=to_native(exc),
)
return None
@ -101,7 +104,8 @@ class ActionModule(ActionBase):
except Exception as exc:
msg = "Unhandled exception from plugin '{plugin}'. Error: {err}".format(
plugin=self._task.args["plugin"]["name"], err=to_native(exc)
plugin=self._task.args["plugin"]["name"],
err=to_native(exc),
)
self._result["failed"] = True
self._result["msg"] = msg
@ -134,6 +138,6 @@ class ActionModule(ActionBase):
"changed": bool(result["diff"]),
"diff_lines": diff_text.splitlines(),
"diff_text": diff_text,
}
},
)
return self._result


@ -35,7 +35,9 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args, schema=DOCUMENTATION, name=self._task.action
data=self._task.args,
schema=DOCUMENTATION,
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
@ -117,7 +119,8 @@ class ActionModule(ActionBase):
new_obj = obj[first]
except (KeyError, TypeError):
msg = "Error: the key '{first}' was not found " "in {obj}.".format(
obj=obj, first=first
obj=obj,
first=first,
)
raise AnsibleActionFail(msg)
self.set_value(new_obj, rest, val)
@ -135,7 +138,8 @@ class ActionModule(ActionBase):
raise AnsibleActionFail(msg)
if first > len(obj):
msg = "Error: {obj} not long enough for item #{first} to be set.".format(
obj=obj, first=first
obj=obj,
first=first,
)
raise AnsibleActionFail(msg)
if first == len(obj):


@ -84,7 +84,7 @@ class ActionModule(ActionBase):
"Unhandled exception from validator '{validator}'. Error: {err}".format(
validator=self._validator_engine,
err=to_text(exc, errors="surrogate_then_replace"),
)
),
)
self._result["msg"] = ""


@ -177,7 +177,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"cidr_merge": _cidr_merge
"cidr_merge": _cidr_merge,
}
def filters(self):


@ -118,7 +118,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"hwaddr": _hwaddr
"hwaddr": _hwaddr,
}
def filters(self):


@ -119,7 +119,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ip4_hex": _ip4_hex
"ip4_hex": _ip4_hex,
}
def filters(self):


@ -263,13 +263,14 @@ def _ipaddr(*args, **kwargs):
else:
raise AnsibleError(
"Unrecognized type <{0}> for ipaddr filter <{1}>".format(
type(data["value"]), "value"
)
type(data["value"]),
"value",
),
)
except (TypeError, ValueError):
raise AnsibleError(
"Unrecognized type <{0}> for ipaddr filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipaddr filter <{1}>".format(type(data["value"]), "value"),
)
aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipaddr")
@ -284,7 +285,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ipaddr": _ipaddr
"ipaddr": _ipaddr,
}
def filters(self):


@ -160,7 +160,7 @@ def ipmath(value, amount):
if not isinstance(amount, int):
msg = ("You must pass an integer for arithmetic; " "{0} is not a valid integer").format(
amount
amount,
)
raise AnsibleFilterError(msg)
@ -172,7 +172,7 @@ class FilterModule(object):
filter_map = {
# This filter is designed to do simple IP math/arithmetic
"ipmath": _ipmath
"ipmath": _ipmath,
}
def filters(self):


@ -327,7 +327,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ipsubnet": _ipsubnet
"ipsubnet": _ipsubnet,
}
def filters(self):


@ -140,12 +140,15 @@ def _ipv4(*args, **kwargs):
pass
else:
raise AnsibleError(
"Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipv4 filter <{1}>".format(
type(data["value"]),
"value",
),
)
except (TypeError, ValueError):
raise AnsibleError(
"Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipv4 filter <{1}>".format(type(data["value"]), "value"),
)
aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipv4")
valid, errors, updated_data = aav.validate()
@ -163,7 +166,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ipv4": _ipv4
"ipv4": _ipv4,
}
def filters(self):


@ -158,12 +158,15 @@ def _ipv6(*args, **kwargs):
pass
else:
raise AnsibleError(
"Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipv6 filter <{1}>".format(
type(data["value"]),
"value",
),
)
except (TypeError, ValueError):
raise AnsibleError(
"Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipv6 filter <{1}>".format(type(data["value"]), "value"),
)
aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipv6")
valid, errors, updated_data = aav.validate()
@ -181,7 +184,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ipv6": _ipv6
"ipv6": _ipv6,
}
def filters(self):


@ -162,13 +162,14 @@ def _ipwrap(*args, **kwargs):
else:
raise AnsibleError(
"Unrecognized type <{0}> for ipwrap filter <{1}>".format(
type(data["value"]), "value"
)
type(data["value"]),
"value",
),
)
except (TypeError, ValueError):
raise AnsibleError(
"Unrecognized type <{0}> for ipwrap filter <{1}>".format(type(data["value"]), "value")
"Unrecognized type <{0}> for ipwrap filter <{1}>".format(type(data["value"]), "value"),
)
aav = AnsibleArgSpecValidator(data=data, schema=DOCUMENTATION, name="ipwrap")
valid, errors, updated_data = aav.validate()
@ -204,7 +205,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"ipwrap": _ipwrap
"ipwrap": _ipwrap,
}
def filters(self):


@ -119,7 +119,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"macaddr": _macaddr
"macaddr": _macaddr,
}
def filters(self):


@ -151,7 +151,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"network_in_network": _network_in_network
"network_in_network": _network_in_network,
}
def filters(self):


@ -153,7 +153,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"network_in_usable": _network_in_usable
"network_in_usable": _network_in_usable,
}
def filters(self):


@ -143,7 +143,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"next_nth_usable": _next_nth_usable
"next_nth_usable": _next_nth_usable,
}
def filters(self):


@ -143,7 +143,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"nthhost": _nthhost
"nthhost": _nthhost,
}
def filters(self):


@ -172,7 +172,7 @@ def param_list_compare(*args, **kwargs):
if len(data) < 2:
raise AnsibleFilterError(
"Missing either 'base' or 'other value in filter input,"
"refer 'ansible.utils.param_list_compare' filter plugin documentation for details"
"refer 'ansible.utils.param_list_compare' filter plugin documentation for details",
)
valid, argspec_result, updated_params = check_argspec(
@ -183,7 +183,7 @@ def param_list_compare(*args, **kwargs):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
base = data["base"]
other = data["target"]


@ -142,7 +142,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"previous_nth_usable": _previous_nth_usable
"previous_nth_usable": _previous_nth_usable,
}
def filters(self):


@ -141,7 +141,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"reduce_on_network": _reduce_on_network
"reduce_on_network": _reduce_on_network,
}
def filters(self):


@ -135,7 +135,7 @@ class FilterModule(object):
filter_map = {
# IP addresses and networks
"slaac": _slaac
"slaac": _slaac,
}
def filters(self):


@ -173,7 +173,7 @@ def _usable_range(ip):
except Exception as e:
raise AnsibleFilterError(
"Error while using plugin 'usable_range': {msg}".format(msg=to_text(e))
"Error while using plugin 'usable_range': {msg}".format(msg=to_text(e)),
)
return {"usable_ips": ips, "number_of_ips": no_of_ips}


@ -86,7 +86,7 @@ def validate(*args, **kwargs):
if len(args) < 2:
raise AnsibleFilterError(
"Missing either 'data' or 'criteria' value in filter input,"
" refer 'ansible.utils.validate' filter plugin documentation for details"
" refer 'ansible.utils.validate' filter plugin documentation for details",
)
params = {"data": args[0], "criteria": args[1]}
@ -101,7 +101,7 @@ def validate(*args, **kwargs):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
validator_engine, validator_result = _load_validator(
@ -113,8 +113,8 @@ def validate(*args, **kwargs):
if validator_result.get("failed"):
raise AnsibleFilterError(
"validate lookup plugin failed with errors: {msg}".format(
msg=validator_result.get("msg")
)
msg=validator_result.get("msg"),
),
)
try:
@ -126,7 +126,7 @@ def validate(*args, **kwargs):
"Unhandled exception from validator '{validator}'. Error: {err}".format(
validator=updated_params["engine"],
err=to_text(exc, errors="surrogate_then_replace"),
)
),
)
return to_list(result.get("errors", []))


@ -100,7 +100,7 @@ class LookupModule(LookupBase):
if len(terms) < 2:
raise AnsibleLookupError(
"missing either 'data' or 'criteria' value in lookup input,"
" refer ansible.utils.validate lookup plugin documentation for details"
" refer ansible.utils.validate lookup plugin documentation for details",
)
params = {"data": terms[0], "criteria": terms[1]}
@ -119,7 +119,7 @@ class LookupModule(LookupBase):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
validator_engine, validator_result = _load_validator(
@ -132,8 +132,8 @@ class LookupModule(LookupBase):
if validator_result.get("failed"):
raise AnsibleLookupError(
"validate lookup plugin failed with errors: {validator_result}".format(
validator_result=validator_result.get("msg")
)
validator_result=validator_result.get("msg"),
),
)
try:
@ -145,7 +145,7 @@ class LookupModule(LookupBase):
"Unhandled exception from validator '{validator}'. Error: {err}".format(
validator=updated_params["engine"],
err=to_text(exc, errors="surrogate_then_replace"),
)
),
)
return to_list(result.get("errors", []))


@ -216,7 +216,7 @@ class AnsibleArgSpecValidator:
if invalid_keys:
valid = False
errors = "Invalid schema. Invalid keys found: {ikeys}".format(
ikeys=",".join(invalid_keys)
ikeys=",".join(invalid_keys),
)
updated_data = {}
else:
@ -239,8 +239,8 @@ class AnsibleArgSpecValidator:
valid = False
errors = [
"Invalid schema. Invalid keys found: {ikeys}".format(
ikeys=",".join(invalid_keys)
)
ikeys=",".join(invalid_keys),
),
]
updated_data = {}
return valid, errors, updated_data


@ -169,18 +169,22 @@ def index_of(
res.append(idx)
elif fail_on_missing:
msg = ("'{key}' was not found in '{dyct}' at [{index}]").format(
key=key, dyct=dyct, index=idx
key=key,
dyct=dyct,
index=idx,
)
errors.append(msg)
if errors:
_raise_error(
("{errors}. fail_on_missing={fom}").format(
errors=_list_to_and_str(errors), fom=str(fail_on_missing)
)
errors=_list_to_and_str(errors),
fom=str(fail_on_missing),
),
)
else:
msg = "Unknown key type, key ({key}) was a {type}. ".format(
key=key, type=type(_to_well_known_type(key)).__name__
key=key,
type=type(_to_well_known_type(key)).__name__,
)
_raise_error(msg)
if len(res) == 1 and not wantlist:


@ -301,7 +301,7 @@ def _public_query(v, value):
not v_ip.is_loopback(),
not v_ip.is_netmask(),
not v_ip.is_hostmask(),
]
],
):
return value
@ -462,7 +462,7 @@ def ipaddr(value, query="", version=False, alias="ipaddr"):
# TODO: and raise exception commented out below
display.warning(
"The value '%s' is not a valid IP address or network, passing this value to ipaddr filter"
" might result in breaking change in future." % value
" might result in breaking change in future." % value,
)
return False
@ -600,7 +600,8 @@ def _need_netaddr(f_name, *args, **kwargs):
verify python's netaddr for these filters to work
"""
raise AnsibleFilterError(
"The %s filter requires python's netaddr be " "installed on the ansible controller" % f_name
"The %s filter requires python's netaddr be "
"installed on the ansible controller" % f_name,
)


@ -82,11 +82,12 @@ def _validate_args(plugin, doc, params):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
def _need_netaddr(f_name, *args, **kwargs):
raise errors.AnsibleFilterError(
"The %s filter requires python's netaddr be " "installed on the ansible controller" % f_name
"The %s filter requires python's netaddr be "
"installed on the ansible controller" % f_name,
)


@ -29,5 +29,5 @@ def _validate_args(plugin, doc, params):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)


@ -67,7 +67,7 @@ class ValidateBase(object):
err=to_text(exc, errors="surrogate_or_strict"),
engine=self._engine,
argspec=doc,
)
),
)
options = argspec_obj.get("options", {})
@ -98,7 +98,7 @@ class ValidateBase(object):
" should to be type dict".format(
var_name_type=type(var_name_entry),
var_name_entry=var_name_entry,
)
),
)
var_name = var_name_entry.get("name")
if var_name and var_name in self._plugin_vars:
@ -116,7 +116,7 @@ class ValidateBase(object):
" should to be type dict".format(
env_name_entry_type=type(env_name_entry),
env_name_entry=env_name_entry,
)
),
)
env_name = env_name_entry.get("name")
if env_name in os.environ:
@ -131,7 +131,7 @@ class ValidateBase(object):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
if updated_params:
@ -183,6 +183,7 @@ def _load_validator(engine, data, criteria, plugin_vars=None, cls_name="Validate
result[
"msg"
] = "For engine '{engine}' error loading the corresponding validate plugin: {err}".format(
engine=engine, err=to_native(exc)
engine=engine,
err=to_native(exc),
)
return None, result


@ -60,7 +60,7 @@ def fail_on_filter(validator_func):
err.get("fail_missing_match_key"),
err.get("fail_duplicate"),
err.get("fail_missing_match_value"),
]
],
):
_raise_error(err)
return res
@ -92,14 +92,16 @@ def check_missing_match_key_duplicate(data_sources, fail_missing_match_key, fail
if fail_missing_match_key:
errors_match_key.append(
"missing match key '{match_key}' in data source {ds_idx} in list entry {dd_idx}".format(
match_key=match_key, ds_idx=ds_idx, dd_idx=dd_idx
)
match_key=match_key,
ds_idx=ds_idx,
dd_idx=dd_idx,
),
)
continue
if sorted(set(ds_values)) != sorted(ds_values) and fail_duplicate:
errors_duplicate.append(
"duplicate values in data source {ds_idx}".format(ds_idx=ds_idx)
"duplicate values in data source {ds_idx}".format(ds_idx=ds_idx),
)
results.append(set(ds_values))
return results, {
@ -128,8 +130,9 @@ def check_missing_match_values(matched_keys, fail_missing_match_value):
m_matches = ", ".join(missing_match)
errors_match_values.append(
"missing match value {m_matches} in data source {ds_idx}".format(
ds_idx=ds_idx, m_matches=m_matches
)
ds_idx=ds_idx,
m_matches=m_matches,
),
)
return all_values, {"fail_missing_match_value": errors_match_values}
@ -176,7 +179,9 @@ def consolidate(
"""
key_sets = check_missing_match_key_duplicate(
data_sources, fail_missing_match_key, fail_duplicate
data_sources,
fail_missing_match_key,
fail_duplicate,
)
key_vals = check_missing_match_values(key_sets, fail_missing_match_value)
consolidated_facts = consolidate_facts(data_sources, key_vals)


@ -95,7 +95,9 @@ class CliParser(CliParserBase):
template_path = self._task_args.get("parser").get("template_path")
if template_path and not os.path.isfile(template_path):
return {
"errors": "error while reading template_path file {file}".format(file=template_path)
"errors": "error while reading template_path file {file}".format(
file=template_path,
),
}
try:
template = open(self._task_args.get("parser").get("template_path"))


@ -98,7 +98,9 @@ class CliParser(CliParserBase):
)
if template_path and not os.path.isfile(template_path):
return {
"errors": "error while reading template_path file {file}".format(file=template_path)
"errors": "error while reading template_path file {file}".format(
file=template_path,
),
}
try:


@ -88,7 +88,7 @@ class CliParser(CliParserBase):
cli_output = self._task_args.get("text")
network_os = self._task_args.get("parser").get("os") or self._task_vars.get(
"ansible_network_os"
"ansible_network_os",
)
# the nxos | xml includes a odd garbage line at the end, so remove it
if not network_os:


@ -44,7 +44,8 @@ class FactDiff(FactDiffBase):
self._skip_lines[idx] = re.compile(regex)
except re.error as exc:
msg = "The regex '{regex}', is not valid. The error was {err}.".format(
regex=regex, err=str(exc)
regex=regex,
err=str(exc),
)
self._errors.append(msg)


@ -92,7 +92,7 @@ class Validate(ValidateBase):
msg = (
"'criteria' option value is invalid, value should be valid YAML."
" Failed to read with error '{err}'".format(
err=to_text(exc, errors="surrogate_then_replace")
err=to_text(exc, errors="surrogate_then_replace"),
)
)
raise AnsibleError(msg)
@ -105,7 +105,7 @@ class Validate(ValidateBase):
issues.append('Criteria {item} missing "action" key'.format(item=item))
elif item["action"] not in ("warn", "fail"):
issues.append(
'Action in criteria {item} is not one of "warn" or "fail"'.format(item=item)
'Action in criteria {item} is not one of "warn" or "fail"'.format(item=item),
)
if "rule" not in item:
issues.append('Criteria {item} missing "rule" key'.format(item=item))
@ -114,7 +114,10 @@ class Validate(ValidateBase):
item["rule"] = re.compile(item["rule"])
except re.error as exc:
issues.append(
'Failed to compile regex "{rule}": {exc}'.format(rule=item["rule"], exc=exc)
'Failed to compile regex "{rule}": {exc}'.format(
rule=item["rule"],
exc=exc,
),
)
if issues:


@ -103,7 +103,7 @@ class Validate(ValidateBase):
msg = (
"'data' option value is invalid, value should a valid JSON."
" Failed to read with error '{err}'".format(
err=to_text(exe, errors="surrogate_then_replace")
err=to_text(exe, errors="surrogate_then_replace"),
)
)
raise AnsibleError(msg)
@ -121,7 +121,7 @@ class Validate(ValidateBase):
msg = (
"'criteria' option value is invalid, value should a valid JSON."
" Failed to read with error '{err}'".format(
err=to_text(exe, errors="surrogate_then_replace")
err=to_text(exe, errors="surrogate_then_replace"),
)
)
raise AnsibleError(msg)
@ -158,19 +158,23 @@ class Validate(ValidateBase):
for criteria in self._criteria:
if draft == "draft3":
validator = jsonschema.Draft3Validator(
criteria, format_checker=jsonschema.draft3_format_checker
criteria,
format_checker=jsonschema.draft3_format_checker,
)
elif draft == "draft4":
validator = jsonschema.Draft4Validator(
criteria, format_checker=jsonschema.draft4_format_checker
criteria,
format_checker=jsonschema.draft4_format_checker,
)
elif draft == "draft6":
validator = jsonschema.Draft6Validator(
criteria, format_checker=jsonschema.draft6_format_checker
criteria,
format_checker=jsonschema.draft6_format_checker,
)
else:
validator = jsonschema.Draft7Validator(
criteria, format_checker=jsonschema.draft7_format_checker
criteria,
format_checker=jsonschema.draft7_format_checker,
)
validation_errors = sorted(validator.iter_errors(self._data), key=lambda e: e.path)


@ -90,7 +90,7 @@ def validate(*args, **kwargs):
if not len(args):
raise AnsibleError(
"Missing either 'data' value in test plugin input,"
"refer ansible.utils.validate test plugin documentation for details"
"refer ansible.utils.validate test plugin documentation for details",
)
params = {"data": args[0]}
@ -107,7 +107,7 @@ def validate(*args, **kwargs):
"{argspec_result} with errors: {argspec_errors}".format(
argspec_result=argspec_result.get("msg"),
argspec_errors=argspec_result.get("errors"),
)
),
)
validator_engine, validator_result = _load_validator(
@ -118,7 +118,7 @@ def validate(*args, **kwargs):
)
if validator_result.get("failed"):
raise AnsibleError(
"validate lookup plugin failed with errors: %s" % validator_result.get("msg")
"validate lookup plugin failed with errors: %s" % validator_result.get("msg"),
)
try:
@ -130,7 +130,7 @@ def validate(*args, **kwargs):
"Unhandled exception from validator '{validator}'. Error: {err}".format(
validator=updated_params["engine"],
err=to_text(exc, errors="surrogate_then_replace"),
)
),
)
errors = to_list(result.get("errors", []))


@ -117,10 +117,14 @@ class YamlTestUtils(object):
yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
else:
yaml_string_obj_from_stream = yaml.dump(
obj_from_stream, Dumper=AnsibleDumper, encoding=None
obj_from_stream,
Dumper=AnsibleDumper,
encoding=None,
)
yaml_string_obj_from_string = yaml.dump(
obj_from_string, Dumper=AnsibleDumper, encoding=None
obj_from_string,
Dumper=AnsibleDumper,
encoding=None,
)
assert yaml_string == yaml_string_obj_from_stream


@ -91,7 +91,7 @@ class TestCli_Parse(unittest.TestCase):
"parser": {
"name": "ansible.utils.textfsm",
"command": "show version",
}
},
}
valid, result, updated_params = check_argspec(
DOCUMENTATION, "cli_parse module", schema_conditionals=ARGSPEC_CONDITIONALS, **kwargs
@ -232,7 +232,7 @@ class TestCli_Parse(unittest.TestCase):
name based on os provided in task
"""
self._plugin._task.args = {
"parser": {"command": "a command", "name": "a.b.c", "os": "myos"}
"parser": {"command": "a command", "name": "a.b.c", "os": "myos"},
}
with self.assertRaises(Exception) as error:
self._plugin._update_template_path("yaml")
@ -247,7 +247,9 @@ class TestCli_Parse(unittest.TestCase):
need to be in the default template folder
"""
template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_show_version.yaml"
os.path.dirname(__file__),
"fixtures",
"nxos_show_version.yaml",
)
self._plugin._find_needle = MagicMock()
self._plugin._find_needle.return_value = template_path
@ -348,7 +350,9 @@ class TestCli_Parse(unittest.TestCase):
mock_rpc.return_value = mock_out
self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
os.path.dirname(__file__),
"fixtures",
"nxos_show_version.textfsm",
)
self._plugin._task.args = {
"command": "show version",
@ -372,7 +376,9 @@ class TestCli_Parse(unittest.TestCase):
mock_rpc.return_value = mock_out
self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
os.path.dirname(__file__),
"fixtures",
"nxos_show_version.textfsm",
)
self._plugin._task.args = {
"command": "show version",
@ -453,7 +459,9 @@ class TestCli_Parse(unittest.TestCase):
self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_empty_parser.textfsm"
os.path.dirname(__file__),
"fixtures",
"nxos_empty_parser.textfsm",
)
self._plugin._task.args = {
"command": "show version",
@ -487,7 +495,9 @@ class TestCli_Parse(unittest.TestCase):
self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_empty_parser.textfsm"
os.path.dirname(__file__),
"fixtures",
"nxos_empty_parser.textfsm",
)
self._plugin._task.args = {
"command": "show version",


@ -28,7 +28,7 @@ VALID_DATA = {
"a": {
"b": {"4.4": [{"1": {5: {"foo": 123}}}], 5.5: "float5.5"},
"127.0.0.1": "localhost",
}
},
}
VALID_TESTS = [
@ -159,7 +159,7 @@ class TestUpdate_Fact(unittest.TestCase):
{"path": "a.b1.3", "value": 4},
{"path": "a.b2.c", "value": 456},
{"path": "a.b2.d", "value": True},
]
],
}
result = self._plugin.run(task_vars=task_vars)
self.assertEqual(result, expected)


@ -62,9 +62,9 @@ CRITERIA_CRC_ERROR_CHECK = {
"^.*": {
"type": "object",
"properties": {
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
},
}
},
},
}
@ -79,7 +79,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
"^.*": {
"type": "object",
"properties": {"oper_status": {"type": "string", "pattern": "up"}},
}
},
},
}
@ -91,11 +91,11 @@ CRITERIA_IN_RATE_CHECK = {
"properties": {
"counters": {
"properties": {
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
}
}
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
},
},
},
}
},
},
}


@ -87,7 +87,7 @@ class TestConsolidate(unittest.TestCase):
"986",
"988",
"993",
]
],
},
},
{
@ -195,7 +195,7 @@ class TestConsolidate(unittest.TestCase):
"986",
"988",
"993",
]
],
},
},
"l3_interfaces": {"name": "GigabitEthernet0/1"},
@ -327,7 +327,7 @@ class TestConsolidate(unittest.TestCase):
"986",
"988",
"993",
]
],
},
},
{
@ -452,7 +452,7 @@ class TestConsolidate(unittest.TestCase):
"986",
"988",
"993",
]
],
},
},
{


@ -18,7 +18,7 @@ from ansible_collections.ansible.utils.plugins.filter.to_xml import _to_xml
INVALID_DATA = '<netconf-state xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">'
VALID_DATA = {
"interface-configurations": {"@xmlns": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"}
"interface-configurations": {"@xmlns": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"},
}
OUTPUT = """<?xml version="1.0" encoding="utf-8"?>


@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
"^.*": {
"type": "object",
"properties": {
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
},
}
},
},
}
@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
"^.*": {
"type": "object",
"properties": {"oper_status": {"type": "string", "pattern": "up"}},
}
},
},
}
@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
"properties": {
"counters": {
"properties": {
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
}
}
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
},
},
},
}
},
},
}


@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
"^.*": {
"type": "object",
"properties": {
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
},
}
},
},
}
@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
"^.*": {
"type": "object",
"properties": {"oper_status": {"type": "string", "pattern": "up"}},
}
},
},
}
@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
"properties": {
"counters": {
"properties": {
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
}
}
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
},
},
},
}
},
},
}


@ -23,7 +23,9 @@ class TestTextfsmParser(unittest.TestCase):
def test_textfsm_parser(self):
nxos_cfg_path = os.path.join(os.path.dirname(__file__), "fixtures", "nxos_show_version.cfg")
nxos_template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_show_version.textfsm"
os.path.dirname(__file__),
"fixtures",
"nxos_show_version.textfsm",
)
with open(nxos_cfg_path) as fhand:
@ -47,7 +49,7 @@ class TestTextfsmParser(unittest.TestCase):
"OS": "7.0(3)I7(1)",
"PLATFORM": "9000v",
"UPTIME": "12 day(s), 23 hour(s), 48 minute(s), 10 second(s)",
}
},
]
self.assertEqual(result, {"parsed": parsed_output})


@ -21,7 +21,9 @@ class TestTextfsmParser(unittest.TestCase):
def test_ttp_parser(self):
nxos_cfg_path = os.path.join(os.path.dirname(__file__), "fixtures", "nxos_show_version.cfg")
nxos_template_path = os.path.join(
os.path.dirname(__file__), "fixtures", "nxos_show_version.ttp"
os.path.dirname(__file__),
"fixtures",
"nxos_show_version.ttp",
)
with open(nxos_cfg_path) as fhand:
@ -45,7 +47,7 @@ class TestTextfsmParser(unittest.TestCase):
"os": "7.0(3)I7(1)",
"platform": "9000v",
"uptime": "12 day(s), 23 hour(s), 48 minute(s), 10 second(s)",
}
},
]
self.assertEqual(result["parsed"][0][0], parsed_output)


@ -21,7 +21,7 @@ class TestXmlParser(unittest.TestCase):
def test_valid_xml(self):
xml = "<tag1><tag2 arg='foo'>text</tag2></tag1>"
xml_dict = OrderedDict(
tag1=OrderedDict(tag2=OrderedDict([("@arg", "foo"), ("#text", "text")]))
tag1=OrderedDict(tag2=OrderedDict([("@arg", "foo"), ("#text", "text")])),
)
task_args = {"text": xml, "parser": {"os": "none"}}
parser = CliParser(task_args=task_args, task_vars=[], debug=False)


@ -67,7 +67,7 @@ def test_invalid_action(validator, test_rule):
error = to_text(exc)
expected_error = 'Action in criteria {item} is not one of "warn" or "fail"'.format(
item=original
item=original,
)
assert error == expected_error


@ -54,9 +54,9 @@ CRITERIA_CRC_ERROR_CHECK = {
"^.*": {
"type": "object",
"properties": {
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}}
"counters": {"properties": {"in_crc_errors": {"type": "number", "maximum": 0}}},
},
}
},
},
}
@ -71,7 +71,7 @@ CRITERIA_OPER_STATUS_UP_CHECK = {
"^.*": {
"type": "object",
"properties": {"oper_status": {"type": "string", "pattern": "up"}},
}
},
},
}
@ -83,11 +83,11 @@ CRITERIA_IN_RATE_CHECK = {
"properties": {
"counters": {
"properties": {
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}}
}
}
"rate": {"properties": {"in_rate": {"type": "number", "maximum": 0}}},
},
},
},
}
},
},
}