[PR #9297/d2088ccf backport][stable-9] Polish botmeta extra sanity test and make it work without warnings on Python 3.13 (#9299)

Polish botmeta extra sanity test and make it work without warnings on Python 3.13 (#9297)

Polish botmeta and make it work without warnings on Python 3.13.

(cherry picked from commit d2088ccfcc)

Co-authored-by: Felix Fontein <felix@fontein.de>
pull/9302/head
Authored by patchback[bot] on 2024-12-21 17:02:28 +01:00, committed by GitHub
parent aa3a43cfa8
commit f9ee387f68
1 changed file with 38 additions and 38 deletions
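Background for the diff below: the change drops the old ast-based DOCUMENTATION parsing in read_authors() in favour of a plain line scan. The PR text only says the test should run without warnings on Python 3.13; a plausible trigger (an assumption, not stated anywhere in the PR) is the deprecated .s attribute alias on ast.Constant, which the old code touched via child.value.s. A minimal sketch of that kind of warning:

    # Sketch only: shows the kind of DeprecationWarning that ast-based parsing
    # can emit on Python 3.12+/3.13; this is an assumed cause, the PR text only
    # says "without warnings on Python 3.13".
    import ast
    import warnings

    source = 'DOCUMENTATION = """\nauthor:\n  - Jane Doe (@janedoe)\n"""\n'
    assign = ast.parse(source).body[0]

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        docs = assign.value.s  # deprecated alias for assign.value.value on ast.Constant
    print([str(w.message) for w in caught])  # DeprecationWarning on 3.12+, [] on older Pythons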

@@ -3,10 +3,9 @@
 # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
 # SPDX-License-Identifier: GPL-3.0-or-later
 """Check BOTMETA file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-import ast
+from __future__ import annotations
 import os
 import re
 import sys
@@ -66,26 +65,27 @@ AUTHOR_REGEX = re.compile(r'^\w.*\(@([\w-]+)\)(?![\w.])')
 def read_authors(filename):
     data = {}
     try:
-        with open(filename, 'rb') as b_module_data:
-            M = ast.parse(b_module_data.read())
-            for child in M.body:
-                if isinstance(child, ast.Assign):
-                    for t in child.targets:
-                        try:
-                            theid = t.id
-                        except AttributeError:
-                            # skip errors can happen when trying to use the normal code
-                            continue
-                        if theid == 'DOCUMENTATION':
-                            if isinstance(child.value, ast.Dict):
-                                data = ast.literal_eval(child.value)
-                            else:
-                                data = yaml.safe_load(child.value.s)
+        documentation = []
+        in_docs = False
+        with open(filename, 'r', encoding='utf-8') as f:
+            for line in f:
+                if line.startswith('DOCUMENTATION ='):
+                    in_docs = True
+                elif line.startswith(("'''", '"""')) and in_docs:
+                    in_docs = False
+                elif in_docs:
+                    documentation.append(line)
+        if in_docs:
+            print(f'{filename}: cannot find DOCUMENTATION end')
+            return []
+        if not documentation:
+            print(f'{filename}: cannot find DOCUMENTATION')
+            return []
+        data = yaml.safe_load('\n'.join(documentation))
     except Exception as e:
-        print('%s:%d:%d: Cannot load DOCUMENTATION: %s' % (filename, 0, 0, e))
+        print(f'{filename}:0:0: Cannot load DOCUMENTATION: {e}')
         return []
     author = data.get('author') or []
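Not part of the diff: a self-contained sketch of the new line-scanning extraction above, run against a hypothetical in-memory plugin instead of a real file from the collection, to show what read_authors() now feeds into yaml.safe_load():

    # Standalone sketch of the extraction logic in the hunk above
    # (hypothetical example input, not a file from community.general).
    import io
    import yaml

    example_plugin = io.StringIO(
        'DOCUMENTATION = r"""\n'
        'module: foo\n'
        'author:\n'
        '  - Jane Doe (@janedoe)\n'
        '"""\n'
    )

    documentation = []
    in_docs = False
    for line in example_plugin:
        if line.startswith('DOCUMENTATION ='):
            in_docs = True
        elif line.startswith(("'''", '"""')) and in_docs:
            in_docs = False
        elif in_docs:
            documentation.append(line)

    data = yaml.safe_load('\n'.join(documentation))
    print(data.get('author'))  # ['Jane Doe (@janedoe)']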
@@ -108,21 +108,21 @@ def validate(filename, filedata):
         return
     if filename.startswith(('plugins/doc_fragments/', 'plugins/module_utils/')):
         return
-    # Compile lis tof all active and inactive maintainers
+    # Compile list of all active and inactive maintainers
     all_maintainers = filedata['maintainers'] + filedata['ignore']
-    if not filename.startswith('plugins/filter/'):
+    if not filename.startswith(('plugins/action/', 'plugins/doc_fragments/', 'plugins/filter/', 'plugins/module_utils/', 'plugins/plugin_utils/')):
         maintainers = read_authors(filename)
         for maintainer in maintainers:
             maintainer = extract_author_name(maintainer)
             if maintainer is not None and maintainer not in all_maintainers:
-                msg = 'Author %s not mentioned as active or inactive maintainer for %s (mentioned are: %s)' % (
-                    maintainer, filename, ', '.join(all_maintainers))
-                print('%s:%d:%d: %s' % (FILENAME, 0, 0, msg))
+                others = ', '.join(all_maintainers)
+                msg = f'Author {maintainer} not mentioned as active or inactive maintainer for {filename} (mentioned are: {others})'
+                print(f'{FILENAME}:0:0: {msg}')
     should_have_no_maintainer = filename in IGNORE_NO_MAINTAINERS
     if not all_maintainers and not should_have_no_maintainer:
-        print('%s:%d:%d: %s' % (FILENAME, 0, 0, 'No (active or inactive) maintainer mentioned for %s' % filename))
+        print(f'{FILENAME}:0:0: No (active or inactive) maintainer mentioned for {filename}')
     if all_maintainers and should_have_no_maintainer:
-        print('%s:%d:%d: %s' % (FILENAME, 0, 0, 'Please remove %s from the ignore list of %s' % (filename, sys.argv[0])))
+        print(f'{FILENAME}:0:0: Please remove {filename} from the ignore list of {sys.argv[0]}')
 def main():
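For context on the check above: the AUTHOR_REGEX shown in the hunk header is what turns an author string into a GitHub handle before it is compared against BOTMETA's maintainer lists (that extract_author_name() wraps this regex is an assumption here). A quick check against hypothetical author strings:

    # Quick check of AUTHOR_REGEX from the hunk header above; the inputs are
    # hypothetical, and extract_author_name() wrapping this regex is assumed.
    import re

    AUTHOR_REGEX = re.compile(r'^\w.*\(@([\w-]+)\)(?![\w.])')

    for author in ('Jane Doe (@janedoe)', 'Jane Doe <jane@example.com>', 'Ansible Core Team'):
        m = AUTHOR_REGEX.match(author)
        print(author, '->', m.group(1) if m else None)
    # Jane Doe (@janedoe) -> janedoe
    # Jane Doe <jane@example.com> -> None
    # Ansible Core Team -> None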
@@ -131,12 +131,12 @@ def main():
         with open(FILENAME, 'rb') as f:
             botmeta = yaml.safe_load(f)
     except yaml.error.MarkedYAMLError as ex:
-        print('%s:%d:%d: YAML load failed: %s' % (FILENAME, ex.context_mark.line +
-                                                  1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
+        msg = re.sub(r'\s+', ' ', str(ex))
+        print(f'{FILENAME}:{ex.context_mark.line + 1}:{ex.context_mark.column + 1}: YAML load failed: {msg}')
         return
     except Exception as ex:  # pylint: disable=broad-except
-        print('%s:%d:%d: YAML load failed: %s' %
-              (FILENAME, 0, 0, re.sub(r'\s+', ' ', str(ex))))
+        msg = re.sub(r'\s+', ' ', str(ex))
+        print(f'{FILENAME}:0:0: YAML load failed: {msg}')
         return
     # Validate schema
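Not from the PR: a small sketch of what the rewritten MarkedYAMLError branch above reports for deliberately broken YAML (the input and the FILENAME value are made-up stand-ins; the real constant points at BOTMETA):

    # Sketch of the error-reporting path above on intentionally broken YAML.
    # FILENAME and the input are hypothetical stand-ins.
    import re
    import yaml

    FILENAME = 'BOTMETA.yml'
    broken = 'files:\n  plugins/modules/foo.py: [unclosed\n'

    try:
        yaml.safe_load(broken)
    except yaml.error.MarkedYAMLError as ex:
        msg = re.sub(r'\s+', ' ', str(ex))
        print(f'{FILENAME}:{ex.context_mark.line + 1}:{ex.context_mark.column + 1}: YAML load failed: {msg}')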
@@ -169,7 +169,7 @@ def main():
     except MultipleInvalid as ex:
         for error in ex.errors:
             # No way to get line/column numbers
-            print('%s:%d:%d: %s' % (FILENAME, 0, 0, humanize_error(botmeta, error)))
+            print(f'{FILENAME}:0:0: {humanize_error(botmeta, error)}')
         return
     # Preprocess (substitute macros, convert to lists)
@@ -181,7 +181,7 @@ def main():
             macro = m.group(1)
             replacement = (macros[macro] or '')
             if macro == 'team_ansible_core':
-                return '$team_ansible_core %s' % replacement
+                return f'$team_ansible_core {replacement}'
             return replacement
         return macro_re.sub(f, text)
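A minimal sketch of the macro expansion this hunk touches, with a made-up macro table and pattern (the real macro_re and macros values come from the script and from BOTMETA itself, so treat both as assumptions):

    # Minimal sketch of the $macro expansion shown above; macro_re and macros
    # are illustrative assumptions, not the script's real values.
    import re

    macro_re = re.compile(r'\$([a-zA-Z_]+)')
    macros = {'modules': 'plugins/modules', 'team_ansible_core': ''}

    def convert_macros(text):
        def f(m):
            macro = m.group(1)
            replacement = (macros[macro] or '')
            if macro == 'team_ansible_core':
                return f'$team_ansible_core {replacement}'
            return replacement
        return macro_re.sub(f, text)

    print(convert_macros('$modules/foo.py'))          # plugins/modules/foo.py
    print(convert_macros('$team_ansible_core jdoe'))  # $team_ansible_core  jdoe (macro kept, empty expansion)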
@@ -196,13 +196,13 @@ def main():
                 if k in LIST_ENTRIES:
                     filedata[k] = v.split()
     except KeyError as e:
-        print('%s:%d:%d: %s' % (FILENAME, 0, 0, 'Found unknown macro %s' % e))
+        print(f'{FILENAME}:0:0: Found unknown macro {e}')
         return
     # Scan all files
     unmatched = set(files)
     for dirs in ('docs/docsite/rst', 'plugins', 'tests', 'changelogs'):
-        for dirpath, dirnames, filenames in os.walk(dirs):
+        for dirpath, _dirnames, filenames in os.walk(dirs):
             for file in sorted(filenames):
                 if file.endswith('.pyc'):
                     continue
@@ -217,10 +217,10 @@ def main():
                         if file in unmatched:
                             unmatched.remove(file)
                 if not matching_files:
-                    print('%s:%d:%d: %s' % (FILENAME, 0, 0, 'Did not find any entry for %s' % filename))
+                    print(f'{FILENAME}:0:0: Did not find any entry for {filename}')
                 matching_files.sort(key=lambda kv: kv[0])
-                filedata = dict()
+                filedata = {}
                 for k in LIST_ENTRIES:
                     filedata[k] = []
                 for dummy, data in matching_files:
@@ -231,7 +231,7 @@ def main():
                 validate(filename, filedata)
     for file in unmatched:
-        print('%s:%d:%d: %s' % (FILENAME, 0, 0, 'Entry %s was not used' % file))
+        print(f'{FILENAME}:0:0: Entry {file} was not used')
 if __name__ == '__main__':