#!/usr/bin/python3
# -*- coding: utf-8 -*-
# (c) 2020 Matt Martz
# GNU General Public License v3.0+
# (see https://www.gnu.org/licenses/gpl-3.0.txt)

import argparse
import json
import re
import sys
from pathlib import Path
from urllib.request import urlopen

from git import Repo  # gitpython
import yaml  # pyyaml

# Changed files that should never be used to attribute a fragment to a collection
IGNORE_PATHS = set((
    'test/sanity/ignore.txt',
    'docs/docsite/rst/porting_guides/porting_guide_2.10.rst',
    'lib/ansible/config/module_defaults.yml',
))

IGNORE_COLLECTIONS = set((
    'sivel.jinja2',
))

# SHA-256 of an empty file; used to skip collections whose only match is an
# empty __init__.py
EMPTY_SHA256 = (
    "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
)

parser = argparse.ArgumentParser()
parser.add_argument('path', help='Path to ansible-base source', type=Path)
args = parser.parse_args()

repo = Repo(str(args.path))

core_files = set()


class JSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes sets as lists."""
    def default(self, o):
        if isinstance(o, set):
            return list(o)
        return json.JSONEncoder.default(self, o)


def is_core_file(path):
    """A file that still exists in the ansible-base checkout is a core file."""
    if path in core_files:
        return True
    full_path = args.path / path
    if full_path.exists():
        core_files.add(path)
        return True
    return False


def get_collections(pattern):
    """Return the set of collections that provide files matching ``pattern``."""
    f = urlopen(
        'https://sivel.eng.ansible.com/api/v1/collections/provides',
        data=json.dumps({'path': pattern.pattern}).encode()
    )
    data = json.load(f)
    collections = set()
    for name, info in data.items():
        if name in IGNORE_COLLECTIONS or name.startswith('testing.'):
            continue
        files = info['files']
        # Skip collections whose only matching file is an empty __init__.py
        if (len(files) == 1 and
                files[0]['chksum_sha256'] == EMPTY_SHA256 and
                files[0]['name'].endswith('/__init__.py')):
            continue
        collections.add(name)
    return collections


def path_pattern(path):
    """Translate an ansible-base file path into the pattern a collection
    would use for the same content (modules/module_utils live under
    plugins/, test/ becomes tests/)."""
    try:
        path = Path(path).relative_to('lib/ansible/')
    except ValueError:
        path = Path(path)
    parts = str(path).split('/')
    if parts[0] in ('modules', 'module_utils'):
        pattern = re.compile(
            r'plugins\/{}\/{}'.format(
                re.escape(parts[0]),
                re.escape(path.name)
            )
        )
    elif parts[0] == 'plugins':
        pattern = re.compile(
            re.escape(str(path))
        )
    elif parts[0] == 'test':
        parts[0] = 'tests'
        pattern = re.compile(
            re.escape('/'.join(parts))
        )
    else:
        raise ValueError
    return pattern


def get_changed_files(path):
    """Yield every file touched by the commits that git blame attributes to ``path``."""
    for commit, _ in repo.blame('HEAD', path):
        yield from commit.stats.files.keys()


# For each changelog fragment, find the files changed alongside it and map the
# fragment to the collections that now provide those files.
moves = {}
fragment_dir = args.path / 'changelogs' / 'fragments'
for fragment in fragment_dir.iterdir():
    rel_frag = str(fragment.relative_to(args.path))
    changed_files = sorted(set(get_changed_files(rel_frag)))
    for filename in changed_files:
        if filename.startswith('changelogs/'):
            continue
        if filename in IGNORE_PATHS:
            continue
        if is_core_file(filename):
            collections = set(('ansible.builtin',))
            pattern = re.compile(
                'core - {}'.format(re.escape(filename))
            )
        else:
            try:
                pattern = path_pattern(filename)
            except ValueError:
                continue
            collections = get_collections(pattern)
        if collections:
            print(rel_frag, file=sys.stderr)
            print(
                ' {!r} - {}'.format(collections, pattern.pattern),
                file=sys.stderr
            )
            try:
                moves[rel_frag]['collections'].update(collections)
            except KeyError:
                clog_data = yaml.safe_load(fragment.read_text())
                moves[rel_frag] = {
                    'collections': collections,
                    'changelog': clog_data,
                }

print(file=sys.stderr)
print(json.dumps(moves, sort_keys=True, indent=4, cls=JSONEncoder))