diff --git a/.github/workflows/api-check.yml b/.github/workflows/api-check.yml new file mode 100644 index 000000000000..b642982ca685 --- /dev/null +++ b/.github/workflows/api-check.yml @@ -0,0 +1,129 @@ +name: API Check + +on: + pull_request: + branches: + - main + workflow_dispatch: + inputs: + new_commit: + type: string + required: true + description: New Commit + old_commit: + type: string + required: true + description: Old Commit + +jobs: + build: + runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + steps: + - name: Checkout sources + uses: nordicbuilder/action-checkout-west-update@main + with: + git-fetch-depth: 0 + west-update-args: '' + + - name: cache-pip + uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-doc-pip + + - name: Git rebase + if: github.event_name == 'pull_request' + env: + BASE_REF: ${{ github.base_ref }} + working-directory: ncs/nrf + run: | + git remote -v + git branch + git rebase origin/${BASE_REF} + # debug + git log --pretty=oneline -n 5 + + - name: Install packages + run: | + sudo apt update + sudo apt-get install -y ninja-build mscgen plantuml + sudo snap install yq + DOXYGEN_VERSION=$(yq ".doxygen.version" ./ncs/nrf/scripts/tools-versions-linux.yml) + wget --no-verbose "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz" + tar xf doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz + echo "${PWD}/doxygen-${DOXYGEN_VERSION}/bin" >> $GITHUB_PATH + cp -r ncs/nrf/scripts/ci/api_check . + + - name: Install Python dependencies + working-directory: ncs + run: | + sudo pip3 install -U setuptools wheel pip + pip3 install -r nrf/doc/requirements.txt + pip3 install -r ../api_check/requirements.txt + + - name: West zephyr-export + working-directory: ncs + run: | + west zephyr-export + + - name: Checkout new commit and west update + if: github.event_name == 'workflow_dispatch' + working-directory: ncs/nrf + run: | + git checkout ${{ github.event.inputs.new_commit }} + west update + + - name: Collect data from new commit + working-directory: ncs/nrf + run: | + source ../zephyr/zephyr-env.sh + echo =========== NEW COMMIT =========== + git log -n 1 + cmake -GNinja -Bdoc/_build -Sdoc + python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf + python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-new.pkl + python3 ../../api_check/dts -n - --save-input ../../dts-new.pkl + rm -Rf doc/_build + + - name: Checkout old commit and west update + working-directory: ncs/nrf + run: | + git checkout ${{ github.event.inputs.old_commit }}${{ github.base_ref }} + cd .. + west update + + - name: Collect data from old commit + working-directory: ncs/nrf + run: | + source ../zephyr/zephyr-env.sh + echo =========== OLD COMMIT =========== + git log -n 1 + cmake -GNinja -Bdoc/_build -Sdoc + python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf + python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-old.pkl + python3 ../../api_check/dts -n - --save-input ../../dts-old.pkl + + - name: Check + working-directory: ncs/nrf + run: | + python3 ../../api_check/headers --format github --resolve-paths . --relative-to . --save-stats ../../headers-stats.json ../../headers-new.pkl ../../headers-old.pkl || true + python3 ../../api_check/dts --format github --relative-to . 
--save-stats ../../dts-stats.json -n ../../dts-new.pkl -o ../../dts-old.pkl || true
+          echo Headers stats
+          cat ../../headers-stats.json || true
+          echo DTS stats
+          cat ../../dts-stats.json || true
+
+      - name: Update PR
+        if: github.event_name == 'pull_request'
+        working-directory: ncs/nrf
+        env:
+          PR_NUMBER: ${{ github.event.number }}
+          GITHUB_ACTOR: ${{ github.actor }}
+          GITHUB_TOKEN: ${{ secrets.NCS_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+          GITHUB_REPO: ${{ github.repository }}
+          GITHUB_RUN_ID: ${{ github.run_id }}
+        run: |
+          python3 ../../api_check/pr ../../headers-stats.json ../../dts-stats.json
diff --git a/scripts/ci/api_check/dts/__main__.py b/scripts/ci/api_check/dts/__main__.py
new file mode 100644
index 000000000000..ce7e40b98bba
--- /dev/null
+++ b/scripts/ci/api_check/dts/__main__.py
@@ -0,0 +1,2 @@
+from main import main
+main()
diff --git a/scripts/ci/api_check/dts/args.py b/scripts/ci/api_check/dts/args.py
new file mode 100644
index 000000000000..458b7dd87fca
--- /dev/null
+++ b/scripts/ci/api_check/dts/args.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import sys
+import argparse
+from pathlib import Path
+
+
+class ArgsClass:
+    new: 'list[list[str]]'
+    old: 'list[list[str]]|None'
+    format: str
+    relative_to: 'Path | None'
+    save_stats: 'Path | None'
+    save_input: 'Path | None'
+    save_old_input: 'Path | None'
+    dump_json: 'Path | None'
+
+
+def parse_args() -> ArgsClass:
+    parser = argparse.ArgumentParser(add_help=False, allow_abbrev=False,
+                                     description='Detect DTS binding changes.')
+    parser.add_argument('-n', '--new', nargs='+', action='append', required=True,
+                        help='List of directories to search for the new DTS bindings. ' +
+                             'Passing "-" uses the "ZEPHYR_BASE" environment variable to find ' +
+                             'the DTS bindings in the default directories.')
+    parser.add_argument('-o', '--old', nargs='+', action='append',
+                        help='List of directories to search for the old DTS bindings. ' +
+                             'Passing "-" uses the "ZEPHYR_BASE" environment variable to find ' +
+                             'the DTS bindings in the default directories. Skip this option if you ' +
+                             'want to pre-parse the input with the "--save-input" option.')
+    parser.add_argument('--format', choices=('text', 'github'), default='text',
+                        help='Output format. Default is "text".')
+    parser.add_argument('--relative-to', type=Path,
+                        help='Show relative paths in messages.')
+    parser.add_argument('--save-stats', type=Path,
+                        help='Save statistics to JSON file.')
+    parser.add_argument('--save-input', metavar='FILE', type=Path,
+                        help='Pre-parse and save the new input to a file. The file format may change ' +
+                             'from version to version. ' +
+                             'Always use the same version of this tool for both files.')
+    parser.add_argument('--save-old-input', metavar='FILE', type=Path,
+                        help='Pre-parse and save the old input to a file.')
+    parser.add_argument('--dump-json', metavar='FILE', type=Path,
+                        help='Dump input data to a JSON file (only for debug purposes).')
+    parser.add_argument('--help', action='help',
+                        help='Show this help and exit.')
+    args: ArgsClass = parser.parse_args()
+
+    if (args.old is None) and (args.save_input is None):
+        parser.print_usage()
+        print('error: at least one of the following arguments is required: --old, --save-input', file=sys.stderr)
+        sys.exit(2)
+
+    args.relative_to = args.relative_to.absolute() if args.relative_to else None
+
+    return args
+
+
+args: ArgsClass = parse_args()
diff --git a/scripts/ci/api_check/dts/bindings_parser.py b/scripts/ci/api_check/dts/bindings_parser.py
new file mode 100644
index 000000000000..d72ef8753fc3
--- /dev/null
+++ b/scripts/ci/api_check/dts/bindings_parser.py
@@ -0,0 +1,109 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import sys
+import pickle
+from pathlib import Path
+from dts_tools import devicetree_sources, warning
+
+if devicetree_sources:
+    sys.path.insert(0, devicetree_sources)
+
+from devicetree import edtlib
+
+
+class ParseResult:
+    bindings: 'list[Binding]'
+    binding_by_name: 'dict[str, Binding]'
+    def __init__(self):
+        self.bindings = []
+        self.binding_by_name = {}
+
+class Property:
+    name: str
+    type: str
+    description: str
+    enum: 'set[str]'
+    const: 'str | None'
+    default: 'str | None'
+    deprecated: bool
+    required: bool
+    specifier_space: str
+
+    def __init__(self, prop: edtlib.PropertySpec):
+        self.name = prop.name
+        self.type = prop.type or ''
+        self.description = prop.description or ''
+        self.enum = { str(x) for x in (prop.enum or []) }
+        self.const = str(prop.const) if prop.const else None
+        self.default = str(prop.default) if prop.default else None
+        self.deprecated = prop.deprecated or False
+        self.required = prop.required or False
+        self.specifier_space = str(prop.specifier_space or '')
+
+class Binding:
+    path: str
+    name: str
+    description: str
+    cells: str
+    buses: str
+    properties: 'dict[str, Property]'
+
+    def __init__(self, binding: edtlib.Binding, file: Path):
+        self.path = str(file)
+        self.name = binding.compatible or self.path
+        if binding.on_bus is not None:
+            self.name += '@' + binding.on_bus
+        self.description = binding.description or ''
+        cells_array = [
+            f'{name}={";".join(value)}' for name, value in (binding.specifier2cells or {}).items()
+        ]
+        cells_array.sort()
+        self.cells = '&'.join(cells_array)
+        busses_array = list(binding.buses or [])
+        busses_array.sort()
+        self.buses = ';'.join(busses_array)
+        self.properties = {}
+        for key, value in (binding.prop2specs or {}).items():
+            prop = Property(value)
+            self.properties[key] = prop
+
+
+def get_binding_files(bindings_dirs: 'list[Path]') -> 'list[Path]':
+    binding_files = []
+    for bindings_dir in bindings_dirs:
+        if not bindings_dir.is_dir():
+            raise FileNotFoundError(f'Bindings directory "{bindings_dir}" not found.')
+        for file in bindings_dir.glob('**/*.yaml'):
+            binding_files.append(file)
+        for file in bindings_dir.glob('**/*.yml'):
+            binding_files.append(file)
+    return binding_files
+
+
+def parse_bindings(dirs_or_pickle: 'list[Path]|Path') -> ParseResult:
+    result = ParseResult()
+    if isinstance(dirs_or_pickle, list):
+        yaml_files = get_binding_files(dirs_or_pickle)
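+        # Map base file names to their full paths, as expected by edtlib.Binding
+        # for resolving binding files referenced through "include:". Duplicate
+        # base names collapse to a single path here.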
+        fname2path: 'dict[str, str]' = {
+            path.name: str(path) for path in yaml_files
+        }
+        for binding_file in yaml_files:
+            try:
+                binding = Binding(edtlib.Binding(str(binding_file), fname2path, None, False, False), binding_file)
+                if binding.name in result.binding_by_name:
+                    warning(f'Duplicate binding {binding.name}: {binding.path} {result.binding_by_name[binding.name].path}')
+                result.bindings.append(binding)
+                result.binding_by_name[binding.name] = binding
+            except edtlib.EDTError as err:
+                warning(err)
+    else:
+        with open(dirs_or_pickle, 'rb') as fd:
+            result = pickle.load(fd)
+    return result
+
+
+def save_bindings(parse_result: ParseResult, file: Path):
+    with open(file, 'wb') as fd:
+        pickle.dump(parse_result, fd)
diff --git a/scripts/ci/api_check/dts/dts_compare.py b/scripts/ci/api_check/dts/dts_compare.py
new file mode 100644
index 000000000000..2ae27d46b205
--- /dev/null
+++ b/scripts/ci/api_check/dts/dts_compare.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+from typing import Any
+from bindings_parser import Binding, Property, ParseResult
+
+
+UNCHANGED = 'unchanged'
+ADDED = 'added'
+DELETED = 'deleted'
+MODIFIED = 'modified'
+SET = 'set'
+CLEARED = 'cleared'
+
+
+class AnyChange:
+    kind: str
+    action: str
+    def __init__(self, action: str, new: Any, old: Any):
+        self.action = action
+        self.new = new
+        self.old = old
+
+
+class BindingChange(AnyChange):
+    kind = 'binding'
+    new: Binding
+    old: Binding
+    path: bool = False
+    description: bool = False
+    cells: bool = False
+    buses: bool = False
+    properties: 'list[PropertyChange]'
+    def __init__(self, action: str, new: Any, old: Any):
+        super().__init__(action, new, old)
+        self.properties = []
+
+
+class PropertyChange(AnyChange):
+    kind = 'property'
+    new: Property
+    old: Property
+    type: bool = False
+    description: bool = False
+    enum: 'list[EnumChange]'
+    const: str = UNCHANGED
+    default: str = UNCHANGED
+    deprecated: str = UNCHANGED
+    required: str = UNCHANGED
+    specifier_space: bool = False
+    def __init__(self, action: str, new: Any, old: Any):
+        super().__init__(action, new, old)
+        self.enum = []
+
+
+class EnumChange(AnyChange):
+    kind = 'enum'
+    new: str
+    old: str
+
+
+def get_str_action(new: 'str | None', old: 'str | None') -> str:
+    if (new is None) and (old is None):
+        return UNCHANGED
+    elif (new is None) and (old is not None):
+        return DELETED
+    elif (new is not None) and (old is None):
+        return ADDED
+    else:
+        return MODIFIED if new != old else UNCHANGED
+
+
+def get_bool_action(new: bool, old: bool) -> str:
+    if (not new) and old:
+        return CLEARED
+    elif new and (not old):
+        return SET
+    else:
+        return UNCHANGED
+
+
+def compare_properties(new: 'dict[str, Property]', old: 'dict[str, Property]') -> 'list[PropertyChange]':
+    new_keys = set(new.keys())
+    old_keys = set(old.keys())
+    added_keys = new_keys.difference(old_keys)
+    deleted_keys = old_keys.difference(new_keys)
+    remaining_keys = new_keys.intersection(old_keys)
+    result: 'list[PropertyChange]' = []
+    for key in added_keys:
+        property_change = PropertyChange(ADDED, new[key], new[key])
+        result.append(property_change)
+    for key in deleted_keys:
+        property_change = PropertyChange(DELETED, old[key], old[key])
+        result.append(property_change)
+    for key in remaining_keys:
+        new_property = new[key]
+        old_property = old[key]
+        property_change = PropertyChange(MODIFIED, new[key], old[key])
+        property_change.type = new_property.type != old_property.type
+        property_change.description = 
new_property.description != old_property.description + property_change.const = get_str_action(new_property.const, old_property.const) + property_change.default = get_str_action(new_property.default, old_property.default) + property_change.deprecated = get_bool_action(new_property.deprecated, old_property.deprecated) + property_change.required = get_bool_action(new_property.required, old_property.required) + property_change.specifier_space = new_property.specifier_space != old_property.specifier_space + for enum_value in new_property.enum.difference(old_property.enum): + property_change.enum.append(EnumChange(ADDED, enum_value, enum_value)) + for enum_value in old_property.enum.difference(new_property.enum): + property_change.enum.append(EnumChange(DELETED, enum_value, enum_value)) + changed = ( + property_change.type or + property_change.description or + property_change.const != UNCHANGED or + property_change.default != UNCHANGED or + property_change.deprecated != UNCHANGED or + property_change.required != UNCHANGED or + property_change.specifier_space or + len(property_change.enum)) + if changed: + result.append(property_change) + return result + + +def compare(new: ParseResult, old: ParseResult) -> 'list[BindingChange]': + new_keys = set(new.binding_by_name.keys()) + old_keys = set(old.binding_by_name.keys()) + added_keys = new_keys.difference(old_keys) + deleted_keys = old_keys.difference(new_keys) + remaining_keys = new_keys.intersection(old_keys) + result: 'list[BindingChange]' = [] + for key in added_keys: + binding_change = BindingChange(ADDED, new.binding_by_name[key], new.binding_by_name[key]) + result.append(binding_change) + for key in deleted_keys: + binding_change = BindingChange(DELETED, old.binding_by_name[key], old.binding_by_name[key]) + result.append(binding_change) + for key in remaining_keys: + new_binding = new.binding_by_name[key] + old_binding = old.binding_by_name[key] + binding_change = BindingChange(MODIFIED, new.binding_by_name[key], old.binding_by_name[key]) + binding_change.path = new_binding.path != old_binding.path + binding_change.description = new_binding.description != old_binding.description + binding_change.buses = new_binding.buses != old_binding.buses + binding_change.cells = new_binding.cells != old_binding.cells + binding_change.properties = compare_properties(new_binding.properties, old_binding.properties) + changed = (binding_change.path or + binding_change.description or + binding_change.buses or + binding_change.cells or + len(binding_change.properties)) + if changed: + result.append(binding_change) + return result diff --git a/scripts/ci/api_check/dts/dts_tools.py b/scripts/ci/api_check/dts/dts_tools.py new file mode 100644 index 000000000000..b6f3799146bf --- /dev/null +++ b/scripts/ci/api_check/dts/dts_tools.py @@ -0,0 +1,44 @@ +# Copyright (c) 2024 Nordic Semiconductor ASA +# +# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause + +import os +import sys +from pathlib import Path +from jinja2 import Template + + +def warning(*args, **kwargs): + args = ('\x1B[33mwarning:\x1B[0m', *args) + print(*args, **kwargs, file=sys.stderr) + + +def error(*args, **kwargs): + args = ('\x1B[31merror:\x1B[0m', *args) + print(*args, **kwargs, file=sys.stderr) + + +def compile_messages(messages): + result = {} + for key in messages.keys(): + result[key] = Template(messages[key]) + return result + + +def find_devicetree_sources() -> 'str|None': + sources = None + zephyr_base = os.getenv('ZEPHYR_BASE') + if zephyr_base is not None: + zephyr_base = 
Path(zephyr_base)
+        sources = zephyr_base / 'scripts/dts/python-devicetree/src'
+        if sources.exists():
+            return str(sources)
+    west_root = Path(__file__).parent.parent.absolute()
+    for _i in range(0, 6):
+        sources = west_root / 'zephyr/scripts/dts/python-devicetree/src'
+        if sources.exists():
+            return str(sources)
+        west_root = west_root.parent
+    return None
+
+devicetree_sources = find_devicetree_sources()
diff --git a/scripts/ci/api_check/dts/main.py b/scripts/ci/api_check/dts/main.py
new file mode 100644
index 000000000000..1a8a15f14001
--- /dev/null
+++ b/scripts/ci/api_check/dts/main.py
@@ -0,0 +1,132 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import os
+import sys
+import json
+import out_text
+from json import JSONEncoder
+from pathlib import Path
+from dts_tools import error
+from args import args
+from bindings_parser import parse_bindings, save_bindings
+from dts_compare import compare
+
+
+def collect_zephyr_inputs() -> 'list[Path]':
+    zephyr_base = os.getenv('ZEPHYR_BASE')
+    if zephyr_base is None:
+        error('You must specify "ZEPHYR_BASE" when using the "-" argument as an input.')
+        sys.exit(1)
+    zephyr_base = Path(zephyr_base)
+    west_root = zephyr_base.parent
+    bindings_dirs: 'list[Path]' = []
+    for bindings_dir in west_root.glob('**/dts/bindings'):
+        rel = bindings_dir.relative_to(west_root)
+        if ('test' in str(rel)) or ('sample' in str(rel)):
+            continue
+        bindings_dirs.append(bindings_dir)
+    return bindings_dirs
+
+
+def collect_inputs(arguments: 'list[list[str]]') -> 'list[Path]|Path':
+    result_list: 'list[Path]' = []
+    result_pickle: 'Path|None' = None
+    for arg_list in arguments:
+        for arg in arg_list:
+            if arg == '-':
+                result_list.extend(collect_zephyr_inputs())
+            else:
+                arg = Path(arg)
+                if arg.is_file():
+                    result_pickle = arg
+                else:
+                    result_list.append(arg)
+    if len(result_list) > 0 and result_pickle is not None:
+        error('Expecting pickled file or list of directories. 
Not both.') + sys.exit(1) + return result_pickle or result_list + + +def dump_json(file: Path, **kwargs): + def default_encode(o): + this_id = id(o) + if this_id in ids: + return f'__id__{this_id}' + if isinstance(o, set): + return list(o) + else: + ids.add(this_id) + d = {'__id__': f'__id__{this_id}'} + for name in tuple(dir(o)): + if not name.startswith('_'): + value = getattr(o, name) + if not callable(value): + d[name] = value + return d + ids = set() + with open(file, 'w') as fd: + fd.write('{\n') + first = True + for name, value in kwargs.items(): + json = JSONEncoder(sort_keys=False, indent=2, default=default_encode).encode(value) + if not first: + fd.write(',\n') + fd.write(f'"{name}": {json}') + first = False + fd.write('\n}\n') + +def main(): + new_input = parse_bindings(collect_inputs(args.new)) + + if args.old: + old_input = parse_bindings(collect_inputs(args.old)) + else: + old_input = None + + if args.save_input: + save_bindings(new_input, args.save_input) + + if args.save_old_input and old_input: + save_bindings(old_input, args.save_old_input) + + if args.dump_json: + if old_input: + dump_json(args.dump_json, + new_bindings=new_input.bindings, + new_binding_by_name=new_input.binding_by_name, + old_bindings=old_input.bindings, + old_binding_by_name=old_input.binding_by_name) + else: + dump_json(args.dump_json, + bindings=new_input.bindings, + binding_by_name=new_input.binding_by_name) + + level = 0 + + if old_input: + changes = compare(new_input, old_input) + if args.dump_json: + dump_json(args.dump_json, + new_bindings=new_input.bindings, + new_binding_by_name=new_input.binding_by_name, + old_bindings=old_input.bindings, + old_binding_by_name=old_input.binding_by_name, + changes=changes) + stats = out_text.generate(changes) + if args.save_stats: + args.save_stats.write_text(json.dumps({ + 'notice': stats[1], + 'warning': stats[2], + 'critical': stats[3], + }, indent=2)) + for i, count in enumerate(stats): + if count > 0: + level = i + + sys.exit(level) + + +if __name__ == '__main__': + main() diff --git a/scripts/ci/api_check/dts/out_text.py b/scripts/ci/api_check/dts/out_text.py new file mode 100644 index 000000000000..13e342a95043 --- /dev/null +++ b/scripts/ci/api_check/dts/out_text.py @@ -0,0 +1,154 @@ +# Copyright (c) 2024 Nordic Semiconductor ASA +# +# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause + +import re +from pathlib import Path +from jinja2 import Template +from dts_compare import AnyChange, BindingChange +from dts_tools import compile_messages +from args import args + + +spaces_re = re.compile(r'\s+') + + +messages: 'dict[str, Template]' = compile_messages({ + 'binding-added': 'ignore', + 'binding-deleted': 'critical: Binding "{{new.name}}" deleted.', + 'binding-modified-path': 'ignore', + 'binding-modified-description': 'notice: Binding "{{new.name}}" description changed.', + 'binding-modified-cells': 'notice: Binding "{{new.name}}" cells definition changed from "{{old.cells}}" to "{{new.cells}}".', + 'binding-modified-buses': 'warning: Binding "{{new.name}}" buses definition changed from "{{old.buses}}" to "{{new.buses}}".', + 'property-added': ''' + {% if new.const is not none %} + ignore + {% elif new.default is not none %} + notice: Property "{{new.name}}" of "{{binding.new.name}}" added with default value. + {% elif not new.required %} + warning: Property "{{new.name}}" of "{{binding.new.name}}" added, but it is not required. + {% else %} + critical: Required property "{{new.name}}" of "{{binding.new.name}}" added. 
+ {% endif %} + ''', + 'property-deleted': ''' + {% if new.deprecated %} + notice: Deprecated property "{{new.name}}" of "{{binding.new.name}}" deleted. + {% else %} + critical: Property "{{new.name}}" of "{{binding.new.name}}" deleted. + {% endif %} + ''', + 'property-modified-type': '{% if new.const is not none %} critical: Property "{{new.name}}" of "{{binding.new.name}}" type changed. {% endif %}', + 'property-modified-description': '{% if new.const is not none %} notice: Property "{{new.name}}" of "{{binding.new.name}}" description changed. {% endif %}', + 'property-modified-const-added': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" const value set.', + 'property-modified-const-deleted': ''' + {% if new.default is none %} + critical: Property "{{new.name}}" of "{{binding.new.name}}" const value removed. + {% else %} + notice: Property "{{new.name}}" of "{{binding.new.name}}" const value replaced by default value. + {% endif %} + ''', + 'property-modified-const-modified': 'ignore', + 'property-modified-default-added': 'ignore', + 'property-modified-default-deleted': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" default value removed.', + 'property-modified-default-modified': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" default value modified.', + 'property-modified-deprecated-set': 'ignore', + 'property-modified-deprecated-cleared': 'ignore', + 'property-modified-required-set': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" is now required.', + 'property-modified-required-cleared': 'ignore', + 'property-modified-specifier_space': '{% if new.const is not none %} warning: Property "{{new.name}}" of "{{binding.new.name}}" specifier space changed. {% endif %}', + 'enum-added': 'ignore', + 'enum-deleted': 'critical: Enum value "{{new}}" of property "{{property.new.name}}" of "{{binding.new.name}}" deleted.', +}) + + +def get_message_level(message: str) -> int: + if message.startswith('ignore') or (message == ''): + return 0 + elif message.startswith('notice'): + return 1 + elif message.startswith('warning'): + return 2 + elif message.startswith('critical'): + return 3 + else: + raise ValueError(f'Unknown level of message: {message}') + +github_commands = [ + '::ignore', + '::notice', + '::warning', + '::error' +] + +github_titles = [ + 'Ignore', + 'Notice', + 'Warning', + 'Critical', +] + +def encode(text: str, is_message: bool): + if is_message: + return text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A') + else: + return text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A').replace(',', '%2C').replace('::', '%3A%3A') + +def show_message(file: Path, line: 'str | int | None', message: str, level: int): + if args.relative_to is not None: + file = file.relative_to(args.relative_to) + if args.format == 'github': + command = github_commands[level] + title = f'Compatibility {github_titles[level]}' + if line is not None: + print(f'{command} file={encode(str(file), False)},line={line},title={title}::{encode(message, True)}') + else: + print(f'{command} file={encode(str(file), False)},title={title}::{encode(message, True)}') + elif line is not None: + print(f'{file}:{line}: {message}') + else: + print(f'{file}: {message}') + + +def generate_changes(stats: 'list[int]', changes: 'list[AnyChange]', + location: Path, **kwargs) -> int: + for change in changes: + loc = Path(change.new.path) if isinstance(change, BindingChange) else location + prefix = f'{change.kind}-{change.action}' + for key, template in 
messages.items():
+            if not key.startswith(prefix):
+                continue
+            matched = False
+            if key == prefix:
+                matched = True
+            else:
+                parts = key[len(prefix) + 1:].split('-')
+                field = parts[0]
+                expected = parts[1] if (len(parts) > 1) else True
+                value = getattr(change, field)
+                if value == expected:
+                    matched = True
+            if not matched:
+                continue
+            data = {}
+            for name in dir(change):
+                value = getattr(change, name)
+                if (not callable(value)) and (not name.startswith('_')):
+                    data[name] = value
+            for name, value in kwargs.items():
+                data[name] = value
+            message = spaces_re.sub(' ', template.render(**data)).strip()
+            level = get_message_level(message)
+            if level > 0:
+                show_message(loc, None, message, level)
+                stats[level] += 1
+            if prefix == 'binding-modified':
+                generate_changes(stats, change.properties, loc, binding=change)
+            elif prefix == 'property-modified':
+                generate_changes(stats, change.enum, loc, property=change, **kwargs)
+
+
+def generate(compare_result: 'list[BindingChange]'):
+    stats = [0, 0, 0, 0]
+    generate_changes(stats, compare_result, Path())
+    return stats
diff --git a/scripts/ci/api_check/headers/__main__.py b/scripts/ci/api_check/headers/__main__.py
new file mode 100644
index 000000000000..ce7e40b98bba
--- /dev/null
+++ b/scripts/ci/api_check/headers/__main__.py
@@ -0,0 +1,2 @@
+from main import main
+main()
diff --git a/scripts/ci/api_check/headers/args.py b/scripts/ci/api_check/headers/args.py
new file mode 100644
index 000000000000..47ee6cb957c6
--- /dev/null
+++ b/scripts/ci/api_check/headers/args.py
@@ -0,0 +1,65 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import sys
+import argparse
+from pathlib import Path
+
+
+class ArgsClass:
+    new_input: Path
+    old_input: 'Path | None'
+    format: str
+    resolve_paths: 'Path | None'
+    relative_to: 'Path | None'
+    save_stats: 'Path | None'
+    save_input: 'Path | None'
+    save_old_input: 'Path | None'
+    dump_json: 'Path | None'
+
+
+def parse_args() -> ArgsClass:
+    parser = argparse.ArgumentParser(add_help=False, allow_abbrev=False,
+                                     description='Detect API changes based on doxygen XML output.')
+    parser.add_argument('new_input', metavar='new-input', type=Path,
+                        help='The directory containing doxygen XML output or pre-parsed file with ' +
+                             'the new API version. For details about ' +
+                             'doxygen XML output, see https://www.doxygen.nl/manual/output.html.')
+    parser.add_argument('old_input', metavar='old-input', nargs='?', type=Path,
+                        help='The directory containing doxygen XML output or pre-parsed file with ' +
+                             'the old API version. You should skip this if you want to pre-parse ' +
+                             'the input with the "--save-input" option.')
+    parser.add_argument('--format', choices=('text', 'github'), default='text',
+                        help='Output format. Default is "text".')
+    parser.add_argument('--resolve-paths', type=Path,
+                        help='Resolve relative paths from doxygen input using this parameter as ' +
+                             'base directory.')
+    parser.add_argument('--relative-to', type=Path,
+                        help='Show relative paths in messages.')
+    parser.add_argument('--save-stats', type=Path,
+                        help='Save statistics to JSON file.')
+    parser.add_argument('--save-input', metavar='FILE', type=Path,
+                        help='Pre-parse and save the "new-input" to a file. The file format may change ' +
+                             'from version to version. ' +
+                             'Always use the same version of this tool for both files.')
+    parser.add_argument('--save-old-input', metavar='FILE', type=Path,
+                        help='Pre-parse and save the "old-input" to a file.')
+    parser.add_argument('--dump-json', metavar='FILE', type=Path,
+                        help='Dump input data to a JSON file (only for debug purposes).')
+    parser.add_argument('--help', action='help',
+                        help='Show this help and exit.')
+    args: ArgsClass = parser.parse_args()
+
+    if (args.old_input is None) and (args.save_input is None):
+        parser.print_usage()
+        print('error: at least one of the following arguments is required: old-input, --save-input', file=sys.stderr)
+        sys.exit(2)
+
+    args.resolve_paths = args.resolve_paths.absolute() if args.resolve_paths else None
+    args.relative_to = args.relative_to.absolute() if args.relative_to else None
+
+    return args
+
+
+args: ArgsClass = parse_args()
diff --git a/scripts/ci/api_check/headers/compare.py b/scripts/ci/api_check/headers/compare.py
new file mode 100644
index 000000000000..cd6d93f1ce65
--- /dev/null
+++ b/scripts/ci/api_check/headers/compare.py
@@ -0,0 +1,406 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+from typing import Any
+from nodes import Enum, EnumValue, File, Function, FunctionLike, Group, Node, Param, Struct, StructField, Typedef, Variable, Define
+from dox_parser import ParseResult
+
+
+ADDED = 'added'
+DELETED = 'deleted'
+MODIFIED = 'modified'
+
+
+class GroupChanges:
+    name: str
+    title: str
+    files: 'list[FileChanges]'
+    changes: 'list[AnyChange]'
+    def __init__(self, name: str, title: str) -> None:
+        self.name = name
+        self.title = title
+        self.changes = []
+        self.files = []
+
+
+class FileChanges:
+    name: str
+    groups: 'list[GroupChanges]'
+    changes: 'list[AnyChange]'
+    def __init__(self, name: str) -> None:
+        self.name = name
+        self.changes = []
+        self.groups = []
+
+
+class AnyChange:
+    kind: str
+    action: str
+    def __init__(self, action: str, new: Any, old: Any):
+        self.action = action
+        self.new = new
+        self.old = old
+
+
+class NodeChange(AnyChange):
+    file: bool = False
+    desc: bool = False
+
+
+class TypedefChange(NodeChange):
+    kind = 'typedef'
+    type: bool = False
+    new: Typedef
+    old: Typedef
+
+
+class VariableChange(NodeChange):
+    kind = 'var'
+    type: bool = False
+    new: Variable
+    old: Variable
+
+
+class EnumValueChange(NodeChange):
+    kind = 'enum_value'
+    value: bool = False
+    new: EnumValue
+    old: EnumValue
+
+
+class EnumChange(NodeChange):
+    kind = 'enum'
+    new: Enum
+    old: Enum
+
+
+class StructFieldChange(AnyChange):
+    kind = 'field'
+    index: bool = False
+    type: bool = False
+    desc: bool = False
+    new: StructField
+    old: StructField
+
+
+class StructChange(NodeChange):
+    kind = 'struct'
+    fields: 'list[StructFieldChange]'
+    new: Struct
+    old: Struct
+    def __init__(self, action: str, new: Any, old: Any):
+        super().__init__(action, new, old)
+        self.fields = []
+
+
+class ParamChange(AnyChange):
+    kind = 'param'
+    index: bool = False
+    type: bool = False
+    desc: bool = False
+    new: Param
+    old: Param
+
+
+class FunctionLikeChange(NodeChange):
+    params: 'list[ParamChange]'
+    def __init__(self, action: str, new: Any, old: Any):
+        super().__init__(action, new, old)
+        self.params = []
+
+
+class FunctionChange(FunctionLikeChange):
+    kind: str = 'func'
+    return_type: bool = False
+    new: Function
+    old: Function
+
+
+class DefineChange(FunctionLikeChange):
+    kind: str = 'def'
+    value: bool = False
+    new: Define
+    old: Define
+
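+# Items (enum values, parameters, struct fields) are matched by name only; the
+# first occurrence of each name wins. The result is a (deleted, matched, added)
+# triple, so a pure reorder shows up later through the "index" attribute of a
+# matched pair rather than as a delete-plus-add.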
+def match_items(new: 'list[EnumValue | Param | StructField]', old: 'list[EnumValue | Param | StructField]') -> 'tuple[list[EnumValue | Param | StructField], list[tuple[EnumValue | Param | StructField, EnumValue | Param | StructField]], list[EnumValue | Param | StructField]]':
+    def by_name(items: 'list[EnumValue | Param | StructField]'):
+        result = {}
+        for item in items:
+            if item.name not in result:
+                result[item.name] = item
+        return result
+
+    new_by_name = by_name(new)
+    old_by_name = by_name(old)
+
+    deleted = set(old_by_name.values())
+    matched = []
+    added = []
+
+    for name, new_value in new_by_name.items():
+        if name in old_by_name:
+            matched.append((new_value, old_by_name[name]))
+            if old_by_name[name] in deleted:
+                deleted.remove(old_by_name[name])
+        else:
+            added.append(new_value)
+
+    deleted = list(deleted)
+
+    return deleted, matched, added
+
+
+def get_add_delete_change(node: Node, action: str) -> 'list[AnyChange]':
+    if isinstance(node, Typedef):
+        return [TypedefChange(action, node, node)]
+    elif isinstance(node, Variable):
+        return [VariableChange(action, node, node)]
+    elif isinstance(node, EnumValue):
+        return [EnumValueChange(action, node, node)]
+    elif isinstance(node, Enum):
+        if node.name:
+            return [EnumChange(action, node, node)]
+        else:
+            return []
+    elif isinstance(node, Struct):
+        return [StructChange(action, node, node)]
+    elif isinstance(node, Function):
+        return [FunctionChange(action, node, node)]
+    elif isinstance(node, Define):
+        return [DefineChange(action, node, node)]
+    else:
+        return []
+
+
+def get_modification_changes(new: Node, old: Node) -> 'list[AnyChange]':
+    result: 'list[AnyChange]' = []
+
+    if (new.kind != old.kind) or (type(new) is not type(old)) or isinstance(new, (File, Group)):
+        return []
+
+    node_change = None
+    updated = False
+
+    if isinstance(new, Typedef):
+        new: Typedef
+        old: Typedef
+        node_change = TypedefChange(MODIFIED, new, old)
+        node_change.type = new.type != old.type
+        updated = node_change.type
+
+    elif isinstance(new, Variable):
+        new: Variable
+        old: Variable
+        node_change = VariableChange(MODIFIED, new, old)
+        node_change.type = new.type != old.type
+        updated = node_change.type
+
+    elif isinstance(new, EnumValue):
+        new: EnumValue
+        old: EnumValue
+        node_change = EnumValueChange(MODIFIED, new, old)
+        node_change.value = new.value != old.value
+        updated = node_change.value
+
+    elif isinstance(new, Enum):
+        node_change = EnumChange(MODIFIED, new, old)
+
+    elif isinstance(new, Struct):
+        new: Struct
+        old: Struct
+        node_change = StructChange(MODIFIED, new, old)
+        deleted, matched, added = match_items(new.fields, old.fields)
+        for field in deleted:
+            node_change.fields.append(StructFieldChange(DELETED, field, field))
+        for field in added:
+            node_change.fields.append(StructFieldChange(ADDED, field, field))
+        for new_field, old_field in matched:
+            field_change = StructFieldChange(MODIFIED, new_field, old_field)
+            field_change.index = new_field.index != old_field.index
+            field_change.type = new_field.type != old_field.type
+            field_change.desc = new_field.desc != old_field.desc
+            if field_change.index or field_change.type or field_change.desc:
+                node_change.fields.append(field_change)
+        updated = len(node_change.fields) != 0
+
+    elif isinstance(new, FunctionLike):
+        new: FunctionLike
+        old: FunctionLike
+        if isinstance(new, Function):
+            node_change = FunctionChange(MODIFIED, new, old)
+            node_change.return_type = new.return_type != old.return_type
+            updated = node_change.return_type
+        else:
+            node_change = DefineChange(MODIFIED, new, old)
+            node_change.value = new.value != 
old.value + updated = node_change.value + deleted, matched, added = match_items(new.params, old.params) + for param in deleted: + node_change.params.append(ParamChange(DELETED, param, param)) + for param in added: + node_change.params.append(ParamChange(ADDED, param, param)) + for new_param, old_param in matched: + param_change = ParamChange(MODIFIED, new_param, old_param) + param_change.index = new_param.index != old_param.index + param_change.type = new_param.type != old_param.type + param_change.desc = new_param.desc != old_param.desc + if param_change.index or param_change.type or param_change.desc: + node_change.params.append(param_change) + updated = updated or (len(node_change.params) != 0) + else: + raise ValueError(str(new)) + + node_change.file = new.file != old.file + node_change.desc = new.desc != old.desc + + if updated or node_change.file or node_change.desc: + result.append(node_change) + + return result + + +def convert_to_long_key(group: 'dict[None]') -> dict[None]: + result = {} + for group_key, group_node in group.items(): + result[group_key + '>' + group_node.id + '>' + str(group_node.line)] = group_node + return result + + +def match_groups(matched: 'list[tuple[Node, Node]]', added: 'list[Node]', old_matched: 'set[Node]', new_group: 'dict[str, Node]', old_group: 'dict[str, Node]'): + new_is_long_key = tuple(new_group.keys())[0].count('>') > 0 + old_is_long_key = tuple(old_group.keys())[0].count('>') > 0 + if new_is_long_key and not old_is_long_key: + old_group = convert_to_long_key(old_group) + elif old_is_long_key and not new_is_long_key: + new_group = convert_to_long_key(new_group) + + for key, new_node in new_group.items(): + if key in old_group: + old_node = old_group[key] + matched.append((new_node, old_node)) + old_matched.add(old_node) + else: + added.append(new_node) + + +def compare_nodes(new: ParseResult, old: ParseResult) -> 'list[AnyChange]': + deleted: 'list[Node]' = [] + matched: 'list[tuple[Node, Node]]' = [] + added: 'list[Node]' = [] + old_matched: 'set[Node]' = set() + + for short_id, new_node in new.nodes_by_short_id.items(): + if short_id in old.nodes_by_short_id: + old_node = old.nodes_by_short_id[short_id] + if isinstance(new_node, dict) and isinstance(old_node, dict): + match_groups(matched, added, old_matched, new_node, old_node) + elif isinstance(new_node, dict): + match_groups(matched, added, old_matched, new_node, { old_node.file: old_node }) + elif isinstance(old_node, dict): + match_groups(matched, added, old_matched, { new_node.file: new_node }, old_node) + else: + matched.append((new_node, old_node)) + old_matched.add(old_node) + else: + if isinstance(new_node, dict): + for n in new_node.values(): + added.append(n) + else: + added.append(new_node) + + deleted = list(set(old.nodes) - old_matched) + + changes:'list[AnyChange]' = [] + + for node in deleted: + changes.extend(get_add_delete_change(node, DELETED)) + + for node in added: + changes.extend(get_add_delete_change(node, ADDED)) + + for nodes in matched: + changes.extend(get_modification_changes(nodes[0], nodes[1])) + + return changes + + +class CompareResult: + changes: 'list[AnyChange]' + groups: 'list[GroupChanges]' + files: 'list[FileChanges]' + + +def sort_changes(result: CompareResult): + result.changes.sort(key=lambda x: (x.new.file, x.new.line)) + result.files.sort(key=lambda x: x.name) + result.groups.sort(key=lambda x: x.name) + for file in result.files: + file.changes.sort(key=lambda x: x.new.line) + file.groups.sort(key=lambda x: x.name) + for group in file.groups: + 
group.changes.sort(key=lambda x: x.new.line) + for group in result.groups: + group.changes.sort(key=lambda x: (x.new.file, x.new.line)) + group.files.sort(key=lambda x: x.name) + for file in group.files: + file.changes.sort(key=lambda x: x.new.line) + + +def compare(new: ParseResult, old: ParseResult) -> CompareResult: + groups: 'dict[str, GroupChanges]' = {} + groups_in_files: 'dict[str, GroupChanges]' = {} + files: 'dict[str, FileChanges]' = {} + files_in_groups: 'dict[str, FileChanges]' = {} + changes = compare_nodes(new, old) + for change in changes: + node: Node = change.new + group: 'Group | None' = None + for parent_id in (node.parent_ids or []): + parent = None + if parent_id in new.nodes_by_id: + parent = new.nodes_by_id[parent_id] + if parent_id in old.nodes_by_id: + parent = old.nodes_by_id[parent_id] + if parent and isinstance(parent, Group): + group = parent + file_name = node.file + group_name = group.name if group else '' + combined_name = f'{file_name}```{group_name}' + + if file_name in files: + file_changes = files[file_name] + else: + file_changes = FileChanges(file_name) + files[file_name] = file_changes + file_changes.changes.append(change) + + if group_name in groups: + group_changes = groups[group_name] + else: + group_changes = GroupChanges(group_name, group.title if group else 'Unassigned') + groups[group_name] = group_changes + group_changes.changes.append(change) + + if combined_name in files_in_groups: + file_in_group_changes = files_in_groups[combined_name] + group_in_file_changes = groups_in_files[combined_name] + else: + file_in_group_changes = FileChanges(file_name) + group_in_file_changes = GroupChanges(group_name, group.title if group else 'Unassigned') + files_in_groups[combined_name] = file_in_group_changes + groups_in_files[combined_name] = group_in_file_changes + group_changes.files.append(file_in_group_changes) + file_changes.groups.append(group_in_file_changes) + file_in_group_changes.changes.append(change) + group_in_file_changes.changes.append(change) + + result = CompareResult() + result.changes = changes + result.files = list(files.values()) + result.groups = list(groups.values()) + + sort_changes(result) + + return result diff --git a/scripts/ci/api_check/headers/dox_parser.py b/scripts/ci/api_check/headers/dox_parser.py new file mode 100644 index 000000000000..506cbb912b9c --- /dev/null +++ b/scripts/ci/api_check/headers/dox_parser.py @@ -0,0 +1,379 @@ +# Copyright (c) 2024 Nordic Semiconductor ASA +# +# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause + +import pickle +import re +import doxmlparser.index as dox_index +import doxmlparser.compound as dox_compound +from random import shuffle +from pathlib import Path +from json import JSONEncoder +from nodes import Define, Enum, EnumValue, File, Function, FunctionLike, Group, Node, SimpleNode, Struct, StructField, Typedef, Variable +from tools import concurrent_pool_iter, warning + +HEADER_FILE_EXTENSION = '.h' + +def parse_description(*args): + return '' # Convert descriptions to string + # + # + # + +def parse_location_description(node: Node, compound: 'dox_compound.compounddefType | dox_compound.memberdefType'): + loc = compound.location + if not loc: + node.file = '' + node.line = None + elif hasattr(loc, 'bodyfile') and loc.bodyfile and loc.bodyfile.endswith(HEADER_FILE_EXTENSION): + node.file = loc.bodyfile + node.line = loc.bodystart if hasattr(loc, 'bodystart') else None + elif hasattr(loc, 'file') and loc.file and loc.file.endswith(HEADER_FILE_EXTENSION): + node.file = loc.file + 
node.line = loc.line if hasattr(loc, 'line') else None + elif hasattr(loc, 'declfile') and loc.declfile and loc.declfile.endswith(HEADER_FILE_EXTENSION): + node.file = loc.declfile + node.line = loc.declline if hasattr(loc, 'declline') else None + elif hasattr(loc, 'bodyfile') and loc.bodyfile: + node.file = loc.bodyfile + node.line = loc.bodystart if hasattr(loc, 'bodystart') else None + elif hasattr(loc, 'file') and loc.file: + node.file = loc.file + node.line = loc.line if hasattr(loc, 'line') else None + elif hasattr(loc, 'declfile') and loc.declfile: + node.file = loc.declfile + node.line = loc.declline if hasattr(loc, 'declline') else None + else: + node.file = '' + node.line = None + node.desc = parse_description(compound) + + +def parse_linked_text(type: 'dox_compound.linkedTextType | None') -> str: + if not type: + return 'void' + result = '' + for part in type.content_: + part: dox_compound.MixedContainer + if part.category == dox_compound.MixedContainer.CategoryText: + result += part.value + elif (part.category == dox_compound.MixedContainer.CategoryComplex) and (part.name == 'ref'): + value: dox_compound.refTextType = part.value + result += value.valueOf_ + return result.strip() + + +def parse_function_like(node: FunctionLike, memberdef: dox_compound.memberdefType): + parse_location_description(node, memberdef) + for dox_param in memberdef.param: + dox_param: dox_compound.paramType + param = node.add_param() + param.desc = parse_description(dox_param) + param.name = dox_param.declname or dox_param.defname + param.type = parse_linked_text(dox_param.get_type()) + +def parse_function(memberdef: dox_compound.memberdefType) -> Function: + func = Function(memberdef.id, memberdef.name) + parse_function_like(func, memberdef) + func.return_type = parse_linked_text(memberdef.get_type()) + return func + +def parse_define(memberdef: dox_compound.memberdefType) -> Define: + define = Define(memberdef.id, memberdef.name) + parse_function_like(define, memberdef) + define.value = parse_linked_text(memberdef.initializer) + return define + +def parse_enum(memberdef: dox_compound.memberdefType, name_override: str=None) -> 'list[Enum | EnumValue]': + result: 'list[Enum | EnumValue]' = [] + enum = Enum(memberdef.id, name_override or memberdef.name) + parse_location_description(enum, memberdef) + result.append(enum) + last_value = '' + increment = 0 + for dox_value in memberdef.enumvalue: + dox_value: dox_compound.enumvalueType + enum_value = EnumValue(dox_value.id, dox_value.name) + enum_value.file = enum.file + enum_value.line = enum.line + enum_value.desc = parse_description(dox_value) + enum_value.value = parse_linked_text(dox_value.initializer) + while enum_value.value.startswith('='): + enum_value.value = enum_value.value[1:].strip() + if enum_value.value and (enum_value.value != 'void'): + last_value = enum_value.value + increment = 1 + else: + enum_value.value = f'{last_value} + {increment}' if last_value else str(increment) + increment += 1 + result.append(enum_value) + return result + +def parse_simple_node(node: SimpleNode, memberdef: dox_compound.memberdefType) -> SimpleNode: + parse_location_description(node, memberdef) + node.type = parse_linked_text(memberdef.get_type()) + (memberdef.argsstring or '') + return node + +def parse_memberdef(memberdef: dox_compound.memberdefType) -> 'list[Node]': + result: 'list[Node]' = [] + if memberdef.kind == dox_compound.DoxMemberKind.FUNCTION: + result.append(parse_function(memberdef)) + elif memberdef.kind == dox_compound.DoxMemberKind.DEFINE: + 
result.append(parse_define(memberdef)) + elif memberdef.kind == dox_compound.DoxMemberKind.ENUM: + result.extend(parse_enum(memberdef)) + elif memberdef.kind == dox_compound.DoxMemberKind.TYPEDEF: + result.append(parse_simple_node(Typedef(memberdef.id, memberdef.name), memberdef)) + elif memberdef.kind == dox_compound.DoxMemberKind.VARIABLE: + result.append(parse_simple_node(Variable(memberdef.id, memberdef.name), memberdef)) + else: + warning(f'Unknown member kind "{memberdef.kind}".') + return result + + +def parse_file_or_group(node: 'File | Group', compound: dox_compound.compounddefType) -> 'list[Node]': + result: 'list[Node]' = [node] + parse_location_description(node, compound) + for inner_ref in (compound.innerclass or []) + (compound.innergroup or []): + inner_ref: dox_compound.refType + node.add_child(inner_ref.refid) + for sectiondef in compound.sectiondef or []: + sectiondef: dox_compound.sectiondefType + for member in sectiondef.member: + member: dox_compound.MemberType + node.add_child(member.refid) + for memberdef in sectiondef.memberdef or []: + children = parse_memberdef(memberdef) + for child in children: + child: Node + node.add_child(child.id) + result.extend(children) + return result + + +def parse_file(compound: dox_compound.compounddefType) -> 'list[Node]': + file = File(compound.id, compound.compoundname) + return parse_file_or_group(file, compound) + + +def parse_group(compound: dox_compound.compounddefType) -> 'list[Node]': + group = Group(compound.id, compound.compoundname) + group.title = compound.title + return parse_file_or_group(group, compound) + + +def parse_field_with_macro(memberdef: dox_compound.memberdefType) -> StructField: + field = StructField(memberdef.id, memberdef.name) + parse_location_description(field, memberdef) + argsstring: str = (memberdef.argsstring or '') + regex = r'^\s*\(\s*([a-z_0-9]+)(?:\(.*?\)|.)*?\)(?:\s*([A-Z_0-9]+)\s*$)?' 
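+    # Heuristic for struct fields declared through macros, which doxygen reports
+    # as "functions": the argsstring looks roughly like "(name, ...) SUFFIX"; group 1
+    # captures the first identifier inside the parentheses and group 2 an optional
+    # trailing macro identifier. The exact shapes handled here are an assumption
+    # based on the replacements applied below.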
+ matches = re.search(regex, argsstring, re.IGNORECASE | re.DOTALL) + field.type = parse_linked_text(memberdef.get_type()) + if matches: + if len(field.type) > 0: + field.type += ' ' + field.type += field.name + if matches.group(2): + field.type += (argsstring[:matches.start(2)].strip() + argsstring[matches.end(2):].strip()).strip() + field.name = matches.group(2) + else: + field.type += (argsstring[:matches.start(1)].strip() + argsstring[matches.end(1):].strip()).strip() + field.name = matches.group(1) + else: + field.type = parse_linked_text(memberdef.get_type()) + argsstring + return field + +def parse_struct(compound: dox_compound.compounddefType, is_union: bool) -> 'list[Node]': + result: 'list[Node]' = [] + struct = Struct(compound.id, compound.compoundname, is_union) + parse_location_description(struct, compound) + for sectiondef in compound.sectiondef or []: + sectiondef: dox_compound.sectiondefType + for memberdef in sectiondef.memberdef or []: + memberdef: dox_compound.memberdefType + if memberdef.kind == dox_compound.DoxMemberKind.VARIABLE: + field: StructField = parse_simple_node(StructField(memberdef.id, memberdef.name), memberdef) + field.index = len(struct.fields) + struct.fields.append(field) + elif memberdef.kind == dox_compound.DoxMemberKind.FUNCTION: + field = parse_field_with_macro(memberdef) + field.index = len(struct.fields) + struct.fields.append(field) + elif memberdef.kind == dox_compound.DoxMemberKind.ENUM: + full_name = memberdef.qualifiedname + if not memberdef.name: + full_name += '::' + memberdef.id + enum = parse_enum(memberdef, full_name) + result.extend(enum) + else: + warning(f'Unknown structure member kind "{memberdef.kind}", name {memberdef.name} in {struct.name}, {struct.file}:{struct.line}') + result.append(struct) + return result + + +def process_compound(file_name: str) -> 'list[Node]': + result: list[Node] = [] + for compound in dox_compound.parse(file_name, True, True).get_compounddef(): + compound: dox_compound.compounddefType + if compound.kind == dox_index.CompoundKind.FILE: + result.extend(parse_file(compound)) + elif compound.kind == dox_index.CompoundKind.GROUP: + result.extend(parse_group(compound)) + elif compound.kind in (dox_index.CompoundKind.STRUCT, + dox_index.CompoundKind.CLASS, + dox_index.CompoundKind.UNION): + result.extend(parse_struct(compound, (compound.kind == dox_index.CompoundKind.UNION))) + else: + warning(f'Unexpected doxygen compound kind: "{compound.kind}"') + return result + + +class ParseResult: + nodes: 'list[Node]' + nodes_by_id: 'dict[str, Node]' + nodes_by_short_id: 'dict[str, Node | dict[str, Node]]' + def __init__(self): + self.nodes = [] + self.nodes_by_id = {} + self.nodes_by_short_id = {} + + +def first_node(a: Node, b: Node): + properties = set(filter(lambda x: not x.startswith('_'), dir(a))) + properties = list(properties.union(set(filter(lambda x: not x.startswith('_'), dir(b))))) + properties.sort() + for name in properties: + a_value = getattr(a, name) if hasattr(a, name) else None + b_value = getattr(b, name) if hasattr(b, name) else None + if callable(a_value) or callable(b_value) or isinstance(a_value, (set, list)) or isinstance(b_value, (set, list)): + continue + if (a_value is None) and (b_value is None): continue + if (a_value is None) and (b_value is not None): return False + if (a_value is not None) and (b_value is None): return True + a_value = str(a_value) + b_value = str(b_value) + if a_value == b_value: + continue + return a_value > b_value + return True + + +def prepare_result(nodes: 
'list[Node]') -> ParseResult:
+    result = ParseResult()
+    # Add node to nodes_by_id dictionary
+    for node in nodes:
+        if node.id in result.nodes_by_id:
+            other = result.nodes_by_id[node.id]
+            if node.get_short_id() != other.get_short_id():
+                warning(f'Doxygen identifier repeated for multiple nodes: {node.id}')
+            # If overriding, always select the same node to ensure deterministic behavior
+            result.nodes_by_id[node.id] = node if first_node(node, other) else other
+        else:
+            result.nodes_by_id[node.id] = node
+    # Use only accessible nodes
+    result.nodes = list(result.nodes_by_id.values())
+    # Add node to nodes_by_short_id dictionary
+    for node in result.nodes:
+        short_id = node.get_short_id()
+        if short_id in result.nodes_by_short_id:
+            # Create or update group with the same short id
+            other = result.nodes_by_short_id[short_id]
+            if isinstance(other, dict):
+                group = other
+            else:
+                group = {}
+                group[other.file] = other
+            # Generate a key for this node
+            if tuple(group.keys())[0].count('>') > 0:
+                # If group contains keys with doxygen id, use file path and id as key
+                key = node.file + '>' + node.id + '>' + str(node.line)
+            else:
+                # If group does not contain keys with doxygen id, use file path only
+                key = node.file
+            # In case of duplicate, convert keys to keys with doxygen id
+            if key in group:
+                key += '>' + node.id + '>' + str(node.line)
+                new_group = {}
+                for group_key, group_node in group.items():
+                    new_group[group_key + '>' + group_node.id + '>' + str(group_node.line)] = group_node
+                group = new_group
+            # Set node and group
+            group[key] = node
+            result.nodes_by_short_id[short_id] = group
+        else:
+            result.nodes_by_short_id[short_id] = node
+    # Fix parent-child relations: delete nonexistent links and create links in both directions
+    for node in result.nodes:
+        if node.parent_ids:
+            new_set = set()
+            for parent_id in node.parent_ids:
+                if parent_id in result.nodes_by_id:
+                    new_set.add(parent_id)
+                    result.nodes_by_id[parent_id].add_child(node.id)
+            node.parent_ids = new_set
+        if node.children_ids:
+            new_set = set()
+            for child_id in node.children_ids:
+                if child_id in result.nodes_by_id:
+                    new_set.add(child_id)
+                    result.nodes_by_id[child_id].add_parent(node.id)
+            node.children_ids = new_set
+    return result
+
+
+def save_doxygen(parse_result: ParseResult, file: Path):
+    with open(file, 'wb') as fd:
+        pickle.dump(parse_result.nodes, fd)
+
+
+def dump_doxygen_json(parse_result: ParseResult, file: Path):
+    def default_nodes(o):
+        if isinstance(o, set):
+            return list(o)
+        else:
+            d = {'__id__': id(o)}
+            for name in tuple(dir(o)):
+                if not name.startswith('_'):
+                    value = getattr(o, name)
+                    if not callable(value):
+                        d[name] = value
+            return d
+    def default_refs(o):
+        return f'__refid__{id(o)}'
+    nodes_json = JSONEncoder(sort_keys=False, indent=4, default=default_nodes).encode(parse_result.nodes)
+    by_id_json = JSONEncoder(sort_keys=False, indent=4, default=default_refs).encode(parse_result.nodes_by_id)
+    by_short_id_json = JSONEncoder(sort_keys=False, indent=4, default=default_refs).encode(parse_result.nodes_by_short_id)
+    with open(file, 'w') as fd:
+        fd.write('{\n"nodes": ' + nodes_json + ',\n"by_id": ' + by_id_json + ',\n"by_short_id": ' + by_short_id_json + '\n}\n')
+
+
+def parse_doxygen(dir_or_file: Path) -> ParseResult:
+    nodes: 'list[Node]' = []
+    if dir_or_file.is_dir():
+        index = dox_index.parse(dir_or_file / 'index.xml', True, True)
+        files: 'list[str]' = []
+        for compound in index.get_compound():
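+            # Only compound kinds that can carry C API declarations are parsed;
+            # pages, directories, and similar documentation-only compounds are skipped.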
+            if compound.kind in (dox_index.CompoundKind.FILE,
+                                 dox_index.CompoundKind.GROUP,
+                                 dox_index.CompoundKind.STRUCT,
+                                 dox_index.CompoundKind.CLASS,
+                                 dox_index.CompoundKind.UNION):
+                files.append(dir_or_file / (compound.refid + '.xml'))
+            elif compound.kind in (dox_index.CompoundKind.PAGE,
+                                   dox_index.CompoundKind.DIR,
+                                   dox_index.CompoundKind.CATEGORY,
+                                   dox_index.CompoundKind.CONCEPT,
+                                   dox_index.CompoundKind.EXAMPLE):
+                pass
+            else:
+                warning(f'Unknown doxygen compound kind: "{compound.kind}"')
+        # Process files in random order to balance load across the worker pool
+        shuffle(files)
+        for node, _, _ in concurrent_pool_iter(process_compound, files, True, 20):
+            nodes.extend(node)
+    else:
+        with open(dir_or_file, 'rb') as fd:
+            nodes = pickle.load(fd)
+    return prepare_result(nodes)
diff --git a/scripts/ci/api_check/headers/main.py b/scripts/ci/api_check/headers/main.py
new file mode 100644
index 000000000000..7a959b20a75a
--- /dev/null
+++ b/scripts/ci/api_check/headers/main.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import sys
+import json
+import out_text
+from args import args
+from compare import compare
+from dox_parser import dump_doxygen_json, parse_doxygen, save_doxygen
+
+
+def main():
+    new_input = parse_doxygen(args.new_input)
+
+    if args.old_input:
+        old_input = parse_doxygen(args.old_input)
+    else:
+        old_input = None
+
+    if args.save_input:
+        save_doxygen(new_input, args.save_input)
+
+    if args.save_old_input and old_input:
+        save_doxygen(old_input, args.save_old_input)
+
+    if args.dump_json:
+        if old_input:
+            out_dir = args.dump_json.parent
+            name = args.dump_json.name
+            suffix = args.dump_json.suffix
+            dump_doxygen_json(new_input, out_dir / (name[0:-len(suffix)] + '.new' + suffix))
+            dump_doxygen_json(old_input, out_dir / (name[0:-len(suffix)] + '.old' + suffix))
+        else:
+            dump_doxygen_json(new_input, args.dump_json)
+
+    level = 0
+
+    if old_input:
+        changes = compare(new_input, old_input)
+        stats = out_text.generate(changes)
+        if args.save_stats:
+            args.save_stats.write_text(json.dumps({
+                'notice': stats[1],
+                'warning': stats[2],
+                'critical': stats[3],
+            }, indent=2))
+        for i, count in enumerate(stats):
+            if count > 0:
+                level = i
+
+    sys.exit(level)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/ci/api_check/headers/nodes.py b/scripts/ci/api_check/headers/nodes.py
new file mode 100644
index 000000000000..0a2cd4d1f803
--- /dev/null
+++ b/scripts/ci/api_check/headers/nodes.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+
+class Node:
+    id: str
+    kind: str = ''
+    name: str = ''
+    file: str = ''
+    line: 'int | None' = None
+    parent_ids: 'set[str] | None' = None
+    children_ids: 'set[str] | None' = None
+    desc: str = ''
+    def __init__(self, id: str, name: str):
+        self.id = id
+        self.name = name
+    def get_short_id(self):
+        return self.kind + ':' + str(self.name)
+    def add_parent(self, parent: str):
+        if not self.parent_ids:
+            self.parent_ids = set()
+        self.parent_ids.add(parent)
+    def add_child(self, child: str):
+        if not self.children_ids:
+            self.children_ids = set()
+        self.children_ids.add(child)
+
+
+class File(Node):
+    kind: str = 'file'
+
+
+class Group(Node):
+    kind: str = 'group'
+    title: str = ''
+
+
+class SimpleNode(Node):
+    type: str = ''
+
+
+class StructField(SimpleNode):
+    kind: str = 'field'
+    index: int = 0
+
+
+class Struct(Node):
+    kind: str
+    is_union: bool
+    fields: 'list[StructField]'
+    def __init__(self, id: str, name: str, is_union: bool):
+        super().__init__(id, name)
+        self.is_union = is_union
+        self.kind = 'union' if is_union else 'struct'
+        self.fields = []
+
+
+class Param:
+    index: int = 0
+    name: str = ''
+    type: str = ''
+    desc: str = ''
+
+
+class FunctionLike(Node):
+    params: 'list[Param]'
+    def __init__(self, id: str, name: str):
+        super().__init__(id, name)
+        self.params = []
+    def add_param(self):
+        param = Param()
+        param.index = len(self.params)
+        self.params.append(param)
+        return param
+
+
+class Function(FunctionLike):
+    kind: str = 'func'
+    return_type: str = 'void'
+
+
+class Define(FunctionLike):
+    kind: str = 'def'
+    value: str = ''
+
+
+class EnumValue(Node):
+    kind: str = 'enum_value'
+    value: str = ''
+
+
+class Enum(Node):
+    kind: str = 'enum'
+
+
+class Typedef(SimpleNode):
+    kind: str = 'typedef'
+
+
+class Variable(SimpleNode):
+    kind: str = 'var'
diff --git a/scripts/ci/api_check/headers/out_text.py b/scripts/ci/api_check/headers/out_text.py
new file mode 100644
index 000000000000..10e4729ef263
--- /dev/null
+++ b/scripts/ci/api_check/headers/out_text.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+from pathlib import Path
+from compare import AnyChange, CompareResult
+from jinja2 import Template
+from args import args
+
+
+def compile_messages(messages):
+    result = {}
+    for key, message in messages.items():
+        if message.startswith('ignore'):
+            level = 0
+        elif message.startswith('notice'):
+            level = 1
+        elif message.startswith('warning'):
+            level = 2
+        elif message.startswith('critical'):
+            level = 3
+        else:
+            raise ValueError(f'Unknown level of message: {message}')
+        result[key] = (Template(message), level)
+    return result
+
+
+messages: 'dict[str, tuple[Template, int]]' = compile_messages({
+    'typedef-added': 'ignore',
+    'typedef-deleted': 'critical: Type "{{old.name}}" definition deleted.',
+    'typedef-modified-file': 'warning: Type "{{new.name}}" definition moved to a different file.',
+    'typedef-modified-desc': 'notice: Type "{{new.name}}" definition description changed.',
+    'typedef-modified-type': 'warning: Type "{{new.name}}" definition changed.',
+    'var-added': 'ignore',
+    'var-deleted': 'critical: Variable "{{old.name}}" deleted.',
+    'var-modified-file': 'warning: Variable "{{new.name}}" moved to a different file.',
+    'var-modified-desc': 'notice: Variable "{{new.name}}" description changed.',
+    'var-modified-type': 'warning: Variable "{{new.name}}" type changed.',
+    'enum_value-added': 'ignore',
+    'enum_value-deleted': 'critical: Enum value "{{old.name}}" deleted.',
+    'enum_value-modified-value': 'warning: Enum value "{{new.name}}" changed.',
+    'enum_value-modified-desc': 'notice: Enum value "{{new.name}}" description changed.',
+    'enum_value-modified-file': 'warning: Enum value "{{new.name}}" moved to a different file.',
+    'enum-added': 'ignore',
+    'enum-deleted': 'critical: Enum "{{old.name}}" deleted.',
+    'enum-modified-file': 'warning: Enum "{{new.name}}" moved to a different file.',
+    'enum-modified-desc': 'notice: Enum "{{new.name}}" description changed.',
+    'struct-added': 'ignore',
+    'struct-deleted': 'critical: Structure "{{old.name}}" deleted.',
+    'struct-modified-file': 'warning: Structure "{{new.name}}" moved to a different file.',
+    'struct-modified-desc': 'notice: Structure "{{new.name}}" description changed.',
+    'func-added': 'ignore',
+    'func-deleted': 'critical: Function "{{old.name}}" deleted.',
+    'func-modified-return_type': 'warning: Function "{{new.name}}" return type changed.',
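+    # The keys follow the "<kind>-<action>" or "<kind>-modified-<field>"
+    # pattern matched in generate_changes() below; the first word of each
+    # template ("ignore", "notice", "warning" or "critical") selects the
+    # severity level assigned by compile_messages() above.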
+    'func-modified-file': 'warning: Function "{{new.name}}" moved to a different file.',
+    'func-modified-desc': 'notice: Function "{{new.name}}" description changed.',
+    'def-added': 'ignore',
+    'def-deleted': 'critical: Definition "{{old.name}}" deleted.',
+    'def-modified-value': 'notice: Definition "{{new.name}}" value changed.',
+    'def-modified-file': 'warning: Definition "{{new.name}}" moved to a different file.',
+    'def-modified-desc': 'notice: Definition "{{new.name}}" description changed.',
+    'field-added': 'ignore',
+    'field-deleted': 'critical: Structure "{{struct.new.name}}" field "{{new.name}}" deleted.',
+    'field-modified-index': 'ignore',
+    'field-modified-type': 'warning: Structure "{{struct.new.name}}" field "{{new.name}}" type changed.',
+    'field-modified-desc': 'notice: Structure "{{struct.new.name}}" field "{{new.name}}" description changed.',
+    'param-added': 'critical: Parameter "{{new.name}}" added in "{{parent.new.name}}".',
+    'param-deleted': 'critical: Parameter "{{old.name}}" deleted from "{{parent.new.name}}".',
+    'param-modified-index': 'critical: Parameter "{{new.name}}" reordered in "{{parent.new.name}}".',
+    'param-modified-type': 'warning: Parameter "{{new.name}}" type changed in "{{parent.new.name}}".',
+    'param-modified-desc': 'notice: Parameter "{{new.name}}" description changed in "{{parent.new.name}}".',
+})
+
+
+# Index 0 ("::ignore") is a placeholder: level-0 messages are filtered out in
+# generate_changes(), so only notice, warning and error commands are emitted.
+github_commands = [
+    '::ignore',
+    '::notice',
+    '::warning',
+    '::error'
+]
+
+github_titles = [
+    'Ignore',
+    'Notice',
+    'Warning',
+    'Critical',
+]
+
+def encode(text: str, is_message: bool):
+    # Escape text for a GitHub Actions workflow command: messages escape '%',
+    # CR and LF; property values additionally escape ',' and '::'.
+    text = text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A')
+    if not is_message:
+        text = text.replace(',', '%2C').replace('::', '%3A%3A')
+    return text
+
+def show_message(file: Path, line: 'str | int | None', message: str, level: int):
+    if args.resolve_paths is not None:
+        file = args.resolve_paths.joinpath(file).absolute()
+    if args.relative_to is not None:
+        file = file.relative_to(args.relative_to)
+    if args.format == 'github':
+        command = github_commands[level]
+        title = f'Compatibility {github_titles[level]}'
+        if line is not None:
+            print(f'{command} file={encode(str(file), False)},line={line},title={title}::{encode(message, True)}')
+        else:
+            print(f'{command} file={encode(str(file), False)},title={title}::{encode(message, True)}')
+    elif line is not None:
+        print(f'{file}:{line}: {message}')
+    else:
+        print(f'{file}: {message}')
+
+
+def generate_changes(stats: 'list[int]', changes: 'list[AnyChange]',
+                     location: 'tuple[Path, int | None]', **kwargs):
+    for change in changes:
+        prefix = f'{change.kind}-{change.action}'
+        if change.new and hasattr(change.new, 'file') and change.new.file:
+            if hasattr(change.new, 'line') and change.new.line:
+                loc = (Path(change.new.file), change.new.line)
+            else:
+                loc = (Path(change.new.file), None)
+        else:
+            loc = location
+        for key, (template, level) in messages.items():
+            if key.startswith(prefix) and (level > 0):
+                data = {}
+                for name in dir(change):
+                    value = getattr(change, name)
+                    if (not callable(value)) and (not name.startswith('_')):
+                        data[name] = value
+                for name, value in kwargs.items():
+                    data[name] = value
+                message = template.render(**data)
+                if key == prefix:
+                    show_message(loc[0], loc[1], message, level)
+                    stats[level] += 1
+                else:
+                    field = key[len(prefix) + 1:]
+                    value = getattr(change, field)
+                    if value:
+                        show_message(loc[0], loc[1], message, level)
+                        stats[level] += 1
+        if prefix == 'struct-modified':
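+            # Nested changes are reported recursively: field changes render
+            # with the enclosing structure passed as "struct", and parameter
+            # changes with the enclosing function or macro passed as "parent",
+            # so the templates above can refer to "{{struct.new.name}}" and
+            # "{{parent.new.name}}".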
+            generate_changes(stats, change.fields, loc, struct=change)
+        elif prefix in ('func-modified', 'def-modified'):
+            generate_changes(stats, change.params, loc, parent=change)
+
+
+def generate(compare_result: CompareResult) -> 'list[int]':
+    stats = [0, 0, 0, 0]
+    for group in compare_result.groups:
+        if group.name:
+            print(f'=== Group {group.name}: {group.title} ===')
+        generate_changes(stats, group.changes, (Path(), None))
+    return stats
diff --git a/scripts/ci/api_check/headers/tools.py b/scripts/ci/api_check/headers/tools.py
new file mode 100644
index 000000000000..8fb7f91a2486
--- /dev/null
+++ b/scripts/ci/api_check/headers/tools.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import os
+import sys
+import concurrent.futures
+from typing import Callable, Iterable
+
+
+def warning(*args, **kwargs):
+    args = ('\x1B[33mwarning:\x1B[0m', *args)
+    print(*args, **kwargs, file=sys.stderr)
+
+
+def error(*args, **kwargs):
+    args = ('\x1B[31merror:\x1B[0m', *args)
+    print(*args, **kwargs, file=sys.stderr)
+
+
+process_executor = None
+thread_executor = None
+
+
+def concurrent_pool_iter(func: Callable, iterable: Iterable, use_process: bool=False,
+                         threshold: int=2):
+    ''' Call a function for each item of iterable in a separate thread or process.
+
+    The number of parallel executors is determined by the CPU count.
+
+    @param func        Function to call
+    @param iterable    Input iterator
+    @param use_process Runs function on a separate process when True, thread when False
+    @param threshold   If the number of elements in iterable is less than threshold,
+                       no parallel threads or processes will be started.
+    @returns           Iterator over tuples containing: return value of func, input
+                       element, index of that element (starting from 0)
+    '''
+    global process_executor, thread_executor
+    collected = iterable if isinstance(iterable, tuple) else tuple(iterable)
+    if len(collected) >= threshold:
+        executor_workers = os.cpu_count()
+        if executor_workers is None or executor_workers < 1:
+            executor_workers = 1
+        if use_process:
+            if process_executor is None:
+                process_executor = concurrent.futures.ProcessPoolExecutor(executor_workers)
+            executor = process_executor
+        else:
+            if thread_executor is None:
+                thread_executor = concurrent.futures.ThreadPoolExecutor(executor_workers)
+            executor = thread_executor
+        chunksize = (len(collected) + executor_workers - 1) // executor_workers
+        it = executor.map(func, collected, chunksize=chunksize)
+    else:
+        it = map(func, collected)
+    return zip(it, collected, range(len(collected)))
diff --git a/scripts/ci/api_check/pr/__main__.py b/scripts/ci/api_check/pr/__main__.py
new file mode 100644
index 000000000000..ce7e40b98bba
--- /dev/null
+++ b/scripts/ci/api_check/pr/__main__.py
@@ -0,0 +1,2 @@
+from main import main
+main()
diff --git a/scripts/ci/api_check/pr/main.py b/scripts/ci/api_check/pr/main.py
new file mode 100644
index 000000000000..5e4fb46cbc66
--- /dev/null
+++ b/scripts/ci/api_check/pr/main.py
@@ -0,0 +1,122 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import os
+import re
+import sys
+import json
+from pathlib import Path
+from github import Github
+from types import SimpleNamespace
+from jinja2 import Template
+from github.Repository import Repository
+from github.PullRequest import PullRequest
+from github.IssueComment import IssueComment
+from github.WorkflowRun import WorkflowRun
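+
+# The script posts or updates a single comment on the pull request. The
+# comment is rendered from pr-comment.md.jinja and can carry hidden
+# "<!-- keyword: value -->" HTML comments as metadata, for example
+# "<!-- add-label: XYZ -->" or "<!-- exit-code: 1 -->" (see get_meta below).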
+
+
+# Hidden marker identifying this script's own comment on the pull request.
+# The exact text is arbitrary; it only has to be unique and invisible in the
+# rendered comment.
+API_CHECK_COMMENT_INDICATOR = '<!-- API-check comment -->'
+
+
+class TemplateData(SimpleNamespace):
+    notice: int
+    warning: int
+    critical: int
+    github_actor: str
+    repo: Repository
+    pr: PullRequest
+    run: WorkflowRun
+    def __init__(self, file: os.PathLike):
+        with open(file, 'r') as fd:
+            data = json.load(fd)
+        super().__init__(**data)
+
+def fatal(*args, **kwargs):
+    print(*args, **kwargs, file=sys.stderr)
+    sys.exit(1)
+
+def get_stats() -> TemplateData:
+    stats: 'TemplateData | None' = None
+    for arg in sys.argv[1:]:
+        if not Path(arg).exists():
+            fatal(f'The file "{arg}" does not exist. The check script probably failed.')
+        file_stats = TemplateData(arg)
+        if stats:
+            stats.notice += file_stats.notice
+            stats.warning += file_stats.warning
+            stats.critical += file_stats.critical
+        else:
+            stats = file_stats
+    if stats is None:
+        fatal('No input files.')
+    return stats
+
+def get_message(data: TemplateData) -> str:
+    template_path: Path = Path(__file__).parent / 'pr-comment.md.jinja'
+    template = Template(template_path.read_text())
+    message = API_CHECK_COMMENT_INDICATOR + '\n' + template.render(**data.__dict__).strip()
+    return message
+
+def get_meta(message, keyword) -> 'list[str]':
+    # Collect values from metadata comments like "<!-- keyword: value -->".
+    result = []
+    for match in re.finditer(r'<!--\s*' + re.escape(keyword) + r':\s*(.*?)\s*-->', message, re.DOTALL):
+        result.append(match.group(1))
+    return result
+
+def main():
+    data = get_stats()
+    print('Stats', data)
+
+    github = Github(os.environ['GITHUB_TOKEN'])
+    print(f'Github API connected. Remaining requests {github.rate_limiting[0]} of {github.rate_limiting[1]}.')
+
+    data.github_actor = os.environ['GITHUB_ACTOR']
+    print(f'Github user: {data.github_actor}')
+
+    data.repo = github.get_repo(os.environ['GITHUB_REPO'], lazy=True)
+    data.pr = data.repo.get_pull(int(os.environ['PR_NUMBER']))
+    print(f'Pull request: {data.pr.title} #{data.pr.number} {data.pr.html_url}')
+
+    data.run = data.repo.get_workflow_run(int(os.environ['GITHUB_RUN_ID']))
+    print(f'Workflow run: {data.run.id}')
+
+    message = get_message(data)
+    print(f'Comment message:\n{message}\n------------------------------------')
+
+    comment: 'IssueComment | None'
+    for comment in data.pr.get_issue_comments():
+        if comment.body.strip().startswith(API_CHECK_COMMENT_INDICATOR):
+            if message == comment.body:
+                print(f'Comment unchanged: {comment.html_url}')
+            else:
+                print(f'Editing comment: {comment.html_url}')
+                comment.edit(message)
+            break
+    else:
+        print('Adding new comment.')
+        comment = data.pr.create_issue_comment(message)
+        print(f'Added comment: {comment.html_url}')
+
+    labels = get_meta(message, 'add-label')
+    if len(labels) > 0:
+        print(f'Adding labels: {", ".join(labels)}')
+        data.pr.add_to_labels(*labels)
+
+    for label in get_meta(message, 'remove-label'):
+        print(f'Removing label: {label}')
+        for existing_label in data.pr.labels:
+            if existing_label.name == label:
+                data.pr.remove_from_labels(label)
+                break
+        else:
+            print(f'Label already removed: {label}')
+
+    exit_code = 0
+    for value in get_meta(message, 'exit-code'):
+        exit_code = int(value)
+    sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/ci/api_check/pr/pr-comment.md.jinja b/scripts/ci/api_check/pr/pr-comment.md.jinja
new file mode 100644
index 000000000000..caabc0d3d132
--- /dev/null
+++ b/scripts/ci/api_check/pr/pr-comment.md.jinja
@@ -0,0 +1,64 @@
+{#
+ # Copyright (c) 2024 Nordic Semiconductor ASA
+ #
+ # SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+  Jinja template for PR comment with API changes results.
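+
+  The template is rendered by main.py in this directory and posted as a pull
+  request comment, prefixed with a hidden indicator comment that lets the
+  script find and update its own comment on later runs.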
+
+  The following variables are available:
+    notice: int - number of notices detected
+    warning: int - number of warnings detected
+    critical: int - number of critical issues detected
+    github_actor: str - GitHub user name responsible for this workflow
+    repo: Repository - https://pygithub.readthedocs.io/en/stable/github_objects/Repository.html
+    pr: PullRequest - https://pygithub.readthedocs.io/en/stable/github_objects/PullRequest.html
+    run: WorkflowRun - https://pygithub.readthedocs.io/en/stable/github_objects/WorkflowRun.html
+
+  You can add the following metadata:
+
+  <!-- add-label: XYZ -->
+    Add label XYZ to the PR.
+
+  <!-- remove-label: XYZ -->
+    Remove label XYZ from the PR.
+
+  <!-- exit-code: N -->
+    Set exit code of the script. Setting an exit code other than 0 will cause
+    an error in the workflow and it will block the PR.
+#}
+
+{% if critical > 0 %}
+
+
+
+
+> [!CAUTION]
+> **This PR contains API-breaking changes. Remember to add the necessary entries to the migration guide.**
+>
+
+  | Count | Level | Comment
+----------------|------------------|----------|---------
+:red_circle: | **{{critical}}** | critical | The modification is a breaking change.
+:yellow_circle: | **{{warning}}** | warning | The modification may be a breaking change, but there is not enough context to determine this.
+:white_circle: | **{{notice}}** | notice | The API was modified, but the change probably keeps backward compatibility.
+
+See issue details in the [**job summary**]({{run.html_url}}?pr={{pr.number}}).
+
+{% elif notice + warning > 0 %}
+
+
+
+:+1: No critical API-breaking changes detected. You have [{{
+    (warning|string) + " warnings" if warning > 1 else "1 warning" if warning > 0 else ""
+}}{{
+    " and " if notice > 0 and warning > 0 else ""
+}}{{
+    (notice|string) + " notices" if notice > 1 else "1 notice" if notice > 0 else ""
+}}]({{run.html_url}}?pr={{pr.number}}) that you can review.
+
+{% else %}
+
+
+
+:+1: No API-breaking changes detected.
+
+{% endif %}
+
+
diff --git a/scripts/ci/api_check/pr/test-locally.sh b/scripts/ci/api_check/pr/test-locally.sh
new file mode 100755
index 000000000000..c4e0f1bfb8ec
--- /dev/null
+++ b/scripts/ci/api_check/pr/test-locally.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+set -e
+
+# GITHUB_TOKEN is read from the keyboard if the line below stays commented out.
+#GITHUB_TOKEN=
+GITHUB_ACTOR=...user...
+GITHUB_REPO=...user_or_organization/repo_name...
+PR_NUMBER=...number...
+GITHUB_RUN_ID=...number...
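+
+# Example values (entirely made up, for illustration only):
+#   GITHUB_ACTOR=octocat
+#   GITHUB_REPO=my-org/my-repo
+#   PR_NUMBER=42
+#   GITHUB_RUN_ID=9876543210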
+
+rm -Rf /tmp/test-pr-api-check
+mkdir -p /tmp/test-pr-api-check
+echo '{ "notice": 1, "warning": 3, "critical": 0 }' > /tmp/test-pr-api-check/headers.stats.json
+echo '{ "notice": 1, "warning": 0, "critical": 1 }' > /tmp/test-pr-api-check/dts.stats.json
+
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+if [ -z "$GITHUB_TOKEN" ]; then
+    read -p "GITHUB_TOKEN: " -s GITHUB_TOKEN
+fi
+
+export GITHUB_TOKEN
+export GITHUB_ACTOR
+export GITHUB_REPO
+export PR_NUMBER
+export GITHUB_RUN_ID
+
+python3 $SCRIPT_DIR /tmp/test-pr-api-check/headers.stats.json /tmp/test-pr-api-check/dts.stats.json
diff --git a/scripts/ci/api_check/requirements.txt b/scripts/ci/api_check/requirements.txt
new file mode 100644
index 000000000000..60e0746bb53a
--- /dev/null
+++ b/scripts/ci/api_check/requirements.txt
@@ -0,0 +1,4 @@
+doxmlparser ~= 1.10
+Jinja2 ~= 3.0
+psutil ~= 5.0
+PyGithub ~= 2.0
diff --git a/scripts/ci/api_check/utils/interrupt_on.py b/scripts/ci/api_check/utils/interrupt_on.py
new file mode 100644
index 000000000000..d76bc6e8568d
--- /dev/null
+++ b/scripts/ci/api_check/utils/interrupt_on.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
+
+import sys
+import psutil
+from signal import SIGINT
+from subprocess import Popen, PIPE
+
+# For example, to send Ctrl-C to a ninja build when it outputs "syncing doxygen output":
+# python interrupt_on.py "syncing doxygen output" ninja nrf
+
+def run_interrupted(match_text: str, command: 'list[str]'):
+    print('Run', command)
+    print(' and interrupt on:', match_text)
+    match_text = match_text.lower()
+    p = Popen(command, stdout=PIPE, stderr=None, encoding="utf-8")
+    interrupted = False
+    while True:
+        line = p.stdout.readline()
+        if line:
+            print(line, end='')
+            if line.lower().find(match_text) >= 0:
+                print('Sending SIGINT signal.')
+                # Interrupt the children first (e.g. the processes spawned by
+                # ninja), then the process itself, mimicking Ctrl-C pressed in
+                # a terminal.
+                parent = psutil.Process(p.pid)
+                for child in parent.children(recursive=True):
+                    child.send_signal(SIGINT)
+                parent.send_signal(SIGINT)
+                interrupted = True
+        elif p.poll() is not None:
+            # Stop only when the output is drained and the process has ended.
+            break
+    if interrupted:
+        print('Interrupted as expected.')
+    elif p.returncode:
+        print(f'Failed with return code {p.returncode}.')
+        sys.exit(p.returncode)
+    else:
+        print('Build not interrupted. You may experience a long build time.')
+
+if len(sys.argv) <= 2:
+    print(f'Usage: {sys.argv[0]} "Matching string" command parameters...')
+    sys.exit(1)
+
+run_interrupted(sys.argv[1], sys.argv[2:])