2018-03-01 14:53:49 +00:00
|
|
|
#!/usr/bin/env python3
|
2022-06-20 16:51:18 +00:00
|
|
|
"""This script compares the interfaces of two versions of Mbed TLS, looking
|
2021-04-23 14:37:12 +00:00
|
|
|
for backward incompatibilities between two different Git revisions within
|
|
|
|
an Mbed TLS repository. It must be run from the root of a Git working tree.
|
|
|
|
|
2022-06-20 16:51:44 +00:00
|
|
|
### How the script works ###
|
|
|
|
|
2021-04-23 14:37:12 +00:00
|
|
|
For the source (API) and runtime (ABI) interface compatibility, this script
|
|
|
|
is a small wrapper around the abi-compliance-checker and abi-dumper tools,
|
|
|
|
applying them to compare the header and library files.
|
|
|
|
|
|
|
|
For the storage format, this script compares the automatically generated
|
2022-02-22 18:02:44 +00:00
|
|
|
storage tests and the manual read tests, and complains if there is a
|
2022-03-04 18:59:55 +00:00
|
|
|
reduction in coverage. A change in test data will be signaled as a
|
2022-02-22 18:02:44 +00:00
|
|
|
coverage reduction since the old test data is no longer present. A change in
|
2022-03-04 18:59:55 +00:00
|
|
|
how test data is presented will be signaled as well; this would be a false
|
2022-02-22 18:02:44 +00:00
|
|
|
positive.
|
2021-04-23 14:37:12 +00:00
|
|
|
|
2022-02-22 18:02:44 +00:00
|
|
|
The results of the API/ABI comparison are either formatted as HTML and stored
|
|
|
|
at a configurable location, or are given as a brief list of problems.
|
|
|
|
Returns 0 on success, 1 on non-compliance, and 2 if there is an error
|
2021-04-23 14:37:12 +00:00
|
|
|
while running the script.
|
2022-03-03 09:23:09 +00:00
|
|
|
|
2022-06-20 16:51:44 +00:00
|
|
|
### How to interpret non-compliance ###
|
|
|
|
|
|
|
|
This script has relatively common false positives. In many scenarios, it only
|
|
|
|
reports a pass if there is a strict textual match between the old version and
|
|
|
|
the new version, and it reports problems where there is a sufficient semantic
|
|
|
|
match but not a textual match. This section lists some common false positives.
|
|
|
|
This is not an exhaustive list: in the end what matters is whether we are
|
|
|
|
breaking a backward compatibility goal.
|
|
|
|
|
|
|
|
**API**: the goal is that if an application works with the old version of the
|
|
|
|
library, it can be recompiled against the new version and will still work.
|
|
|
|
This is normally validated by comparing the declarations in `include/*/*.h`.
|
|
|
|
A failure is a declaration that has disappeared or that now has a different
|
|
|
|
type.
|
|
|
|
|
|
|
|
* It's ok to change or remove macros and functions that are documented as
|
|
|
|
for internal use only or as experimental.
|
|
|
|
* It's ok to rename function or macro parameters as long as the semantics
|
|
|
|
has not changed.
|
|
|
|
* It's ok to change or remove structure fields that are documented as
|
|
|
|
private.
|
|
|
|
* It's ok to add fields to a structure that already had private fields
|
|
|
|
or was documented as extensible.
|
|
|
|
|
|
|
|
**ABI**: the goal is that if an application was built against the old version
|
|
|
|
of the library, the same binary will work when linked against the new version.
|
|
|
|
This is normally validated by comparing the symbols exported by `libmbed*.so`.
|
|
|
|
A failure is a symbol that is no longer exported by the same library or that
|
|
|
|
now has a different type.
|
|
|
|
|
|
|
|
* All ABI changes are acceptable if the library version is bumped
|
|
|
|
(see `scripts/bump_version.sh`).
|
|
|
|
* ABI changes that concern functions which are declared only inside the
|
|
|
|
library directory, and not in `include/*/*.h`, are acceptable only if
|
|
|
|
the function was only ever used inside the same library (libmbedcrypto,
|
|
|
|
libmbedx509, libmbedtls). As a counter example, if the old version
|
|
|
|
of libmbedtls calls mbedtls_foo() from libmbedcrypto, and the new version
|
|
|
|
of libmbedcrypto no longer has a compatible mbedtls_foo(), this does
|
|
|
|
require a version bump for libmbedcrypto.
|
|
|
|
|
|
|
|
**Storage format**: the goal is to check that persistent keys stored by the
|
|
|
|
old version can be read by the new version. This is normally validated by
|
|
|
|
comparing the `*read*` test cases in `test_suite*storage_format*.data`.
|
|
|
|
A failure is a storage read test case that is no longer present with the same
|
|
|
|
function name and parameter list.
|
|
|
|
|
|
|
|
* It's ok if the same test data is present, but its presentation has changed,
|
|
|
|
for example if a test function is renamed or has different parameters.
|
|
|
|
* It's ok if redundant tests are removed.
|
|
|
|
|
|
|
|
**Generated test coverage**: the goal is to check that automatically
|
|
|
|
generated tests have as much coverage as before. This is normally validated
|
|
|
|
by comparing the test cases that are automatically generated by a script.
|
|
|
|
A failure is a generated test case that is no longer present with the same
|
|
|
|
function name and parameter list.
|
|
|
|
|
|
|
|
* It's ok if the same test data is present, but its presentation has changed,
|
|
|
|
for example if a test function is renamed or has different parameters.
|
|
|
|
* It's ok if redundant tests are removed.
|
|
|
|
|
2018-04-06 10:23:22 +00:00
|
|
|
"""
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2020-08-07 11:07:28 +00:00
|
|
|
# Copyright The Mbed TLS Contributors
|
2023-11-02 19:47:20 +00:00
|
|
|
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
|
2020-05-25 23:54:15 +00:00
|
|
|
|
2022-02-22 18:02:44 +00:00
|
|
|
import glob
|
2018-03-01 14:53:49 +00:00
|
|
|
import os
|
2021-04-23 14:37:12 +00:00
|
|
|
import re
|
2018-03-01 14:53:49 +00:00
|
|
|
import sys
|
|
|
|
import traceback
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import argparse
|
|
|
|
import logging
|
|
|
|
import tempfile
|
2019-02-25 11:35:05 +00:00
|
|
|
import fnmatch
|
2019-04-09 08:14:17 +00:00
|
|
|
from types import SimpleNamespace
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2019-02-21 13:09:26 +00:00
|
|
|
import xml.etree.ElementTree as ET
|
|
|
|
|
2024-05-10 15:58:31 +00:00
|
|
|
import framework_scripts_path # pylint: disable=unused-import
|
2024-05-03 13:36:12 +00:00
|
|
|
from mbedtls_framework import build_tree
|
2022-09-18 19:17:09 +00:00
|
|
|
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2020-03-24 17:25:17 +00:00
|
|
|
class AbiChecker:
    """API and ABI checker.

    Compares the interfaces (API, ABI, storage format test coverage) of
    two git revisions of Mbed TLS, as configured at construction time.
    """
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2019-04-09 08:14:17 +00:00
|
|
|
def __init__(self, old_version, new_version, configuration):
|
2019-02-25 19:36:52 +00:00
|
|
|
"""Instantiate the API/ABI checker.
|
|
|
|
|
2019-03-05 16:25:38 +00:00
|
|
|
old_version: RepoVersion containing details to compare against
|
|
|
|
new_version: RepoVersion containing details to check
|
2019-04-12 14:17:02 +00:00
|
|
|
configuration.report_dir: directory for output files
|
|
|
|
configuration.keep_all_reports: if false, delete old reports
|
|
|
|
configuration.brief: if true, output shorter report to stdout
|
2022-03-04 18:59:55 +00:00
|
|
|
configuration.check_abi: if true, compare ABIs
|
2021-04-23 14:32:32 +00:00
|
|
|
configuration.check_api: if true, compare APIs
|
2021-04-23 14:37:12 +00:00
|
|
|
configuration.check_storage: if true, compare storage format tests
|
2019-04-12 14:17:02 +00:00
|
|
|
configuration.skip_file: path to file containing symbols and types to skip
|
2019-02-25 19:36:52 +00:00
|
|
|
"""
|
2018-03-01 14:53:49 +00:00
|
|
|
self.repo_path = "."
|
|
|
|
self.log = None
|
2019-04-09 08:14:17 +00:00
|
|
|
self.verbose = configuration.verbose
|
2019-03-05 16:30:39 +00:00
|
|
|
self._setup_logger()
|
2019-04-09 08:14:17 +00:00
|
|
|
self.report_dir = os.path.abspath(configuration.report_dir)
|
|
|
|
self.keep_all_reports = configuration.keep_all_reports
|
2019-04-11 14:50:41 +00:00
|
|
|
self.can_remove_report_dir = not (os.path.exists(self.report_dir) or
|
2019-04-09 08:14:17 +00:00
|
|
|
self.keep_all_reports)
|
2019-03-05 16:25:38 +00:00
|
|
|
self.old_version = old_version
|
|
|
|
self.new_version = new_version
|
2019-04-09 08:14:17 +00:00
|
|
|
self.skip_file = configuration.skip_file
|
2021-04-23 14:32:32 +00:00
|
|
|
self.check_abi = configuration.check_abi
|
|
|
|
self.check_api = configuration.check_api
|
|
|
|
if self.check_abi != self.check_api:
|
|
|
|
raise Exception('Checking API without ABI or vice versa is not supported')
|
2021-04-23 14:37:12 +00:00
|
|
|
self.check_storage_tests = configuration.check_storage
|
2019-04-09 08:14:17 +00:00
|
|
|
self.brief = configuration.brief
|
2018-03-01 14:53:49 +00:00
|
|
|
self.git_command = "git"
|
|
|
|
self.make_command = "make"
|
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _setup_logger(self):
|
2018-03-01 14:53:49 +00:00
|
|
|
self.log = logging.getLogger()
|
2019-03-08 11:30:04 +00:00
|
|
|
if self.verbose:
|
|
|
|
self.log.setLevel(logging.DEBUG)
|
|
|
|
else:
|
|
|
|
self.log.setLevel(logging.INFO)
|
2018-03-01 14:53:49 +00:00
|
|
|
self.log.addHandler(logging.StreamHandler())
|
|
|
|
|
2019-02-25 19:36:52 +00:00
|
|
|
@staticmethod
def check_abi_tools_are_installed():
    """Raise if abi-dumper or abi-compliance-checker is not on PATH."""
    for tool in ("abi-dumper", "abi-compliance-checker"):
        if shutil.which(tool) is None:
            raise Exception("{} not installed, aborting".format(tool))
|
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _get_clean_worktree_for_git_revision(self, version):
    """Make a separate worktree with version.revision checked out.
    Do not modify the current worktree.

    Returns the path of the new worktree. As a side effect, resolves
    and stores the checked-out commit hash in version.commit.
    """
    git_worktree_path = tempfile.mkdtemp()
    if version.repository:
        self.log.debug(
            "Checking out git worktree for revision {} from {}".format(
                version.revision, version.repository
            )
        )
        # The revision lives in another repository: fetch it into the
        # current repository first, then check out FETCH_HEAD.
        fetch_output = subprocess.check_output(
            [self.git_command, "fetch",
             version.repository, version.revision],
            cwd=self.repo_path,
            stderr=subprocess.STDOUT
        )
        self.log.debug(fetch_output.decode("utf-8"))
        worktree_rev = "FETCH_HEAD"
    else:
        self.log.debug("Checking out git worktree for revision {}".format(
            version.revision
        ))
        worktree_rev = version.revision
    # --detach so the worktree does not lock a branch name.
    worktree_output = subprocess.check_output(
        [self.git_command, "worktree", "add", "--detach",
         git_worktree_path, worktree_rev],
        cwd=self.repo_path,
        stderr=subprocess.STDOUT
    )
    self.log.debug(worktree_output.decode("utf-8"))
    # Record the exact commit hash for use in reports.
    version.commit = subprocess.check_output(
        [self.git_command, "rev-parse", "HEAD"],
        cwd=git_worktree_path,
        stderr=subprocess.STDOUT
    ).decode("ascii").rstrip()
    self.log.debug("Commit is {}".format(version.commit))
    return git_worktree_path
|
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _update_git_submodules(self, git_worktree_path, version):
    """If the crypto submodule is present, initialize it.
    if version.crypto_revision exists, update it to that revision,
    otherwise update it to the default revision"""
    update_output = subprocess.check_output(
        [self.git_command, "submodule", "update", "--init", '--recursive'],
        cwd=git_worktree_path,
        stderr=subprocess.STDOUT
    )
    self.log.debug(update_output.decode("utf-8"))
    # Nothing more to do unless there is a crypto submodule checkout
    # and a specific crypto revision was requested.
    if not (os.path.exists(os.path.join(git_worktree_path, "crypto"))
            and version.crypto_revision):
        return

    if version.crypto_repository:
        # The crypto revision lives in another repository: fetch it
        # into the submodule first.
        fetch_output = subprocess.check_output(
            [self.git_command, "fetch", version.crypto_repository,
             version.crypto_revision],
            cwd=os.path.join(git_worktree_path, "crypto"),
            stderr=subprocess.STDOUT
        )
        self.log.debug(fetch_output.decode("utf-8"))
        crypto_rev = "FETCH_HEAD"
    else:
        crypto_rev = version.crypto_revision

    checkout_output = subprocess.check_output(
        [self.git_command, "checkout", crypto_rev],
        cwd=os.path.join(git_worktree_path, "crypto"),
        stderr=subprocess.STDOUT
    )
    self.log.debug(checkout_output.decode("utf-8"))
|
2018-11-02 16:35:09 +00:00
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _build_shared_libraries(self, git_worktree_path, version):
    """Build the shared libraries in the specified worktree.

    As a side effect, record the path of each built *.so file in
    version.modules, keyed by the library's base name.
    """
    my_environment = os.environ.copy()
    # Debug info is needed by abi-dumper; keep optimization light.
    my_environment["CFLAGS"] = "-g -Og"
    my_environment["SHARED"] = "1"
    if os.path.exists(os.path.join(git_worktree_path, "crypto")):
        my_environment["USE_CRYPTO_SUBMODULE"] = "1"
    make_output = subprocess.check_output(
        [self.make_command, "lib"],
        env=my_environment,
        cwd=git_worktree_path,
        stderr=subprocess.STDOUT
    )
    self.log.debug(make_output.decode("utf-8"))
    # Collect every shared object produced anywhere in the worktree.
    for root, _dirs, files in os.walk(git_worktree_path):
        for file in fnmatch.filter(files, "*.so"):
            version.modules[os.path.splitext(file)[0]] = (
                os.path.join(root, file)
            )
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2019-07-04 17:01:22 +00:00
|
|
|
@staticmethod
|
|
|
|
def _pretty_revision(version):
|
|
|
|
if version.revision == version.commit:
|
|
|
|
return version.revision
|
|
|
|
else:
|
|
|
|
return "{} ({})".format(version.revision, version.commit)
|
|
|
|
|
2019-04-05 16:06:17 +00:00
|
|
|
def _get_abi_dumps_from_shared_libraries(self, version):
    """Generate the ABI dumps for the specified git revision.
    The shared libraries must have been built and the module paths
    present in version.modules.

    As a side effect, record the path of each dump file in
    version.abi_dumps, keyed by module name.
    """
    for mbed_module, module_path in version.modules.items():
        output_path = os.path.join(
            self.report_dir, "{}-{}-{}.dump".format(
                mbed_module, version.revision, version.version
            )
        )
        abi_dump_command = [
            "abi-dumper",
            module_path,
            "-o", output_path,
            # Label the dump with a human-readable revision name.
            "-lver", self._pretty_revision(version),
        ]
        abi_dump_output = subprocess.check_output(
            abi_dump_command,
            stderr=subprocess.STDOUT
        )
        self.log.debug(abi_dump_output.decode("utf-8"))
        version.abi_dumps[mbed_module] = output_path
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2021-04-23 14:37:12 +00:00
|
|
|
@staticmethod
|
|
|
|
def _normalize_storage_test_case_data(line):
|
|
|
|
"""Eliminate cosmetic or irrelevant details in storage format test cases."""
|
|
|
|
line = re.sub(r'\s+', r'', line)
|
|
|
|
return line
|
|
|
|
|
2022-02-22 18:02:44 +00:00
|
|
|
def _read_storage_tests(self,
                        directory,
                        filename,
                        is_generated,
                        storage_tests):
    """Record storage tests from the given file.

    Populate the storage_tests dictionary with test cases read from
    filename under directory. Keys are normalized test case data lines;
    values carry the file name, line number and paragraph description.

    is_generated: true for automatically generated test data; in manual
    test data, only test functions with "read" in their name are kept.
    """
    # Paragraph-oriented parser state: the first non-comment line of
    # each blank-line-separated paragraph is its description.
    at_paragraph_start = True
    description = None
    full_path = os.path.join(directory, filename)
    with open(full_path) as fd:
        for line_number, line in enumerate(fd, 1):
            line = line.strip()
            if not line:
                # Blank line: the next line starts a new paragraph.
                at_paragraph_start = True
                continue
            if line.startswith('#'):
                continue
            if at_paragraph_start:
                description = line.strip()
                at_paragraph_start = False
                continue
            if line.startswith('depends_on:'):
                continue
            # We've reached a test case data line
            test_case_data = self._normalize_storage_test_case_data(line)
            if not is_generated:
                # In manual test data, only look at read tests.
                function_name = test_case_data.split(':', 1)[0]
                if 'read' not in function_name.split('_'):
                    continue
            metadata = SimpleNamespace(
                filename=filename,
                line_number=line_number,
                description=description
            )
            storage_tests[test_case_data] = metadata
|
2021-04-23 14:37:12 +00:00
|
|
|
|
2022-02-22 18:02:44 +00:00
|
|
|
@staticmethod
def _list_generated_test_data_files(git_worktree_path):
    """List the generated test data files."""
    # Prefer the script's location in the framework submodule; older
    # revisions keep it under tests/scripts instead.
    generate_psa_tests = 'framework/scripts/generate_psa_tests.py'
    if not os.path.isfile(git_worktree_path + '/' + generate_psa_tests):
        generate_psa_tests = 'tests/scripts/generate_psa_tests.py'

    listing = subprocess.check_output(
        [generate_psa_tests, '--list'],
        cwd=git_worktree_path,
    ).decode('ascii')
    # Drop empty entries (e.g. the trailing newline).
    return [entry for entry in listing.split('\n') if entry]
|
|
|
|
|
2021-04-23 14:37:12 +00:00
|
|
|
def _get_storage_format_tests(self, version, git_worktree_path):
    """Record the storage format tests for the specified git version.

    The storage format tests are the test suite data files whose name
    contains "storage_format".

    The version must be checked out at git_worktree_path.

    This function creates or updates the generated data files.
    """
    # Existing test data files. This may be missing some automatically
    # generated files if they haven't been generated yet.
    if os.path.isdir(os.path.join(git_worktree_path, 'tf-psa-crypto',
                                  'tests', 'suites')):
        storage_data_files = set(glob.glob(
            'tf-psa-crypto/tests/suites/test_suite_*storage_format*.data'
        ))
    else:
        storage_data_files = set(glob.glob(
            'tests/suites/test_suite_*storage_format*.data'
        ))
    # Discover and (re)generate automatically generated data files.
    to_be_generated = set()
    for filename in self._list_generated_test_data_files(git_worktree_path):
        if 'storage_format' in filename:
            storage_data_files.add(filename)
            to_be_generated.add(filename)

    # NOTE(review): this path-resolution logic duplicates
    # _list_generated_test_data_files; consider factoring it out.
    generate_psa_tests = 'framework/scripts/generate_psa_tests.py'
    if not os.path.isfile(git_worktree_path + '/' + generate_psa_tests):
        # The checked-out revision is from before generate_psa_tests.py
        # was moved to the framework submodule. Use the old location.
        generate_psa_tests = 'tests/scripts/generate_psa_tests.py'
    subprocess.check_call(
        [generate_psa_tests] + sorted(to_be_generated),
        cwd=git_worktree_path,
    )
    for test_file in sorted(storage_data_files):
        self._read_storage_tests(git_worktree_path,
                                 test_file,
                                 test_file in to_be_generated,
                                 version.storage_tests)
|
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _cleanup_worktree(self, git_worktree_path):
    """Remove the specified git worktree."""
    shutil.rmtree(git_worktree_path)
    # Make git forget the administrative data of the removed worktree.
    prune_output = subprocess.check_output(
        [self.git_command, "worktree", "prune"],
        cwd=self.repo_path,
        stderr=subprocess.STDOUT
    )
    self.log.debug(prune_output.decode("utf-8"))
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _get_abi_dump_for_ref(self, version):
    """Generate the interface information for the specified git revision."""
    worktree = self._get_clean_worktree_for_git_revision(version)
    self._update_git_submodules(worktree, version)
    if self.check_abi:
        # ABI dumps require the shared libraries to be built first.
        self._build_shared_libraries(worktree, version)
        self._get_abi_dumps_from_shared_libraries(version)
    if self.check_storage_tests:
        self._get_storage_format_tests(version, worktree)
    self._cleanup_worktree(worktree)
|
2018-03-01 14:53:49 +00:00
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _remove_children_with_tag(self, parent, tag):
|
2019-02-21 13:09:26 +00:00
|
|
|
children = parent.getchildren()
|
|
|
|
for child in children:
|
|
|
|
if child.tag == tag:
|
|
|
|
parent.remove(child)
|
|
|
|
else:
|
2019-03-05 16:30:39 +00:00
|
|
|
self._remove_children_with_tag(child, tag)
|
2019-02-21 13:09:26 +00:00
|
|
|
|
2019-03-05 16:30:39 +00:00
|
|
|
def _remove_extra_detail_from_report(self, report_root):
    """Strip an abi-compliance-checker XML report down to the problems.

    Remove informational sections wholesale, then drop any problem
    sections that are left empty.
    """
    for tag in ['test_info', 'test_results', 'problem_summary',
                'added_symbols', 'affected']:
        self._remove_children_with_tag(report_root, tag)

    for report in report_root:
        # Element.getchildren() was removed in Python 3.9. Iterate over
        # a list copy so removal during traversal is safe, and test
        # emptiness with len() (Element truthiness is deprecated).
        for problems in list(report):
            if len(problems) == 0:
                report.remove(problems)
|
|
|
|
|
2019-07-04 17:17:40 +00:00
|
|
|
def _abi_compliance_command(self, mbed_module, output_path):
|
|
|
|
"""Build the command to run to analyze the library mbed_module.
|
|
|
|
The report will be placed in output_path."""
|
|
|
|
abi_compliance_command = [
|
|
|
|
"abi-compliance-checker",
|
|
|
|
"-l", mbed_module,
|
|
|
|
"-old", self.old_version.abi_dumps[mbed_module],
|
|
|
|
"-new", self.new_version.abi_dumps[mbed_module],
|
|
|
|
"-strict",
|
|
|
|
"-report-path", output_path,
|
|
|
|
]
|
|
|
|
if self.skip_file:
|
|
|
|
abi_compliance_command += ["-skip-symbols", self.skip_file,
|
|
|
|
"-skip-types", self.skip_file]
|
|
|
|
if self.brief:
|
|
|
|
abi_compliance_command += ["-report-format", "xml",
|
|
|
|
"-stdout"]
|
|
|
|
return abi_compliance_command
|
|
|
|
|
|
|
|
def _is_library_compatible(self, mbed_module, compatibility_report):
    """Test if the library mbed_module has remained compatible.
    Append a message regarding compatibility to compatibility_report.

    Returns True if no incompatibility was reported.
    """
    output_path = os.path.join(
        self.report_dir, "{}-{}-{}.html".format(
            mbed_module, self.old_version.revision,
            self.new_version.revision
        )
    )
    try:
        subprocess.check_output(
            self._abi_compliance_command(mbed_module, output_path),
            stderr=subprocess.STDOUT
        )
    except subprocess.CalledProcessError as err:
        # abi-compliance-checker exits with 1 on incompatibility;
        # anything else is a tool failure and is propagated.
        if err.returncode != 1:
            raise err
        if self.brief:
            self.log.info(
                "Compatibility issues found for {}".format(mbed_module)
            )
            # In brief mode the XML report arrived on stdout; trim it
            # down to the problems before logging.
            report_root = ET.fromstring(err.output.decode("utf-8"))
            self._remove_extra_detail_from_report(report_root)
            self.log.info(ET.tostring(report_root).decode("utf-8"))
        else:
            # Keep the HTML report around so the user can inspect it.
            self.can_remove_report_dir = False
            compatibility_report.append(
                "Compatibility issues found for {}, "
                "for details see {}".format(mbed_module, output_path)
            )
        return False
    compatibility_report.append(
        "No compatibility issues for {}".format(mbed_module)
    )
    if not (self.keep_all_reports or self.brief):
        os.remove(output_path)
    return True
|
|
|
|
|
2021-04-23 14:37:12 +00:00
|
|
|
@staticmethod
|
|
|
|
def _is_storage_format_compatible(old_tests, new_tests,
|
|
|
|
compatibility_report):
|
|
|
|
"""Check whether all tests present in old_tests are also in new_tests.
|
|
|
|
|
|
|
|
Append a message regarding compatibility to compatibility_report.
|
|
|
|
"""
|
|
|
|
missing = frozenset(old_tests.keys()).difference(new_tests.keys())
|
|
|
|
for test_data in sorted(missing):
|
|
|
|
metadata = old_tests[test_data]
|
|
|
|
compatibility_report.append(
|
|
|
|
'Test case from {} line {} "{}" has disappeared: {}'.format(
|
|
|
|
metadata.filename, metadata.line_number,
|
|
|
|
metadata.description, test_data
|
|
|
|
)
|
|
|
|
)
|
|
|
|
compatibility_report.append(
|
|
|
|
'FAIL: {}/{} storage format test cases have changed or disappeared.'.format(
|
|
|
|
len(missing), len(old_tests)
|
|
|
|
) if missing else
|
|
|
|
'PASS: All {} storage format test cases are preserved.'.format(
|
|
|
|
len(old_tests)
|
|
|
|
)
|
|
|
|
)
|
|
|
|
compatibility_report.append(
|
|
|
|
'Info: number of storage format tests cases: {} -> {}.'.format(
|
|
|
|
len(old_tests), len(new_tests)
|
|
|
|
)
|
|
|
|
)
|
|
|
|
return not missing
|
|
|
|
|
2018-03-01 14:53:49 +00:00
|
|
|
def get_abi_compatibility_report(self):
    """Generate a report of the differences between the reference ABI
    and the new ABI. ABI dumps from self.old_version and self.new_version
    must be available.

    Returns 0 if all enabled checks pass, 1 otherwise. As a side
    effect, deletes the ABI dump files and, when possible, the report
    directory.
    """
    compatibility_report = ["Checking evolution from {} to {}".format(
        self._pretty_revision(self.old_version),
        self._pretty_revision(self.new_version)
    )]
    compliance_return_code = 0

    if self.check_abi:
        # Only compare modules that exist in both versions.
        shared_modules = list(set(self.old_version.modules.keys()) &
                              set(self.new_version.modules.keys()))
        for mbed_module in shared_modules:
            if not self._is_library_compatible(mbed_module,
                                               compatibility_report):
                compliance_return_code = 1

    if self.check_storage_tests:
        if not self._is_storage_format_compatible(
                self.old_version.storage_tests,
                self.new_version.storage_tests,
                compatibility_report):
            compliance_return_code = 1

    # Clean up the intermediate ABI dump files.
    for version in [self.old_version, self.new_version]:
        for mbed_module, mbed_module_dump in version.abi_dumps.items():
            os.remove(mbed_module_dump)
    if self.can_remove_report_dir:
        os.rmdir(self.report_dir)
    self.log.info("\n".join(compatibility_report))
    return compliance_return_code
|
|
|
|
|
|
|
|
def check_for_abi_changes(self):
    """Generate a report of ABI differences
    between self.old_rev and self.new_rev."""
    build_tree.check_repo_path()
    if self.check_api or self.check_abi:
        # Fail early if the external comparison tools are missing.
        self.check_abi_tools_are_installed()
    for version in (self.old_version, self.new_version):
        self._get_abi_dump_for_ref(version)
    return self.get_abi_compatibility_report()
|
|
|
|
|
|
|
|
|
|
|
|
def run_main():
    """Parse command-line arguments, run the checker and exit.

    Exits with 0 on success, 1 on non-compliance, 2 on error.
    """
    try:
        parser = argparse.ArgumentParser(
            description=__doc__
        )
        parser.add_argument(
            "-v", "--verbose", action="store_true",
            help="set verbosity level",
        )
        parser.add_argument(
            "-r", "--report-dir", type=str, default="reports",
            help="directory where reports are stored, default is reports",
        )
        parser.add_argument(
            "-k", "--keep-all-reports", action="store_true",
            help="keep all reports, even if there are no compatibility issues",
        )
        parser.add_argument(
            "-o", "--old-rev", type=str, help="revision for old version.",
            required=True,
        )
        parser.add_argument(
            "-or", "--old-repo", type=str, help="repository for old version."
        )
        parser.add_argument(
            "-oc", "--old-crypto-rev", type=str,
            help="revision for old crypto submodule."
        )
        parser.add_argument(
            "-ocr", "--old-crypto-repo", type=str,
            help="repository for old crypto submodule."
        )
        parser.add_argument(
            "-n", "--new-rev", type=str, help="revision for new version",
            required=True,
        )
        parser.add_argument(
            "-nr", "--new-repo", type=str, help="repository for new version."
        )
        parser.add_argument(
            "-nc", "--new-crypto-rev", type=str,
            help="revision for new crypto version"
        )
        parser.add_argument(
            "-ncr", "--new-crypto-repo", type=str,
            help="repository for new crypto submodule."
        )
        parser.add_argument(
            "-s", "--skip-file", type=str,
            help=("path to file containing symbols and types to skip "
                  "(typically \"-s identifiers\" after running "
                  "\"tests/scripts/list-identifiers.sh --internal\")")
        )
        # Each check defaults to enabled, with a --no-* option to
        # disable it via the same destination variable.
        parser.add_argument(
            "--check-abi",
            action='store_true', default=True,
            help="Perform ABI comparison (default: yes)"
        )
        parser.add_argument("--no-check-abi", action='store_false', dest='check_abi')
        parser.add_argument(
            "--check-api",
            action='store_true', default=True,
            help="Perform API comparison (default: yes)"
        )
        parser.add_argument("--no-check-api", action='store_false', dest='check_api')
        parser.add_argument(
            "--check-storage",
            action='store_true', default=True,
            help="Perform storage tests comparison (default: yes)"
        )
        parser.add_argument("--no-check-storage", action='store_false', dest='check_storage')
        parser.add_argument(
            "-b", "--brief", action="store_true",
            help="output only the list of issues to stdout, instead of a full report",
        )
        abi_args = parser.parse_args()
        if os.path.isfile(abi_args.report_dir):
            print("Error: {} is not a directory".format(abi_args.report_dir))
            parser.exit()
        # Bundle the per-version details the checker expects.
        old_version = SimpleNamespace(
            version="old",
            repository=abi_args.old_repo,
            revision=abi_args.old_rev,
            commit=None,
            crypto_repository=abi_args.old_crypto_repo,
            crypto_revision=abi_args.old_crypto_rev,
            abi_dumps={},
            storage_tests={},
            modules={}
        )
        new_version = SimpleNamespace(
            version="new",
            repository=abi_args.new_repo,
            revision=abi_args.new_rev,
            commit=None,
            crypto_repository=abi_args.new_crypto_repo,
            crypto_revision=abi_args.new_crypto_rev,
            abi_dumps={},
            storage_tests={},
            modules={}
        )
        configuration = SimpleNamespace(
            verbose=abi_args.verbose,
            report_dir=abi_args.report_dir,
            keep_all_reports=abi_args.keep_all_reports,
            brief=abi_args.brief,
            check_abi=abi_args.check_abi,
            check_api=abi_args.check_api,
            check_storage=abi_args.check_storage,
            skip_file=abi_args.skip_file
        )
        abi_check = AbiChecker(old_version, new_version, configuration)
        return_code = abi_check.check_for_abi_changes()
        sys.exit(return_code)
    except Exception: # pylint: disable=broad-except
        # Print the backtrace and exit explicitly so as to exit with
        # status 2, not 1.
        traceback.print_exc()
        sys.exit(2)
|
|
|
|
|
|
|
|
|
|
|
|
# Script entry point: run the checker and exit with its status.
if __name__ == "__main__":
    run_main()
|