diff options
-rw-r--r-- | ports/qemu/README.md                         |  4 |
-rw-r--r-- | ports/qemu/boards/VIRT_RV32/mpconfigboard.mk |  2 |
-rwxr-xr-x | tests/run-natmodtests.py                     | 58 |
3 files changed, 55 insertions, 9 deletions
diff --git a/ports/qemu/README.md b/ports/qemu/README.md
index 70edf97f5..c7d0dc1f4 100644
--- a/ports/qemu/README.md
+++ b/ports/qemu/README.md
@@ -105,9 +105,7 @@ can also be tested with this command (this is currently supported only for the
     $ make test_natmod
 
 The same remarks about manually running the tests apply for native modules, but
-`run-natmodtests.py` should be run instead of `run-tests.py`. In this case you
-also have to explicitly pass the architecture you are running native modules to
-`run-natmodtests.py` ("--arch rv32imc" for the `VIRT_RV32` board).
+`run-natmodtests.py` should be run instead of `run-tests.py`.
 
 Extra make options
 ------------------
diff --git a/ports/qemu/boards/VIRT_RV32/mpconfigboard.mk b/ports/qemu/boards/VIRT_RV32/mpconfigboard.mk
index dd9264800..ce1272092 100644
--- a/ports/qemu/boards/VIRT_RV32/mpconfigboard.mk
+++ b/ports/qemu/boards/VIRT_RV32/mpconfigboard.mk
@@ -9,5 +9,3 @@ LDSCRIPT = mcu/rv32/virt.ld
 SRC_BOARD_O += shared/runtime/gchelper_native.o shared/runtime/gchelper_rv32i.o
 
 MPY_CROSS_FLAGS += -march=rv32imc
-
-RUN_NATMODTESTS_ARGS = --arch rv32imc
diff --git a/tests/run-natmodtests.py b/tests/run-natmodtests.py
index 1fe44bec1..b858989da 100755
--- a/tests/run-natmodtests.py
+++ b/tests/run-natmodtests.py
@@ -28,6 +28,23 @@ TEST_MAPPINGS = {
     "re": "re/re_$(ARCH).mpy",
 }
 
+# Supported architectures for native mpy modules
+AVAILABLE_ARCHS = (
+    "x86",
+    "x64",
+    "armv6",
+    "armv6m",
+    "armv7m",
+    "armv7em",
+    "armv7emsp",
+    "armv7emdp",
+    "xtensa",
+    "xtensawin",
+    "rv32imc",
+)
+
+ARCH_MAPPINGS = {"armv7em": "armv7m"}
+
 # Code to allow a target MicroPython to import an .mpy from RAM
 injected_import_hook_code = """\
 import sys, io, vfs
@@ -96,14 +113,33 @@ class TargetPyboard:
         return b"", er
 
 
-def run_tests(target_truth, target, args, stats):
+def detect_architecture(target):
+    with open("./feature_check/target_info.py", "rb") as f:
+        target_info_data = f.read()
+    result_out, error = target.run_script(target_info_data)
+    if error is not None:
+        return None, None, error
+    info = result_out.split(b" ")
+    if len(info) < 2:
+        return None, None, "unexpected target info: {}".format(info)
+    platform = info[0].strip().decode()
+    arch = info[1].strip().decode()
+    if arch not in AVAILABLE_ARCHS:
+        if arch == "None":
+            return None, None, "the target does not support dynamic modules"
+        else:
+            return None, None, "{} is not a supported architecture".format(arch)
+    return platform, arch, None
+
+
+def run_tests(target_truth, target, args, stats, resolved_arch):
     for test_file in args.files:
         # Find supported test
         test_file_basename = os.path.basename(test_file)
         for k, v in TEST_MAPPINGS.items():
             if test_file_basename.startswith(k):
                 test_module = k
-                test_mpy = v.replace("$(ARCH)", args.arch)
+                test_mpy = v.replace("$(ARCH)", resolved_arch)
                 break
         else:
             print("---- {} - no matching mpy".format(test_file))
@@ -174,7 +210,7 @@ def main():
         "-d", "--device", default="/dev/ttyACM0", help="the device for pyboard.py"
     )
     cmd_parser.add_argument(
-        "-a", "--arch", default="x64", help="native architecture of the target"
+        "-a", "--arch", choices=AVAILABLE_ARCHS, help="override native architecture of the target"
     )
     cmd_parser.add_argument("files", nargs="*", help="input test files")
     args = cmd_parser.parse_args()
@@ -186,8 +222,22 @@ def main():
     else:
         target = TargetSubprocess([MICROPYTHON])
 
+    if hasattr(args, "arch") and args.arch is not None:
+        target_arch = args.arch
+        target_platform = None
+    else:
+        target_platform, target_arch, error = detect_architecture(target)
+        if error:
+            print("Cannot run tests: {}".format(error))
+            sys.exit(1)
+        target_arch = ARCH_MAPPINGS.get(target_arch, target_arch)
+
+    if target_platform:
+        print("platform={} ".format(target_platform), end="")
+    print("arch={}".format(target_arch))
+
     stats = {"total": 0, "pass": 0, "fail": 0, "skip": 0}
-    run_tests(target_truth, target, args, stats)
+    run_tests(target_truth, target, args, stats, target_arch)
 
     target.close()
     target_truth.close()