diff --git a/trunner/harness/plo.py b/trunner/harness/plo.py index 107810c6..721db75a 100644 --- a/trunner/harness/plo.py +++ b/trunner/harness/plo.py @@ -146,14 +146,24 @@ def erase(self, device: str, offset: int, size: int, timeout: Optional[int] = No e.cmd = cmd raise e - def jffs2(self, device: str, erase: bool, cleanmarkers: PloJffs2CleanmarkerSpec, timeout: Optional[int] = None): + def jffs2(self, device: str, erase: bool, cleanmarkers: PloJffs2CleanmarkerSpec, block_timeout: int): """Performs jffs2 command.""" + block_count = cleanmarkers.number_of_blocks + cmd = f"jffs2 -d {device} -c {cleanmarkers}" if erase: cmd += " -e" - self.cmd(cmd, timeout) + self.send_cmd(cmd) + + for i in range(0, block_count): + try: + self.dut.expect_exact(f"jffs2: block {i}/{block_count}", timeout=block_timeout) + except pexpect.TIMEOUT as e: + raise PloError("Wrong jffs2 command output!", cmd=cmd, output=self.dut.before) from e + + self._assert_prompt() def app( self, device: str, file: str, imap: str, dmap: str, exec: bool = False @@ -220,7 +230,8 @@ class PloJffsImageProperty(PloImageProperty): flash_device_id: str cleanmarkers_args: PloJffs2CleanmarkerSpec - timeout: int + # optimal timeout for erasing 1 block using jffs2 command + block_timeout: int = 1 class PloImageLoader(TerminalHarness, PloInterface): @@ -258,7 +269,7 @@ def __call__(self): self.plo_loader() if isinstance(self.image, PloJffsImageProperty): - self.jffs2(self.image.flash_device_id, True, self.image.cleanmarkers_args, self.image.block_timeout) with self.phoenixd.run(): self.copy_file2mem( diff --git a/trunner/target/armv7a7.py b/trunner/target/armv7a7.py index 31992a7b..d1582c79 100644 @@ -84,7 +84,6 @@ class IMX6ULLEvkTarget(ARMv7A7Target): block_size=0x10000, cleanmarker_size=0x10, ), - timeout=275, ) name = "armv7a7-imx6ull-evk" diff
--git a/trunner/target/armv7a9.py b/trunner/target/armv7a9.py index 2e841e49..833f8e91 100644 --- a/trunner/target/armv7a9.py +++ b/trunner/target/armv7a9.py @@ -121,7 +121,6 @@ class Zynq7000ZedboardTarget(ARMv7A9Target): block_size=0x10000, cleanmarker_size=0x10, ), - timeout=140, ) name = "armv7a9-zynq7000-zedboard" diff --git a/trunner/test_runner.py b/trunner/test_runner.py index 171b7305..293ae579 100644 --- a/trunner/test_runner.py +++ b/trunner/test_runner.py @@ -29,6 +29,7 @@ def resolve_project_path(): class LogWrapper(StringIO): """Wrapper for saving all logs into StringIO and also streaming directly to stream""" + def __init__(self, stream: TextIO): super().__init__("") self.stream = stream @@ -152,7 +153,8 @@ def flash(self) -> TestResult: self.target.flash_dut() result.set_stage(TestStage.DONE) except (FlashError, HarnessError) as exc: - print(bold("ERROR WHILE FLASHING THE DEVICE")) + # the newline is needed to avoid printing exception in the same line as plo prompt + print(bold("\nERROR WHILE FLASHING THE DEVICE")) print(exc) result.fail_harness_exception(exc) @@ -185,7 +187,7 @@ def _export_results_csv(self, results: Sequence[TestResult]): fname = self.ctx.output + ".csv" - with open(fname, 'w', encoding='utf-8') as out_csv: + with open(fname, "w", encoding="utf-8") as out_csv: out_csv.write(TestResult.get_csv_header() + "\n") for res in results: out_csv.write(res.to_csv() + "\n") @@ -206,8 +208,8 @@ def _export_results_xml(self, results: Sequence[TestResult]): suite = res.to_junit_testsuite(self.ctx.target.name) suite.hostname = self.ctx.host.name if is_github_actions(): - suite.add_property('url', get_ci_url()) - suite.add_property('SHA', os.environ['GITHUB_SHA']) + suite.add_property("url", get_ci_url()) + suite.add_property("SHA", os.environ["GITHUB_SHA"]) xml.add_testsuite(suite) @@ -220,7 +222,7 @@ def _export_results_xml(self, results: Sequence[TestResult]): from lxml import etree except ImportError: from xml.etree import ElementTree as 
etree - with open(fname, 'wb') as out_xml: + with open(fname, "wb") as out_xml: text = etree.tostring(xml._elem) out_xml.write(text) @@ -321,10 +323,12 @@ def run(self) -> bool: sums = Counter(res.status for res in results) - print(f"TESTS: {len(results)} " - f"{green('PASSED')}: {sums.get(Status.OK, 0)} " - f"{red('FAILED')}: {sums.get(Status.FAIL, 0)} " - f"{yellow('SKIPPED')}: {sums.get(Status.SKIP, 0)}") + print( + f"TESTS: {len(results)} " + f"{green('PASSED')}: {sums.get(Status.OK, 0)} " + f"{red('FAILED')}: {sums.get(Status.FAIL, 0)} " + f"{yellow('SKIPPED')}: {sums.get(Status.SKIP, 0)}" + ) self._export_results_csv(results) self._export_results_xml(results)