diff --git a/.github/cases/blastoise/subsystem.json b/.github/cases/blastoise/subsystem.json new file mode 100644 index 000000000..16eadb3af --- /dev/null +++ b/.github/cases/blastoise/subsystem.json @@ -0,0 +1,17 @@ +{ + "rvv-vp-intrinsic-add-mlir": 291, + "rvv-vp-intrinsic-add-scalable-mlir": 448, + "hello-mlir": 72, + "stripmining-mlir": 23635, + "conv2d-less-m2-intrinsic": 27600, + "linear_normalization-intrinsic": 1, + "matmul-intrinsic": 1, + "softmax-intrinsic": 1, + "uarttest-intrinsic": 1, + "fpsmoke-asm": 1, + "memcpy-asm": 1, + "mmm-asm": 61401, + "smoke-asm": 7539, + "strlen-asm": 1, + "utf8-count-asm": 1 +} diff --git a/.github/cases/v1024-l8-b2/subsystem.json b/.github/cases/v1024-l8-b2/subsystem.json new file mode 100644 index 000000000..16eadb3af --- /dev/null +++ b/.github/cases/v1024-l8-b2/subsystem.json @@ -0,0 +1,17 @@ +{ + "rvv-vp-intrinsic-add-mlir": 291, + "rvv-vp-intrinsic-add-scalable-mlir": 448, + "hello-mlir": 72, + "stripmining-mlir": 23635, + "conv2d-less-m2-intrinsic": 27600, + "linear_normalization-intrinsic": 1, + "matmul-intrinsic": 1, + "softmax-intrinsic": 1, + "uarttest-intrinsic": 1, + "fpsmoke-asm": 1, + "memcpy-asm": 1, + "mmm-asm": 61401, + "smoke-asm": 7539, + "strlen-asm": 1, + "utf8-count-asm": 1 +} diff --git a/.github/scripts/ci.sc b/.github/scripts/ci.sc index d447cea36..09de76329 100755 --- a/.github/scripts/ci.sc +++ b/.github/scripts/ci.sc @@ -51,7 +51,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = { .toSeq }) // Initialize a list of buckets - val cargo = (0 until bucketSize).map(_ => new BucketBuffer()) + val size = bucketSize.min(allCycleData.size) + val cargo = (0 until size).map(_ => new BucketBuffer()) // _2 is the cycle number val (unProcessedData, normalData) = allCycleData.partition(_._2 <= 0) // Group tests that have cycle data into subset by their cycle size @@ -63,8 +64,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = { }) // For 
unprocessed data, just split them into subset that have equal size cargo.zipWithIndex.foreach { case(buffer, i) => - val startIdx = i * bucketSize - val endIdx = math.min((i + 1) * bucketSize, unProcessedData.length) + val startIdx = i * size + val endIdx = math.min((i + 1) * size, unProcessedData.length) unProcessedData.slice(startIdx, endIdx).foreach { case(name, cycle) => buffer.push_back(name, cycle) } } cargo.map(_.mkString).toSeq @@ -80,18 +81,15 @@ def toMatrixJson(buckets: Seq[String]) = // Read default tests information from '.github/cases/default.txt' file, and use that information to generate GitHub CI matrix. // The result will be printed to stdout, and should be pipe into $GITHUB_OUTPUT @main -def generateCiMatrix( - runnersAmount: Int, -) = { - val defaultCases = os.pwd / os.RelPath(".github/cases/default.txt") - println(toMatrixJson( - scheduleTasks( - os.read - .lines(defaultCases) - .map(defaultCases / os.up / os.RelPath(_)), - runnersAmount - ), - )) +def generateCiMatrix(defaultCases: String = "default.json", runnersAmount: Int) = { + println( + toMatrixJson( + scheduleTasks( + os.walk(os.pwd/".github"/"cases").filter(_.last == defaultCases), + runnersAmount + ), + ) + ) } // Resolve all the executable test and filter out unpassed tests, appending perf testcases @@ -220,7 +218,7 @@ def writeCycleUpdates(job: String, testRunDir: os.Path, resultDir: os.Path) = { // @param: resultDir output directory of the test results, default to ./test-results // @param: dontBail don't throw exception when test fail. Useful for postpr. 
@main -def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false) = { +def runTests(jobs: String, runTarget: String = "ip", resultDir: Option[os.Path], dontBail: Boolean = false) = { var actualResultDir = resultDir.getOrElse(os.pwd / "test-results") val testRunDir = os.pwd / "testrun" os.makeDir.all(actualResultDir / "failed-logs") @@ -229,8 +227,11 @@ def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false case(failed, (job, i)) => { val Array(config, caseName) = job.split(",") System.err.println(s"\n\n\n>>>[${i+1}/${totalJobs.length}] Running test case $config,$caseName") + val args = Seq("scripts/run-test.py", runTarget, "-c", config, "--base-out-dir", testRunDir.toString) ++ + { if (runTarget == "ip") Seq("--no-log") else Seq() } ++ + Seq(caseName) val handle = os - .proc("scripts/run-test.py", "ip", "-c", config, "--no-log", "--base-out-dir", testRunDir, caseName) + .proc(args) .call(check=false) if (handle.exitCode != 0) { val outDir = testRunDir / config / caseName @@ -238,7 +239,10 @@ def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false os.write(actualResultDir / "failed-logs" / s"$job.txt", handle.out.text) failed :+ job } else { - writeCycleUpdates(job, testRunDir, actualResultDir) + if (runTarget == "ip") { + writeCycleUpdates(job, testRunDir, actualResultDir) + } + failed } } @@ -263,14 +267,14 @@ def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false // @param: jobs A semicolon-separated list of job names of the form $config,$caseName,$runConfig // @param: output directory of the test results, default to ./test-results @main -def runFailedTests(jobs: String) = { +def runFailedTests(jobs: String, runTarget: String = "ip") = { val testRunDir = os.pwd / "testrun" val totalJobs = jobs.split(";") val failed = totalJobs.zipWithIndex.foreach { case (job, i) => { val Array(config, caseName) = job.split(",") System.err.println(s"[${i+1}/${totalJobs.length}] 
Running test case with trace $config,$caseName") val handle = os - .proc("scripts/run-test.py", "ip", "-c", config, "--trace", "--no-log", "--base-out-dir", testRunDir, caseName) + .proc("scripts/run-test.py", runTarget, "-c", config, "--trace", "--no-log", "--base-out-dir", testRunDir, caseName) .call(check=false) }} } diff --git a/.github/workflows/subsystem.yml b/.github/workflows/subsystem.yml new file mode 100644 index 000000000..2790c0e4a --- /dev/null +++ b/.github/workflows/subsystem.yml @@ -0,0 +1,199 @@ +name: subsystem +on: [pull_request] +env: + USER: runner + JAVA_OPTS: "-Duser.home=/run/github-runner/sequencer" + +# Cancel the current workflow when a new commit is pushed +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + build-emulators: + name: "Build Emulators" + runs-on: [self-hosted, linux, nixos, AMD Ryzen 9 7940HS w/ Radeon 780M Graphics] + strategy: + matrix: + config: + - squirtle + - blastoise + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Build verilator emulator" + run: | + nix build '.#t1.${{ matrix.config }}.subsystem.emu' -L --no-link --cores 64 + + gen-matrix: + name: "Prepare for running testcases" + needs: [build-emulators] + runs-on: [self-hosted, linux, nixos, "AMD Ryzen 9 7940HS w/ Radeon 780M Graphics"] + env: + RUNNERS: 70 + outputs: + ci-tests: ${{ steps.gen-matrix.outputs.matrix }} + steps: + # actions/checkout will use the "event" commit to check out the repository, + # which leads to an unexpected issue: the "event" commit doesn't belong to the repository, + # and the derivation build output cannot be cached correctly.
+ - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Build all testcases" + run: | + # Build testcases with vlen 1024 and vlen 4096 + nix build ".#t1.bulbasaur.cases.all" --max-jobs auto -L --no-link --cores 64 + nix build ".#t1.psyduck.cases.all" --max-jobs auto -L --no-link --cores 64 + - id: gen-matrix + name: "Generate test matrix" + run: | + echo -n matrix= >> "$GITHUB_OUTPUT" + nix shell ".#ammonite" -c .github/scripts/ci.sc generateCiMatrix subsystem.json "$RUNNERS" >> "$GITHUB_OUTPUT" + + build-trace-emulators: + name: "Build trace emulator" + needs: [gen-matrix] + runs-on: [self-hosted, linux, nixos, AMD Ryzen 9 7940HS w/ Radeon 780M Graphics] + strategy: + fail-fast: false + matrix: + config: + - squirtle + - blastoise + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Build verilator emulator with trace" + run: nix build '.#t1.${{ matrix.config }}.subsystem.emu-trace' -L --no-link --cores 64 + + test-emit: + name: "Test elaborate" + runs-on: [self-hosted, linux, nixos] + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Test elaborate" + run: | + set -e + nix build '.#t1.configgen' -L --out-link ./config-gen + for cfg in $(jq -r '.[]' ./config-gen/share/all-supported-configs.json); do + echo "Building .#t1.${cfg}.subsystem.rtl" + nix build ".#t1.${cfg}.subsystem.rtl" -L + done + + run-testcases: + name: "Run testcases" + needs: [gen-matrix] + strategy: + fail-fast: false + matrix: ${{ fromJSON(needs.gen-matrix.outputs.ci-tests) }} + runs-on: [self-hosted, linux, nixos] + outputs: + result: ${{ steps.ci-run.outputs.result }} + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Run testcases" + id: ci-run + run: | + nix shell ".#ammonite" -c .github/scripts/ci.sc runTests \ + --runTarget subsystem \ + --jobs "${{ matrix.jobs }}" \ + --resultDir 
test-results-$(head -c 10 /dev/urandom | base32) + + - uses: actions/upload-artifact@v4 + if: ${{ !cancelled() }} + with: + name: test-reports-${{ matrix.id }} + path: | + test-results-*/failed-tests.md + + - uses: actions/upload-artifact@v4 + if: failure() + with: + name: failed-logs-${{ matrix.id }} + path: test-results-*/failed-logs + + gen-fail-wave-matrix: + name: "Generate matrix for re-testing failing tests" + if: ${{ !cancelled() }} + needs: [run-testcases] + runs-on: [self-hosted, linux, nixos] + outputs: + generate_wave: ${{ steps.generate-matrix.outputs.generate_wave }} + retry_tasks: ${{ steps.generate-matrix.outputs.retry_tasks }} + steps: + - uses: actions/download-artifact@v4 + with: + pattern: test-reports-* + merge-multiple: true + - id: generate-matrix + name: "Generate matrix" + run: | + touch all-failed-tests.txt + shopt -s nullglob + cat test-results-*/failed-tests.md > all-failed-tests.txt + retry=$(sed 's/\* //' all-failed-tests.txt | shuf -n3) # only retry up to three randomly-chosen failed tests + echo "build wave for: $retry" + if [ -n "$retry" ]; then + echo "generate_wave=true" >> "$GITHUB_OUTPUT" + echo -n "retry_tasks=" >> "$GITHUB_OUTPUT" + echo "$retry" | \ + jq -nR --indent 0 '{"include": [inputs | {"job": ., "id": (input_line_number)}]}' >> "$GITHUB_OUTPUT" + fi + + build-fail-wave: + name: "Generate wave for failing tests" + needs: [build-emulators, gen-fail-wave-matrix] + if: ${{ !cancelled() && needs.gen-fail-wave-matrix.outputs.generate_wave == 'true' }} + strategy: + fail-fast: false + matrix: ${{ fromJSON(needs.gen-fail-wave-matrix.outputs.retry_tasks) }} + runs-on: [self-hosted, linux, nixos] + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: "Run failed testcases" + run: | + nix shell ".#ammonite" -c .github/scripts/ci.sc runFailedTests --runTarget subsystem --jobs "${{ matrix.job }}" + waveFile=$(find testrun -name 'wave.fst') + hierFile=$(find testrun -name 'wave.hier') + if [[ -z "$waveFile"
|| -n "$hierFile" ]]; then # when emulator failed, .hier is not merged into .fst + echo "Verilator doesn't generate wave correctly" + exit 1 + fi + mv "$waveFile" ./wave-${{ matrix.job }}.fst + - uses: actions/upload-artifact@v4 + with: + name: failed-tests-wave-${{ matrix.id }} + path: 'wave-*.fst' + - run: | + echo "Test ${{ matrix.job }} run fail" + # We are just running post action for failing test, so we need to avoid the workflow finishing successfully + exit 1 + + report: + name: "Report CI result" + if: ${{ !cancelled() }} + needs: [run-testcases] + runs-on: [self-hosted, linux, nixos] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.head_ref }} + - uses: actions/download-artifact@v4 + with: + pattern: test-reports-* + merge-multiple: true + - name: "Print step summary" + run: | + echo -e "\n## Failed tests\n" >> $GITHUB_STEP_SUMMARY + shopt -s nullglob + cat test-results-*/failed-tests.md >> $GITHUB_STEP_SUMMARY diff --git a/nix/t1/testcases/make-intrinsic-case.nix b/nix/t1/testcases/make-intrinsic-case.nix index 34a28fa6e..7b7b2917f 100644 --- a/nix/t1/testcases/make-intrinsic-case.nix +++ b/nix/t1/testcases/make-intrinsic-case.nix @@ -6,6 +6,8 @@ stdenv.mkDerivation (rec { name = "${caseName}-intrinsic"; unpackPhase = '' + runHook preUnpack + if [ -z "''${srcs:-}" ]; then if [ -z "''${src:-}" ]; then echo 'variable $src or $srcs should point to the source' @@ -13,6 +15,8 @@ stdenv.mkDerivation (rec { fi srcs="$src" fi + + runHook postUnpack ''; NIX_CFLAGS_COMPILE = [ diff --git a/scripts/run-test.py b/scripts/run-test.py index 2917049d6..d44fed177 100755 --- a/scripts/run-test.py +++ b/scripts/run-test.py @@ -179,11 +179,11 @@ def load_elf_from_dir(config, cases_dir, case_name, use_individual_drv, force_x8 ) else: nix_args.append(f".#t1.{config}.{cases_attr_name}.all") - logger.info(f'Run "{" ".join(nix_args)}"') + logger.info(f'Get ELFs dir by cmd: "{" ".join(nix_args)}"') cases_dir = 
subprocess.check_output(nix_args).strip().decode("UTF-8") cases_dir = Path(cases_dir) - logger.info(f"Running cases in {cases_dir}") + logger.info(f"Reading ELFs in {cases_dir}") case_config_path = ( cases_dir / f"{case_name}.json" @@ -244,7 +244,7 @@ def run_test(args): "-t", f"{args.out_dir}", ] - logger.info(f'Run "{" ".join(configgen_args)}"') + logger.info(f'Get emulator config by cmd: "{" ".join(configgen_args)}"') subprocess.Popen(configgen_args).wait() assert ( elaborate_config_path.exists() @@ -319,7 +319,7 @@ def optionals(cond, items): ] ) + emu_args - logger.info(f'Run "{" ".join(process_args)}"') + logger.info(f'Run {emu_type} emulator with cmd: "{" ".join(process_args)}"') return_code = subprocess.Popen(process_args).wait() if return_code != 0: diff --git a/subsystememu/csrc/dpic.cc b/subsystememu/csrc/dpic.cc index 593358281..b8b410905 100644 --- a/subsystememu/csrc/dpic.cc +++ b/subsystememu/csrc/dpic.cc @@ -184,11 +184,13 @@ extern "C" void AXI4MMIODPI( while (uart.exist_tx()) { char c = uart.getc(); - printf("%c",c); - fflush(stdout); if (c == -1) { exit(0); } + else { + printf("%c",c); + fflush(stdout); + } } // CTRL END } diff --git a/tests/intrinsic/main.S b/tests/intrinsic/main.S index d8a4c7e94..ec151bd97 100644 --- a/tests/intrinsic/main.S +++ b/tests/intrinsic/main.S @@ -17,7 +17,7 @@ exit: .p2align 2 heap_start: - .zero 1073741824 + .zero 2048 heap_end: stack_start: diff --git a/tests/intrinsic/uarttest/default.nix b/tests/intrinsic/uarttest/default.nix new file mode 100644 index 000000000..df62ac6df --- /dev/null +++ b/tests/intrinsic/uarttest/default.nix @@ -0,0 +1,19 @@ +{ testcase-env }: +testcase-env.mkIntrinsicCase { + caseName = "uarttest"; + + srcs = [ + ./uarttest.c + ../main.S + ]; + + postUnpack = '' + mkdir -p inc + cp ${./uart.h} ./inc/uart.h + ''; + + preBuild = '' + NIX_CFLAGS_COMPILE="-Iinc $NIX_CFLAGS_COMPILE" + ''; +} + diff --git a/tests/intrinsic/uart.h b/tests/intrinsic/uarttest/uart.h similarity index 100% rename from 
tests/intrinsic/uart.h rename to tests/intrinsic/uarttest/uart.h diff --git a/tests/intrinsic/uarttest.c b/tests/intrinsic/uarttest/uarttest.c similarity index 100% rename from tests/intrinsic/uarttest.c rename to tests/intrinsic/uarttest/uarttest.c