[cosim] adjust uart addr to 0x90000000 to align with soc #442

Status: Closed · wants to merge 7 commits
17 changes: 17 additions & 0 deletions .github/cases/blastoise/subsystem.json
@@ -0,0 +1,17 @@
{
  "rvv-vp-intrinsic-add-mlir": 291,
  "rvv-vp-intrinsic-add-scalable-mlir": 448,
  "hello-mlir": 72,
  "stripmining-mlir": 23635,
  "conv2d-less-m2-intrinsic": 27600,
  "linear_normalization-intrinsic": 1,
  "matmul-intrinsic": 1,
  "softmax-intrinsic": 1,
  "uarttest-intrinsic": 1,
  "fpsmoke-asm": 1,
  "memcpy-asm": 1,
  "mmm-asm": 61401,
  "smoke-asm": 7539,
  "strlen-asm": 1,
  "utf8-count-asm": 1
}
17 changes: 17 additions & 0 deletions .github/cases/v1024-l8-b2/subsystem.json
@@ -0,0 +1,17 @@
{
  "rvv-vp-intrinsic-add-mlir": 291,
  "rvv-vp-intrinsic-add-scalable-mlir": 448,
  "hello-mlir": 72,
  "stripmining-mlir": 23635,
  "conv2d-less-m2-intrinsic": 27600,
  "linear_normalization-intrinsic": 1,
  "matmul-intrinsic": 1,
  "softmax-intrinsic": 1,
  "uarttest-intrinsic": 1,
  "fpsmoke-asm": 1,
  "memcpy-asm": 1,
  "mmm-asm": 61401,
  "smoke-asm": 7539,
  "strlen-asm": 1,
  "utf8-count-asm": 1
}
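The two tables above give the scheduler in `.github/scripts/ci.sc` a per-case cycle weight for each subsystem config; the entries pinned at 1 presumably stand in for cases without a recorded cycle count yet. A hedged sketch of how such a table can be read into the `(name, cycle)` pairs that `scheduleTasks` partitions, using the os-lib/ujson stack the CI script already runs on (the exact field handling here is illustrative, not the script's code):

```scala
// Illustrative only: parse one per-config cycle table into (name, cycle)
// pairs; scheduleTasks treats cycle <= 0 as "unprocessed" and weights the
// positive entries into runner buckets by size.
val file = os.pwd / ".github" / "cases" / "blastoise" / "subsystem.json"
val cycleData: Seq[(String, Int)] =
  ujson.read(os.read(file)).obj.toSeq.map { case (name, cycle) => (name, cycle.num.toInt) }
cycleData.sortBy(-_._2).take(3).foreach(println) // heaviest cases first
```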
44 changes: 24 additions & 20 deletions .github/scripts/ci.sc
@@ -51,7 +51,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = {
       .toSeq
   })
   // Initialize a list of buckets
-  val cargo = (0 until bucketSize).map(_ => new BucketBuffer())
+  val size = bucketSize.min(allCycleData.size)
+  val cargo = (0 until size).map(_ => new BucketBuffer())
   // _2 is the cycle number
   val (unProcessedData, normalData) = allCycleData.partition(_._2 <= 0)
   // Group tests that have cycle data into subsets by their cycle size
@@ -63,8 +64,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = {
   })
   // For unprocessed data, just split them into subsets of equal size
   cargo.zipWithIndex.foreach { case (buffer, i) =>
-    val startIdx = i * bucketSize
-    val endIdx = math.min((i + 1) * bucketSize, unProcessedData.length)
+    val startIdx = i * size
+    val endIdx = math.min((i + 1) * size, unProcessedData.length)
     unProcessedData.slice(startIdx, endIdx).foreach { case (name, cycle) => buffer.push_back(name, cycle) }
   }
   cargo.map(_.mkString).toSeq
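The new `bucketSize.min(allCycleData.size)` clamp keeps the scheduler from allocating more buckets than there are tasks, which matters now that a per-subsystem case list can be much shorter than the `RUNNERS` count (70 in the workflow below). A standalone sketch of the effect, with made-up tasks and a simplified round-robin standing in for the real `BucketBuffer` packing:

```scala
// Hypothetical data; the real scheduler packs by accumulated cycle count.
val tasks = Seq("smoke-asm" -> 7539, "mmm-asm" -> 61401, "hello-mlir" -> 72)
val bucketSize = 8                      // e.g. far more runners than cases
val size = bucketSize.min(tasks.size)   // 3: never more buckets than tasks
val buckets = Array.fill(size)(List.empty[String])
tasks.sortBy(-_._2).zipWithIndex.foreach { case ((name, _), i) =>
  buckets(i % size) = name :: buckets(i % size) // round-robin, heaviest first
}
// Without the clamp, 5 of the 8 buckets would stay empty and surface as
// empty entries in the generated CI matrix.
buckets.foreach(b => println(b.mkString(";")))
```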
@@ -80,18 +81,15 @@ def toMatrixJson(buckets: Seq[String]) =
 // Read test information from the given cases file (e.g. '.github/cases/<config>/default.json') to generate the GitHub CI matrix.
 // The result will be printed to stdout, and should be piped into $GITHUB_OUTPUT
 @main
-def generateCiMatrix(
-    runnersAmount: Int,
-) = {
-  val defaultCases = os.pwd / os.RelPath(".github/cases/default.txt")
-  println(toMatrixJson(
-    scheduleTasks(
-      os.read
-        .lines(defaultCases)
-        .map(defaultCases / os.up / os.RelPath(_)),
-      runnersAmount
-    ),
-  ))
+def generateCiMatrix(defaultCases: String = "default.json", runnersAmount: Int) = {
+  println(
+    toMatrixJson(
+      scheduleTasks(
+        os.walk(os.pwd / ".github" / "cases").filter(_.last == defaultCases),
+        runnersAmount
+      ),
+    )
+  )
 }
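`generateCiMatrix` no longer hard-codes `.github/cases/default.txt`: it now takes a file name (defaulting to `default.json`) and walks `.github/cases` for every config directory that provides it, which is how the per-config `subsystem.json` tables above get picked up. A minimal sketch of just the discovery step, runnable in the same ammonite/os-lib environment `ci.sc` uses:

```scala
// Collect every path under .github/cases whose file name matches; with the
// files added in this PR, filtering on "subsystem.json" yields one path per
// config directory.
val cases = os.walk(os.pwd / ".github" / "cases").filter(_.last == "subsystem.json")
// => .github/cases/blastoise/subsystem.json
//    .github/cases/v1024-l8-b2/subsystem.json
cases.foreach(println)
```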

 // Resolve all the executable tests and filter out unpassed ones, appending perf testcases
@@ -220,7 +218,7 @@ def writeCycleUpdates(job: String, testRunDir: os.Path, resultDir: os.Path) = {
 // @param: resultDir output directory of the test results, defaults to ./test-results
 // @param: dontBail don't throw an exception when a test fails. Useful for postpr.
 @main
-def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false) = {
+def runTests(jobs: String, runTarget: String = "ip", resultDir: Option[os.Path], dontBail: Boolean = false) = {
   var actualResultDir = resultDir.getOrElse(os.pwd / "test-results")
   val testRunDir = os.pwd / "testrun"
   os.makeDir.all(actualResultDir / "failed-logs")
@@ -229,16 +227,22 @@ def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false) = {
     case (failed, (job, i)) => {
       val Array(config, caseName) = job.split(",")
       System.err.println(s"\n\n\n>>>[${i+1}/${totalJobs.length}] Running test case $config,$caseName")
+      val args = Seq("scripts/run-test.py", runTarget, "-c", config, "--base-out-dir", testRunDir.toString) ++
+        { if (runTarget == "ip") Seq("--no-log") else Seq() } ++
+        Seq(caseName)
       val handle = os
-        .proc("scripts/run-test.py", "ip", "-c", config, "--no-log", "--base-out-dir", testRunDir, caseName)
+        .proc(args)
         .call(check=false)
       if (handle.exitCode != 0) {
         val outDir = testRunDir / config / caseName
         System.err.println(s"Test case $job failed")
         os.write(actualResultDir / "failed-logs" / s"$job.txt", handle.out.text)
         failed :+ job
       } else {
-        writeCycleUpdates(job, testRunDir, actualResultDir)
+        if (runTarget == "ip") {
+          writeCycleUpdates(job, testRunDir, actualResultDir)
+        }
+
         failed
       }
     }
@@ -263,14 +267,14 @@ def runTests(jobs: String, resultDir: Option[os.Path], dontBail: Boolean = false) = {
 // @param: jobs A semicolon-separated list of job names of the form $config,$caseName,$runConfig
 // @param: resultDir output directory of the test results, defaults to ./test-results
 @main
-def runFailedTests(jobs: String) = {
+def runFailedTests(jobs: String, runTarget: String = "ip") = {
   val testRunDir = os.pwd / "testrun"
   val totalJobs = jobs.split(";")
   val failed = totalJobs.zipWithIndex.foreach { case (job, i) => {
     val Array(config, caseName) = job.split(",")
     System.err.println(s"[${i+1}/${totalJobs.length}] Running test case with trace $config,$caseName")
     val handle = os
-      .proc("scripts/run-test.py", "ip", "-c", config, "--trace", "--no-log", "--base-out-dir", testRunDir, caseName)
+      .proc("scripts/run-test.py", runTarget, "-c", config, "--trace", "--no-log", "--base-out-dir", testRunDir, caseName)
       .call(check=false)
   }}
 }
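Both entry points now thread a `runTarget` (defaulting to the old `ip` behavior) through to `scripts/run-test.py`. For `subsystem` runs the `--no-log` flag is dropped, presumably so the UART output of cases like `uarttest-intrinsic` stays visible, and `writeCycleUpdates` is skipped since subsystem runs don't feed the cycle tables. A quick standalone check of that argument assembly, with hypothetical config/case values:

```scala
// Only the runTarget handling is exercised here; paths and names are made up.
def args(runTarget: String): Seq[String] =
  Seq("scripts/run-test.py", runTarget, "-c", "blastoise", "--base-out-dir", "testrun") ++
    (if (runTarget == "ip") Seq("--no-log") else Seq.empty) ++
    Seq("uarttest-intrinsic")

assert(args("ip").contains("--no-log"))         // ip runs suppress the emulator log
assert(!args("subsystem").contains("--no-log")) // subsystem runs keep it (UART output)
```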
199 changes: 199 additions & 0 deletions .github/workflows/subsystem.yml
@@ -0,0 +1,199 @@
name: subsystem
on: [pull_request]
env:
  USER: runner
  JAVA_OPTS: "-Duser.home=/run/github-runner/sequencer"

# Cancel the current workflow when a new commit is pushed
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: true

jobs:
  build-emulators:
    name: "Build Emulators"
    runs-on: [self-hosted, linux, nixos, "AMD Ryzen 9 7940HS w/ Radeon 780M Graphics"]
    strategy:
      matrix:
        config:
          - squirtle
          - blastoise
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Build verilator emulator"
        run: |
          nix build '.#t1.${{ matrix.config }}.subsystem.emu' -L --no-link --cores 64

  gen-matrix:
    name: "Prepare for running testcases"
    needs: [build-emulators]
    runs-on: [self-hosted, linux, nixos, "AMD Ryzen 9 7940HS w/ Radeon 780M Graphics"]
    env:
      RUNNERS: 70
    outputs:
      ci-tests: ${{ steps.gen-matrix.outputs.matrix }}
    steps:
      # actions/checkout uses the "event" commit to check out the repository,
      # which leads to an unexpected issue: the "event" commit doesn't belong to the repository,
      # causing the derivation build output to not be cached correctly.
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Build all testcases"
        run: |
          # Build testcases with vlen 1024 and vlen 4096
          nix build ".#t1.bulbasaur.cases.all" --max-jobs auto -L --no-link --cores 64
          nix build ".#t1.psyduck.cases.all" --max-jobs auto -L --no-link --cores 64
      - id: gen-matrix
        name: "Generate test matrix"
        run: |
          echo -n matrix= >> "$GITHUB_OUTPUT"
          nix shell ".#ammonite" -c .github/scripts/ci.sc generateCiMatrix subsystem.json "$RUNNERS" >> "$GITHUB_OUTPUT"

  build-trace-emulators:
    name: "Build trace emulator"
    needs: [gen-matrix]
    runs-on: [self-hosted, linux, nixos, "AMD Ryzen 9 7940HS w/ Radeon 780M Graphics"]
    strategy:
      fail-fast: false
      matrix:
        config:
          - squirtle
          - blastoise
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Build verilator emulator with trace"
        run: nix build '.#t1.${{ matrix.config }}.subsystem.emu-trace' -L --no-link --cores 64

  test-emit:
    name: "Test elaborate"
    runs-on: [self-hosted, linux, nixos]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Test elaborate"
        run: |
          set -e
          nix build '.#t1.configgen' -L --out-link ./config-gen
          for cfg in $(jq -r '.[]' ./config-gen/share/all-supported-configs.json); do
            echo "Building .#t1.${cfg}.subsystem.rtl"
            nix build ".#t1.${cfg}.subsystem.rtl" -L
          done

  run-testcases:
    name: "Run testcases"
    needs: [gen-matrix]
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.gen-matrix.outputs.ci-tests) }}
    runs-on: [self-hosted, linux, nixos]
    outputs:
      result: ${{ steps.ci-run.outputs.result }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Run testcases"
        id: ci-run
        run: |
          nix shell ".#ammonite" -c .github/scripts/ci.sc runTests \
            --runTarget subsystem \
            --jobs "${{ matrix.jobs }}" \
            --resultDir test-results-$(head -c 10 /dev/urandom | base32)

      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: test-reports-${{ matrix.id }}
          path: |
            test-results-*/failed-tests.md

      - uses: actions/upload-artifact@v4
        if: failure()
        with:
          name: failed-logs-${{ matrix.id }}
          path: test-results-*/failed-logs

  gen-fail-wave-matrix:
    name: "Generate matrix for re-testing failing tests"
    if: ${{ !cancelled() }}
    needs: [run-testcases]
    runs-on: [self-hosted, linux, nixos]
    outputs:
      generate_wave: ${{ steps.generate-matrix.outputs.generate_wave }}
      retry_tasks: ${{ steps.generate-matrix.outputs.retry_tasks }}
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: test-reports-*
          merge-multiple: true
      - id: generate-matrix
        name: "Generate matrix"
        run: |
          touch all-failed-tests.txt
          shopt -s nullglob
          cat test-results-*/failed-tests.md > all-failed-tests.txt
          retry=$(sed 's/\* //' all-failed-tests.txt | shuf -n3) # retry at most three failed tests, picked at random
          echo "build wave for: $retry"
          if [ -n "$retry" ]; then
            echo "generate_wave=true" >> "$GITHUB_OUTPUT"
            echo -n "retry_tasks=" >> "$GITHUB_OUTPUT"
            echo "$retry" | \
              jq -nR --indent 0 '{"include": [inputs | {"job": ., "id": (input_line_number)}]}' >> "$GITHUB_OUTPUT"
          fi

  build-fail-wave:
    name: "Generate wave for failing tests"
    needs: [build-emulators, gen-fail-wave-matrix]
    if: ${{ !cancelled() && needs.gen-fail-wave-matrix.outputs.generate_wave == 'true' }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.gen-fail-wave-matrix.outputs.retry_tasks) }}
    runs-on: [self-hosted, linux, nixos]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: "Run failed testcases"
        run: |
          nix shell ".#ammonite" -c .github/scripts/ci.sc runFailedTests --runTarget subsystem --jobs "${{ matrix.job }}"
          waveFile=$(find testrun -name 'wave.fst')
          hierFile=$(find testrun -name 'wave.hier')
          if [[ -z "$waveFile" || -n "$hierFile" ]]; then # when the emulator fails, .hier is not merged into .fst
            echo "Verilator didn't generate the wave correctly"
            exit 1
          fi
          mv "$waveFile" ./wave-${{ matrix.job }}.fst
      - uses: actions/upload-artifact@v4
        with:
          name: failed-tests-wave-${{ matrix.id }}
          path: 'wave-*.fst'
      - run: |
          echo "Test ${{ matrix.job }} failed"
          # We are only running this post action for failing tests, so keep the workflow from finishing successfully
          exit 1

  report:
    name: "Report CI result"
    if: ${{ !cancelled() }}
    needs: [run-testcases]
    runs-on: [self-hosted, linux, nixos]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ github.head_ref }}
      - uses: actions/download-artifact@v4
        with:
          pattern: test-reports-*
          merge-multiple: true
      - name: "Print step summary"
        run: |
          echo -e "\n## Failed tests\n" >> $GITHUB_STEP_SUMMARY
          shopt -s nullglob
          cat test-results-*/failed-tests.md >> $GITHUB_STEP_SUMMARY
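The `jq -nR` pipeline in `gen-fail-wave-matrix` is the shell-side counterpart of `toMatrixJson`: it wraps each retried job name in the `{"include": [{"job": ..., "id": ...}]}` shape that `fromJSON` in `build-fail-wave` consumes, with `input_line_number` providing 1-based ids. A hedged Scala equivalent using ujson (available in the same ammonite shell the CI scripts run in), with made-up job names:

```scala
// Mirrors the jq pipeline: one matrix entry per retried job, ids numbered
// from 1 like jq's input_line_number.
val retried = Seq("blastoise,smoke-asm", "v1024-l8-b2,mmm-asm") // hypothetical
val matrix = ujson.Obj(
  "include" -> ujson.Arr(
    retried.zipWithIndex.map { case (job, i) =>
      ujson.Obj("job" -> job, "id" -> (i + 1))
    }: _*
  )
)
println(matrix) // {"include":[{"job":"blastoise,smoke-asm","id":1},...]}
```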
4 changes: 4 additions & 0 deletions nix/t1/testcases/make-intrinsic-case.nix
@@ -6,13 +6,17 @@ stdenv.mkDerivation (rec {
   name = "${caseName}-intrinsic";

   unpackPhase = ''
+    runHook preUnpack
+
     if [ -z "''${srcs:-}" ]; then
       if [ -z "''${src:-}" ]; then
         echo 'variable $src or $srcs should point to the source'
         exit 1
       fi
       srcs="$src"
     fi
+
+    runHook postUnpack
   '';

   NIX_CFLAGS_COMPILE = [
8 changes: 4 additions & 4 deletions scripts/run-test.py
@@ -179,11 +179,11 @@ def load_elf_from_dir(config, cases_dir, case_name, use_individual_drv, force_x8
         )
     else:
         nix_args.append(f".#t1.{config}.{cases_attr_name}.all")
-    logger.info(f'Run "{" ".join(nix_args)}"')
+    logger.info(f'Get ELFs dir by cmd: "{" ".join(nix_args)}"')
     cases_dir = subprocess.check_output(nix_args).strip().decode("UTF-8")

     cases_dir = Path(cases_dir)
-    logger.info(f"Running cases in {cases_dir}")
+    logger.info(f"Reading ELFs in {cases_dir}")

     case_config_path = (
         cases_dir / f"{case_name}.json"
@@ -244,7 +244,7 @@ def run_test(args):
         "-t",
         f"{args.out_dir}",
     ]
-    logger.info(f'Run "{" ".join(configgen_args)}"')
+    logger.info(f'Get emulator config by cmd: "{" ".join(configgen_args)}"')
     subprocess.Popen(configgen_args).wait()
     assert (
         elaborate_config_path.exists()
@@ -319,7 +319,7 @@ def optionals(cond, items):
         ]
     ) + emu_args

-    logger.info(f'Run "{" ".join(process_args)}"')
+    logger.info(f'Run {emu_type} emulator with cmd: "{" ".join(process_args)}"')
    return_code = subprocess.Popen(process_args).wait()

     if return_code != 0: