diff --git a/.github/actions/api_leak_detector/action.yaml b/.github/actions/api_leak_detector/action.yaml
deleted file mode 100644
index b40b06234c64b6..00000000000000
--- a/.github/actions/api_leak_detector/action.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Run API Leak Detector
-description: Runs the API leak detector
-inputs:
-  relative_manifest_path:
-    description: "Path to leak manifest file."
-    required: false
-    default: ""
-
-runs:
-  using: "composite"
-  steps:
-    - name: Run Detector
-      shell: bash
-      run: |
-        set -x
-        env
-        MANIFEST_FLAG=""
-        if [ "${{ inputs.relative_manifest_path }}" != "" ]; then
-          MANIFEST_FLAG="--relative-manifest-path ${{ matrix.target_platform }}/${{matrix.config}}/${{ inputs.relative_manifest_path }}"
-        fi
-        if [ -z "${{matrix.sb_api_version}}" ]; then
-          SB_API_VERSION_FLAG=""
-        else
-          SB_API_VERSION_FLAG="--sb_api_version=${{matrix.sb_api_version}}"
-        fi
-        python3 starboard/tools/api_leak_detector/api_leak_detector.py -p ${{ matrix.target_platform }} -c ${{matrix.config}} --submit-check $MANIFEST_FLAG ${SB_API_VERSION_FLAG}
diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml
index 927ec0bec3260a..9d6c3366bd9edf 100644
--- a/.github/actions/build/action.yaml
+++ b/.github/actions/build/action.yaml
@@ -1,40 +1,62 @@
 name: Build Cobalt
 description: Builds Cobalt targets
+inputs:
+  targets:
+    description: "List of ninja targets for Chrobalt build."
+    required: true
 runs:
   using: "composite"
   steps:
-    - name: Set up Cloud SDK
-      if: startsWith(${{matrix.target_platform}}, 'android')
-      uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1
-    - name: Set Android env vars
-      if: startsWith(${{matrix.target_platform}}, 'android')
-      run: |
-        echo "ANDROID_HOME=/root/starboard-toolchains/AndroidSdk/" >> $GITHUB_ENV
-        PROJECT_NAME=$(gcloud config get-value project)
-        echo "GCS_NIGHTLY_PATH=gs://${PROJECT_NAME}-build-artifacts" >> $GITHUB_ENV
-      shell: bash
-    - name: Build
-      run: |
-        set -x
-        env
-        if [ -z ${COBALT_EVERGREEN_LOADER+x} ]; then
-          BUILD_PLATFORM=${{ matrix.target_platform }}
-          BUILD_TARGET=all
-          if [[ "${{matrix.config}}" =~ ^(qa|gold)$ ]]; then
-            BUILD_TARGET=default
-          fi
-        else
-          BUILD_PLATFORM=${COBALT_EVERGREEN_LOADER}
-          BUILD_TARGET='loader_app_install elf_loader_sandbox_install native_target/crashpad_handler'
-        fi
-        # GitHub Runners have home set to /github/home.
-        if [ -d /root/starboard-toolchains ]; then
-          ln -s /root/starboard-toolchains /github/home/starboard-toolchains
-        fi
-        # Set Ninja output format
-        NINJA_STATUS="[%e sec | %f/%t %u remaining | %c/sec | j%r] "
-        ninja -C ${GITHUB_WORKSPACE}/out/${BUILD_PLATFORM}_${{matrix.config}} ${BUILD_TARGET}
-      shell: bash
-    - name: Show Sccache Stats
-      run: sccache -s
-      shell: bash
+    - name: Get depot tools
+      run: git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+      shell: bash
+    - name: Add directory to PATH
+      run: echo "$GITHUB_WORKSPACE/depot_tools" >> $GITHUB_PATH
+      shell: bash
+    - name: Generate gclient file
+      run: gclient config --name=src https://github.com/${{ github.repository }}
+      shell: bash
+    - name: Set target OS for Android
+      if: startsWith(${{matrix.platform}}, 'android')
+      run: |
+        echo "target_os=['android']" >> .gclient
+        gclient validate
+      shell: bash
+    - name: Gclient sync
+      run: gclient sync -v --shallow --no-history -r ${{ github.sha }}
+      shell: bash
+    - name: sccache check
+      run: sccache -s
+      shell: bash
+    - name: gn gen
+      run: |
+        cd src
+        chrobalt/build/gn.py -p ${{ matrix.platform }} -C ${{ matrix.config }} --no-check
+      shell: bash
+    - name: gn dump args
+      run: |
+        cd src
+        gn args --list --short --overrides-only out/${{ matrix.platform }}_${{ matrix.config }}
+      shell: bash
+    - name: ninja build
+      env:
+        TARGETS: ${{ inputs.targets }}
+      run: |
+        set -x
+        cd src
+        ninja -C out/${{ matrix.platform }}_${{ matrix.config }} $(echo "${TARGETS}" | tr -d '"')
+      shell: bash
+    - name: sccache check
+      run: sccache -s
+      shell: bash
+    - name: find apks
+      run: find src/out -name \*.apk -print
+      shell: bash
+    - name: Archive Android APKs
+      if: startsWith(${{matrix.platform}}, 'android')
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ matrix.platform }} APKs
+        path: |
+          src/out/${{ matrix.platform }}_qa/apks/*.apk
+          src/out/${{ matrix.platform }}_qa/*_apk/*.apk
diff --git a/.github/actions/check_artifact_size/action.yaml b/.github/actions/check_artifact_size/action.yaml
deleted file mode 100644
index 380351ed8b5d4c..00000000000000
--- a/.github/actions/check_artifact_size/action.yaml
+++ /dev/null
@@ -1,153 +0,0 @@
-name: Check Artifact Size
-description: Check if the increase in artifact size exceeds the threshold, and if so, apply a label to the pull request.
-inputs:
-  workflow:
-    description: "Workflow to check artifact binary size for."
-    required: true
-  name:
-    description: "Name of the uploaded artifact, artifact is a zip file that can contain more than one binary"
-    required: true
-  path:
-    description: "Path to the newly created binary artifacts being checked."
-    required: true
-  thresholds:
-    description: "Thresholds is a JSON-formatted string that specifies the maximum permissible percentage increase in the size of each respective binary artifact."
-    required: true
-  token:
-    description: "Github token needed for downloading artifacts."
-    required: true
-runs:
-  using: "composite"
-  steps:
-    - name: 'Download artifact from main branch'
-      id: download-artifact
-      uses: actions/github-script@v6
-      with:
-        github-token: ${{inputs.token}}
-        script: |
-          const fs = require('fs');
-          const path = require('path');
-
-          // Get the latest successful workflow run on the main branch.
-          const workflowRuns = await github.rest.actions.listWorkflowRuns({
-            owner: context.repo.owner,
-            repo: context.repo.repo,
-            workflow_id: '${{ inputs.workflow }}.yaml',
-            branch: 'main',
-            status: 'success',
-            per_page: 1
-          });
-
-          const latestRun = workflowRuns.data.workflow_runs[0].id;
-
-          // Get the artifact uploaded on the latest successful workflow run on the main branch.
-          let allArtifacts = [];
-          let page = 1;
-          let hasNextPage = true;
-          while (hasNextPage) {
-            const response = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: latestRun,
-              page: page
-            });
-
-            allArtifacts = allArtifacts.concat(response.data.artifacts);
-
-            if (response.data.artifacts.length === 0) {
-              hasNextPage = false;
-            } else {
-              page++;
-            }
-          }
-
-          const matchArtifacts = allArtifacts.filter((artifact) => {
-            return artifact.name == '${{ inputs.name }}';
-          });
-
-          if (matchArtifacts.length == 1) {
-            console.log(`Found the latest uploaded artifact ${{ inputs.name }} on the main branch.`);
-
-            const download = await github.rest.actions.downloadArtifact({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              artifact_id: matchArtifacts[0].id,
-              archive_format: 'zip',
-            });
-
-            const downloadDir = path.join(process.env.GITHUB_WORKSPACE, 'artifact_tmp');
-            fs.mkdirSync(downloadDir);
-            fs.writeFileSync(path.join(downloadDir, `${{ inputs.name }}.zip`), Buffer.from(download.data));
-
-            core.setOutput("downloadDir", downloadDir);
-          } else {
-            core.setFailed(`Expected one artifact with name ${{ inputs.name }}. Found ${matchArtifacts.length}.`);
-          }
-
-    - name: 'Unzip artifact from main branch'
-      id: unzip-downloaded-artifact
-      shell: bash
-      run: |
-        unzip "${{ steps.download-artifact.outputs.downloadDir }}/${{ inputs.name }}.zip" -d "${{ steps.download-artifact.outputs.downloadDir }}"
-
-    - name: 'Check new artifact size against main branch'
-      id: check-artifact-size
-      uses: actions/github-script@v6
-      with:
-        script: |
-          const fs = require('fs');
-          const path = require('path');
-
-          const fileSizeThresholds = JSON.parse('${{ inputs.thresholds }}');
-
-          for (let file in fileSizeThresholds) {
-            console.log(`Checking file size of ${file}.`);
-
-            const fileName = file.split('/').pop();
-            const downloadFilePath = path.join('${{ steps.download-artifact.outputs.downloadDir }}', fileName);
-            if (!fs.existsSync(downloadFilePath)) {
-              core.setFailed(`File ${file} was not uploaded to the main branch.`);
-            }
-
-            const filePath = path.join(process.env.GITHUB_WORKSPACE, '${{ inputs.path }}', file);
-            if (!fs.existsSync(filePath)) {
-              core.setFailed(`File ${file} was not created in the current workflow run.`);
-            }
-
-            const oldStats = fs.statSync(downloadFilePath);
-            const oldSize = oldStats.size;
-            const newStats = fs.statSync(filePath);
-            const newSize = newStats.size;
-
-            console.log(`Latest uploaded artifact size on the main branch is ${oldSize / 1024}kB, new artifact size generated in this PR is ${newSize / 1024}kB.`);
-
-            const deltaSize = newSize - oldSize;
-            const deltaThreshold = (Math.abs(deltaSize) / oldSize * 100).toFixed(1);
-
-            if (deltaSize < 0) {
-              console.log(`Artifact size is decreased by ${Math.abs(deltaSize)} (${deltaThreshold}%).`);
-            } else {
-              console.log(`Artifact size is increased by ${deltaSize} (${deltaThreshold}%).`);
-              const threshold = (fileSizeThresholds[file] * 100).toFixed(1);
-              if (deltaThreshold > threshold) {
-                console.error(`Artifact size increase exceeds threshold ${threshold}%.`);
-                core.setOutput("addLabel", true);
-              }
-            }
-          }
-    - name: 'Remove downloaded artifact'
-      id: remove-downloaded-artifact
-      shell: bash
-      run: rm -r "${{ steps.download-artifact.outputs.downloadDir }}"
-    - name: 'Add label for artifact size increase violation'
-      id: add-label
-      if: |
-        steps.check-artifact-size.outputs.addLabel &&
-        github.event.pull_request.merged == true &&
-        github.event.pull_request.merge_commit_sha != null
-      shell: bash
-      run: |
-        curl -s -X POST -H "Authorization: token ${{ inputs.token }}" \
-          -H "Accept: application/vnd.github.v3+json" \
-          -d '["artifact size increase violation"]' \
-          "https://api.github.com/repos/${{ github.event.repository.full_name }}/issues/${{ github.event.number }}/labels"
diff --git a/.github/actions/docker/action.yaml b/.github/actions/docker/action.yaml
index d32282f5250686..3b8eecd079bdd5 100644
--- a/.github/actions/docker/action.yaml
+++ b/.github/actions/docker/action.yaml
@@ -4,110 +4,68 @@ inputs:
   docker_service:
     description: "Docker compose service."
     required: true
-  docker_image:
-    description: "Docker image name."
-    required: true
 runs:
   using: "composite"
   steps:
-    - name: Rename Limit
-      run: git config diff.renameLimit 999999
-      shell: bash
-    - name: Get docker file changes
-      id: changed-files
-      uses: tj-actions/changed-files@2d756ea4c53f7f6b397767d8723b3a10a9f35bf2 # v44
-      with:
-        files_ignore: third_party/**
-        files: |
-          docker-compose.yml
-          docker/linux/**
-          .github/actions/docker/**
-    - name: Retrieve Docker metadata
-      id: meta
-      uses: docker/metadata-action@507c2f2dc502c992ad446e3d7a5dfbe311567a96 # v4.3.0
+    - name: Checkout files
+      uses: actions/checkout@v4
       with:
-        images: ${{env.REGISTRY}}/${{github.repository}}/${{inputs.docker_image}}
-        tags: |
-          type=ref,event=branch
-          type=ref,event=tag
-          type=ref,event=pr
-    - name: Set Docker Tag
-      id: set-docker-tag
+        fetch-depth: 1
+    - name: Set up Docker Buildx
+      uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
+    - name: Set env vars
       run: |
         set -x
-        docker_tag="${{ steps.meta.outputs.tags }}"
-        docker_tag="${docker_tag%.1[+,-]}"
-        echo "DOCKER_TAG=${docker_tag}" | head -n 1 >> $GITHUB_ENV
+        tag=${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref_name }}
+        tag=${tag//\//__}
+        tag="${tag%.1[+,-]}"
+        echo "FLOATING_TAG=${tag}" >> $GITHUB_ENV
+        full_git_sha=${{ github.sha }}
+        short_git_sha="${full_git_sha::7}"
+        echo "GIT_SHA=${short_git_sha}" >> $GITHUB_ENV
       shell: bash
-    # We need to set docker tag properly for pull requests. In those scenarios where no docker related files
-    # were changed we need to use an existing image (e.g. main). In cases where docker image is rebuilt we have
-    # to use tag generated by the image build.
-    - name: Retrieve Docker metadata for PR
-      id: pr-meta
-      uses: docker/metadata-action@507c2f2dc502c992ad446e3d7a5dfbe311567a96 # v4.3.0
-      with:
-        images: ${{env.REGISTRY}}/${{github.repository}}/${{inputs.docker_image}}
-        tags: type=raw,value=${{ github.base_ref }}
     - name: Set Docker Tag
-      id: set-docker-tag-presubmit-non-fork
-      env:
-        REPO: ${{ github.repository }}
-      if: ${{ (steps.changed-files.outputs.any_changed == 'false') && (github.event_name == 'pull_request') }}
+      id: set-docker-tag-github
+      if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.full_name == github.repository) }}
       run: |
         set -x
-        docker_tag="${{ steps.pr-meta.outputs.tags }}"
-        docker_tag="${docker_tag%.1[+,-]}"
-        echo "DOCKER_TAG=${docker_tag}" >> $GITHUB_ENV
+        echo "DOCKER_TAG=ghcr.io/${{ github.repository }}/${{ inputs.docker_service }}:${GIT_SHA}" >> $GITHUB_ENV
+        echo "DOCKER_FLOATING_TAG=ghcr.io/${{ github.repository }}/${{ inputs.docker_service }}:${FLOATING_TAG}" >> $GITHUB_ENV
       shell: bash
+    # Handle Google Artifact Registry used for pull requests off forked repos.
     - name: Set up Cloud SDK
+      if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name != github.repository) }}
       uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1
-    - name: Configure Docker auth for GCloud
-      shell: bash
-      run: |
-        gcloud auth configure-docker
-    - name: Set Docker Tag
-      id: set-docker-tag-presubmit-fork
-      env:
-        GITHUB_EVENT_NUMBER: ${{ github.event.number }}
-      if: ${{ (steps.changed-files.outputs.any_changed == 'true') && (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.fork) }}
+    - name: Login to GAR and set tag
+      if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name != github.repository) }}
       run: |
-        # Need to login to GCR to be able to push images created by fork based PR workflows.
+        # Need to login to GAR to be able to push images created by fork based PR workflows.
         PROJECT_NAME=$(gcloud config get-value project)
         METADATA="http://metadata.google.internal./computeMetadata/v1"
         SVC_ACCT="${METADATA}/instance/service-accounts/default"
         ACCESS_TOKEN=$(curl -H 'Metadata-Flavor: Google' ${SVC_ACCT}/token | cut -d'"' -f 4)
-        printf ${ACCESS_TOKEN} | docker login -u oauth2accesstoken --password-stdin https://gcr.io
-        echo "DOCKER_TAG=gcr.io/${PROJECT_NAME}/${{inputs.docker_image}}:pr-${GITHUB_EVENT_NUMBER}" >> $GITHUB_ENV
+        printf ${ACCESS_TOKEN} | docker login -u oauth2accesstoken --password-stdin https://us-central1-docker.pkg.dev
+        full_git_sha=${{ github.sha }}
+        echo "DOCKER_TAG=us-central1-docker.pkg.dev/${PROJECT_NAME}/github/${{ inputs.docker_service }}:${GIT_SHA}" >> $GITHUB_ENV
+        echo "DOCKER_FLOATING_TAG=us-central1-docker.pkg.dev/${PROJECT_NAME}/github/${{ inputs.docker_service }}:${FLOATING_TAG}" >> $GITHUB_ENV
       shell: bash
-    - name: Process Docker metadata
-      id: process-docker-metadata
+    # Proceed with docker pull, build, tag, and push.
+    - name: Docker Pull
       run: |
-        set -x
-        set +e
-        docker manifest inspect $DOCKER_TAG > /dev/null
-        if [[ $? -ne 0 || ${{ steps.changed-files.outputs.any_changed }} == 'true' ]]; then
-          echo "need_to_build=true" >> $GITHUB_ENV
-        else
-          echo "need_to_build=false" >> $GITHUB_ENV
-        fi
+        docker pull "${DOCKER_TAG}" || true
+        docker pull "${DOCKER_FLOATING_TAG}" || true
      shell: bash
-    - name: Build containers with Docker Compose
-      id: build-image
-      if: env.need_to_build == 'true'
-      env:
-        SERVICE: ${{inputs.docker_service}}
+    - name: Docker Build
+      run: docker compose build ${{inputs.docker_service}}
      shell: bash
+    - name: Docker Tag
      run: |
-        set -xue
-        DOCKER_BUILDKIT=0 docker compose -f docker-compose.yml up --build --no-start "${SERVICE}"
-    - name: Tag images
-      id: tag-images
-      if: env.need_to_build == 'true'
-      run: docker tag ${{inputs.docker_image}} $DOCKER_TAG
+        docker tag ghcr.io/${{ github.repository }}/${{ inputs.docker_service }}:latest ${DOCKER_TAG}
+        docker tag ghcr.io/${{ github.repository }}/${{ inputs.docker_service }}:latest ${DOCKER_FLOATING_TAG}
      shell: bash
-    - name: Push images
-      id: push-image
-      if: env.need_to_build == 'true'
-      run: docker push ${DOCKER_TAG}
+    - name: Docker Push
+      run: |
+        docker push ${DOCKER_TAG}
+        docker push ${DOCKER_FLOATING_TAG}
      shell: bash
diff --git a/.github/actions/gn/action.yaml b/.github/actions/gn/action.yaml
deleted file mode 100644
index 47b10aa66a230f..00000000000000
--- a/.github/actions/gn/action.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: GN
-description: Generates and checks GN.
-runs:
-  using: "composite"
-  steps:
-    - name: Configure Environment
-      shell: bash
-      run: |
-        echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
-    - name: Set up Cloud SDK
-      if: startsWith(${{matrix.target_platform}}, 'android')
-      uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1
-    - name: Configure Android Environment
-      shell: bash
-      if: startsWith(${{matrix.target_platform}}, 'android')
-      run: |
-        echo "ANDROID_HOME=/root/starboard-toolchains/AndroidSdk/" >> $GITHUB_ENV
-        echo "COBALT_GRADLE_BUILD_COUNT=24" >> $GITHUB_ENV
-        PROJECT_NAME=$(gcloud config get-value project)
-        echo "GCS_NIGHTLY_PATH=gs://${PROJECT_NAME}-build-artifacts" >> $GITHUB_ENV
-    - name: GN
-      run: |
-        set -x
-        extra_arguments="${{matrix.extra_gn_arguments}}"
-        if [ -z ${COBALT_EVERGREEN_LOADER+x} ]; then
-          BUILD_PLATFORM=${{ matrix.target_platform }}
-        else
-          BUILD_PLATFORM=${COBALT_EVERGREEN_LOADER}
-          if [ ! -z "${{matrix.evergreen_loader_extra_gn_arguments}}" ]
-          then
-            extra_arguments="${{matrix.evergreen_loader_extra_gn_arguments}}"
-          fi
-        fi
-        if [ -z "${{matrix.sb_api_version}}" ]; then
-          SB_API_VERSION_FLAG=""
-        else
-          SB_API_VERSION_FLAG="sb_api_version=${{matrix.sb_api_version}}"
-        fi
-        gn gen $GITHUB_WORKSPACE/out/${BUILD_PLATFORM}_${{matrix.config}} --args="target_platform=\"${BUILD_PLATFORM}\" ${SB_API_VERSION_FLAG} ${{matrix.target_os}} ${{matrix.target_cpu}} ${extra_arguments} is_internal_build=false build_type=\"${{matrix.config}}\""
-        gn check $GITHUB_WORKSPACE/out/${BUILD_PLATFORM}_${{ matrix.config }}
-      shell: bash
diff --git a/.github/actions/on_device_tests/action.yaml b/.github/actions/on_device_tests/action.yaml
deleted file mode 100644
index 82e3c786ee64d1..00000000000000
--- a/.github/actions/on_device_tests/action.yaml
+++ /dev/null
@@ -1,164 +0,0 @@
-name: On Device Test
-description: Runs on-device tests.
- -runs: - using: "composite" - steps: - - name: Install Requirements - run: | - pip3 install grpcio==1.38.0 grpcio-tools==1.38.0 - shell: bash - - name: Generate gRPC files - run: | - python -m grpc_tools.protoc -Itools/ --python_out=tools/ --grpc_python_out=tools/ tools/on_device_tests_gateway.proto - shell: bash - - name: Set Up Cloud SDK - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Set env vars - run: | - echo "PROJECT_NAME=$(gcloud config get-value project)" >> $GITHUB_ENV - - # Test results and logs - echo "GCS_RESULTS_PATH=gs://cobalt-unittest-storage/results/${{ matrix.name }}/${{ github.run_id }}_${{ matrix.shard }}" >> $GITHUB_ENV - - # Boot loader env - if [ "${COBALT_EVERGREEN_LOADER}" != "null" ]; then - echo "LOADER_CONFIG=${{ matrix.config }}" >> $GITHUB_ENV - echo "LOADER_PLATFORM=${COBALT_EVERGREEN_LOADER}" >> $GITHUB_ENV - fi - - # Dimension env - if [ "${{ matrix.dimension }}" != "null" ]; then - echo "DIMENSION=${{ matrix.dimension }}" >> $GITHUB_ENV - fi - - # Shard env - if [[ "${{matrix.shard}}" == 'black_box_test' || "${{matrix.shard}}" == 'evergreen_test' || "${{matrix.shard}}" == 'unit_test' ]]; then - echo "SHARD_NAME=${{ matrix.shard }}" >> $GITHUB_ENV - echo "TEST_TYPE=${{ matrix.shard }}" >> $GITHUB_ENV - else - echo "SHARD_NAME=unit_test_${{ matrix.shard }}" >> $GITHUB_ENV - echo "TEST_TYPE=unit_test" >> $GITHUB_ENV - echo "USE_SHARDING=1" >> $GITHUB_ENV - fi - shell: bash - - name: Run ${{ env.SHARD_NAME }} Tests on ${{ matrix.platform }} Platform - env: - GCS_PATH: gs://${{ env.PROJECT_NAME }}-test-artifacts/${{ github.workflow }}/${{ github.run_number }}/${{ matrix.platform }}_${{ matrix.config }} - GITHUB_SHA: ${{ github.sha }} - GITHUB_TOKEN: ${{ github.token }} - GITHUB_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} - GITHUB_EVENT_NAME: ${{ github.event_name }} - GITHUB_ACTOR: ${{ github.actor }} - GITHUB_TRIGGERING_ACTOR: ${{ github.triggering_actor }} - GITHUB_ACTOR_ID: ${{ github.actor_id }} - GITHUB_REPO: ${{ github.repository }} - GITHUB_PR_HEAD_USER_LOGIN: ${{ github.event.pull_request.head.user.login }} - GITHUB_PR_HEAD_USER_ID: ${{ github.event.pull_request.head.user.id }} - GITHUB_COMMIT_AUTHOR_USERNAME: ${{ github.event.commits[0].author.username }} - GITHUB_COMMIT_AUTHOR_EMAIL: ${{ github.event.commits[0].author.email }} - GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} - GITHUB_RUN_NUMBER: ${{ github.run_number }} - GITHUB_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - GITHUB_WORKFLOW: ${{ github.workflow }} - run: | - set -uxe - python3 -u tools/on_device_tests_gateway_client.py \ - --token ${GITHUB_TOKEN} \ - --change_id ${GITHUB_PR_NUMBER:-postsubmit} \ - trigger \ - --test_type ${TEST_TYPE} \ - --platform ${{ matrix.target_platform }} \ - --config ${{ matrix.config }} \ - --tag cobalt_github_${GITHUB_EVENT_NAME} \ - --builder_name github_${{ matrix.platform }}_tests \ - --build_number ${GITHUB_RUN_NUMBER} \ - --builder_url ${GITHUB_RUN_URL} \ - ${LOADER_PLATFORM:+"--loader_config" "$LOADER_CONFIG"} \ - ${LOADER_PLATFORM:+"--loader_platform" "$LOADER_PLATFORM"} \ - ${DIMENSION:+"--dimension" "$DIMENSION"} \ - ${USE_SHARDING:+"--unittest_shard_index" "${{ matrix.shard }}"} \ - ${ON_DEVICE_TEST_ATTEMPTS:+"--test_attempts" "$ON_DEVICE_TEST_ATTEMPTS"} \ - --archive_path "${GCS_PATH}/artifacts.tar" \ - --gcs_result_path "${GCS_RESULTS_PATH}" \ - --label github \ - --label ${GITHUB_EVENT_NAME} \ - --label ${GITHUB_WORKFLOW} \ - --label 
actor-${GITHUB_ACTOR} \ - --label actor_id-${GITHUB_ACTOR_ID} \ - --label triggering_actor-${GITHUB_TRIGGERING_ACTOR} \ - --label sha-${GITHUB_SHA} \ - --label repository-${GITHUB_REPO} \ - --label author-${GITHUB_PR_HEAD_USER_LOGIN:-$GITHUB_COMMIT_AUTHOR_USERNAME} \ - --label author_id-${GITHUB_PR_HEAD_USER_ID:-$GITHUB_COMMIT_AUTHOR_EMAIL} - shell: bash - - name: Download ${{ matrix.platform }} Test Results - if: always() && env.TEST_TYPE == 'unit_test' - run: | - # Don't break on error (-e), some commands are expected to fail. - set -ux - - COBALT_LOGS_DIR="${GITHUB_WORKSPACE}/cobalt_logs" - UNIT_TEST_RESULT_PATH="${GITHUB_WORKSPACE}/unit-test-results" - COBALT_XMLS_FILENAME="cobalt_xmls.zip" - - # Forward environment variables for uploading artifacts in later steps. - echo "UNIT_TEST_RESULT_PATH=${UNIT_TEST_RESULT_PATH}" >> $GITHUB_ENV - echo "COBALT_LOGS_DIR=${COBALT_LOGS_DIR}" >> $GITHUB_ENV - - mkdir -p "${GITHUB_WORKSPACE}/test_results" - cd "${GITHUB_WORKSPACE}/test_results" - - i=0 - # Try downloading the results for 6x 10 seconds before giving up. - while [ $i -lt 6 ]; do - # The results are uploaded after the test has completed. - sleep 10 - - # The log files are named by the device lab test driver. - COBALT_ERROR_LOG="webDriverTestLog.ERROR" - COBALT_INFO_LOG="webDriverTestLog.INFO" - - # This command will fail until the results have been uploaded. - gsutil cp "${GCS_RESULTS_PATH}/${COBALT_ERROR_LOG}" . - gsutil cp "${GCS_RESULTS_PATH}/${COBALT_INFO_LOG}" . - gsutil cp "${GCS_RESULTS_PATH}/${COBALT_XMLS_FILENAME}" . - - # Break if all files were downloaded. - if [[ -f "${COBALT_XMLS_FILENAME}" && -f "${COBALT_ERROR_LOG}" && -f "${COBALT_INFO_LOG}" ]]; then - break - fi - - i=$(( ${i} + 1 )) - done - - # Print device logs. Might fail if the log file failed to download. - cat ${COBALT_ERROR_LOG} - - # Rename log files for archiving to not expose legacy weirdness. - mkdir -p "${COBALT_LOGS_DIR}/${{ matrix.platform }}/" - mv "${COBALT_ERROR_LOG}" "${COBALT_LOGS_DIR}/${{ matrix.platform }}/stderr_${{ matrix.shard }}.log" - mv "${COBALT_INFO_LOG}" "${COBALT_LOGS_DIR}/${{ matrix.platform }}/stdout_${{ matrix.shard }}.log" - - # Prepare unit test results for DataDog upload. - RESULT_PATH=${UNIT_TEST_RESULT_PATH}/${{ matrix.platform }}/${{ matrix.shard }}/ - mkdir -p ${RESULT_PATH} - - # Set tags for test differentiation. - tags="platform:${{ matrix.platform }}" - echo $tags > ${UNIT_TEST_RESULT_PATH}/${{ matrix.platform }}/TAGS - - unzip ${COBALT_XMLS_FILENAME} -d ${RESULT_PATH} - shell: bash - - name: Archive Unit Test Logs - uses: actions/upload-artifact@v3 - if: always() && env.TEST_TYPE == 'unit_test' - with: - name: Device logs - path: ${{ env.COBALT_LOGS_DIR }}/ - - name: Archive Unit Test Results - uses: actions/upload-artifact@v3 - if: always() && env.TEST_TYPE == 'unit_test' - with: - name: unit-test-results - path: ${{ env.UNIT_TEST_RESULT_PATH }}/ diff --git a/.github/actions/on_host_test/action.yaml b/.github/actions/on_host_test/action.yaml deleted file mode 100644 index 2276acdc37ed3e..00000000000000 --- a/.github/actions/on_host_test/action.yaml +++ /dev/null @@ -1,122 +0,0 @@ -name: On Host Test -description: Runs on-host tests. -inputs: - os: - description: "Host OS (either linux or windows)." 
- required: true -runs: - using: "composite" - steps: - - name: Set up Cloud SDK - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Configure Environment - id: configure-environment - shell: bash - run: | - set -x - if [ "${{inputs.os}}" == 'linux' ] - then - echo "ARCHIVE_EXTENSION=tar.xz" >> $GITHUB_ENV - elif [ "${{inputs.os}}" == 'windows' ] - then - echo "ARCHIVE_EXTENSION=tar.gz" >> $GITHUB_ENV - fi - - name: Download Archive - shell: bash - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -x - PROJECT_NAME=$(gcloud config get-value project) - gsutil cp gs://${PROJECT_NAME}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} ${GITHUB_WORKSPACE}/out/tmp/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} - - name: Extract Archive - shell: bash - run: | - set -x - parallel= - if [[ "${{inputs.os}}" == 'linux' ]]; then - parallel="--parallel" - fi - python3 ${GITHUB_WORKSPACE}/tools/create_archive.py -x -s ${GITHUB_WORKSPACE}/out/tmp/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} -d ${GITHUB_WORKSPACE}/out ${parallel} - rm -rf ${GITHUB_WORKSPACE}/out/tmp - - name: Download Evergreen loader Archive - if: ${{ env.COBALT_EVERGREEN_LOADER != null && env.COBALT_EVERGREEN_LOADER != 'null' }} - shell: bash - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -x - PROJECT_NAME=$(gcloud config get-value project) - gsutil cp gs://${PROJECT_NAME}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/${COBALT_EVERGREEN_LOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} ${GITHUB_WORKSPACE}/out/tmp/${COBALT_EVERGREEN_LOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} - - name: Extract Evergreen loader Archive - if: ${{ env.COBALT_EVERGREEN_LOADER != null && env.COBALT_EVERGREEN_LOADER != 'null' }} - shell: bash - run: | - set -x - python3 ${GITHUB_WORKSPACE}/tools/create_archive.py -x -s ${GITHUB_WORKSPACE}/out/tmp/${COBALT_EVERGREEN_LOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} -d ${GITHUB_WORKSPACE}/out --parallel - rm -rf ${GITHUB_WORKSPACE}/out/tmp - - name: Set Env Variables - shell: bash - run: | - echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - echo "TEST_RESULTS_DIR=${GITHUB_WORKSPACE}/unit-test-results" >> $GITHUB_ENV - echo "XML_FILES_DIR=${GITHUB_WORKSPACE}/unit-test-results/${{ matrix.platform }}/${{ matrix.shard }}" >> $GITHUB_ENV - echo "COVERAGE_DIR=${GITHUB_WORKSPACE}/coverage" >> $GITHUB_ENV - - name: Run Tests - id: run-tests - shell: bash - run: | - set -x - - # Starboard toolchains are downloaded to a different dir on github. Create a symlink to reassure our tooling that everything is fine. 
- if [ -d /root/starboard-toolchains ]; then - ln -s /root/starboard-toolchains /github/home/starboard-toolchains - fi - loader_args='' - if [ "${COBALT_EVERGREEN_LOADER}" != "null" ]; then - loader_args="--loader_platform ${COBALT_EVERGREEN_LOADER} --loader_config ${{matrix.config}}" - fi - if [[ "${{matrix.shard}}" == 'integration' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} - elif [[ "${{matrix.shard}}" == 'blackbox' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set blackbox - elif [[ "${{matrix.shard}}" == 'wpt' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set wpt - elif [[ "${{matrix.shard}}" == 'evergreen' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/evergreen_tests/evergreen_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --no-can_mount_tmpfs - elif [[ "${{matrix.shard}}" == 'coverage' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -r ${loader_args} --xml_output_dir=${XML_FILES_DIR} --coverage_dir=${COVERAGE_DIR} --coverage_report - else - # Set 'test_type' output to unit_tests for the upload steps below. - echo "test_type=unit_tests" >> $GITHUB_OUTPUT - if [[ "${{inputs.os}}" == 'windows' ]]; then - python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r --xml_output_dir=${XML_FILES_DIR} - else - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r ${loader_args} --xml_output_dir=${XML_FILES_DIR} - fi - fi - - name: Populate TAGS for unit test report - if: always() && steps.run-tests.outputs.test_type == 'unit_tests' - shell: bash - run: | - # Set tags for test differentiation. 
- tags="platform:${{ matrix.platform }}" - tags="${tags},os:${{ inputs.os }}" - echo $tags > ${TEST_RESULTS_DIR}/${{ matrix.platform }}/TAGS - - name: Archive unit test results - uses: actions/upload-artifact@v3 - if: always() && steps.run-tests.outputs.test_type == 'unit_tests' - with: - name: unit-test-results - path: ${{env.TEST_RESULTS_DIR}}/ - - name: Archive coverage html report - if: success() && matrix.shard == 'coverage' - uses: actions/upload-artifact@v3 - with: - name: coverage-report - path: ${{env.COVERAGE_DIR}}/html - - name: Upload to Codecov - if: success() && matrix.shard == 'coverage' - uses: codecov/codecov-action@v3 - with: - files: ${{env.COVERAGE_DIR}}/report.txt diff --git a/.github/actions/pre_commit/action.yaml b/.github/actions/pre_commit/action.yaml index 63402a000390e9..376b31ffc10134 100644 --- a/.github/actions/pre_commit/action.yaml +++ b/.github/actions/pre_commit/action.yaml @@ -7,11 +7,13 @@ inputs: runs: using: "composite" steps: - - run: pre-commit run --show-diff-on-failure --color=always --from-ref ${{ inputs.base_ref }} --to-ref HEAD + - run: python -m pip install pre-commit shell: bash - env: - SKIP: 'run-py2-tests' - - run: pre-commit run --show-diff-on-failure --color=always --hook-stage push --from-ref ${{ inputs.base_ref }} --to-ref HEAD + - run: python -m pip freeze --local + shell: bash + - uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: pre-commit-3|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }} + - run: pre-commit run --show-diff-on-failure --color=always --all-files shell: bash - env: - SKIP: 'test-download-from-gcs-helper,check-bug-in-commit-message,check-if-starboard-interface-changed' diff --git a/.github/actions/upload_nightly_artifacts/action.yaml b/.github/actions/upload_nightly_artifacts/action.yaml deleted file mode 100644 index 345e06b4938ed3..00000000000000 --- a/.github/actions/upload_nightly_artifacts/action.yaml +++ /dev/null @@ -1,39 +0,0 @@ -name: Upload Nightly Artifacts -description: Archives and uploads nightly artifacts to GCS bucket. -runs: - using: "composite" - steps: - - name: Set up Cloud SDK - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Set env vars - env: - WORKFLOW: ${{github.workflow}} - run: | - echo "ARCHIVE_FILE=cobalt-${{matrix.platform}}_${{matrix.config}}.tar.gz" >> $GITHUB_ENV - echo "ARCHIVE_PATH=$GITHUB_WORKSPACE/cobalt-${{matrix.platform}}_${{matrix.config}}.tar.gz" >> $GITHUB_ENV - echo "PROJECT_NAME=$(gcloud config get-value project)" >> $GITHUB_ENV - echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - echo "GITHUB_RUN_NUMBER=${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV - echo "WORKFLOW=${WORKFLOW}" >> $GITHUB_ENV - echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - shell: bash - - name: Copy Out Folder - run: | - # Clean up. - [ -d "${GITHUB_WORKSPACE}/out/upload_out" ] && rm -rf "${GITHUB_WORKSPACE}/out/upload_out" - [ -f "${ARCHIVE_FILE}" ] && rm -rf "${ARCHIVE_FILE}" - # Create an archive. 
- python3 $GITHUB_WORKSPACE/tools/copy_and_filter_out_dir.py -d $GITHUB_WORKSPACE/out/upload_out/${{matrix.target_platform}}_${{matrix.config}} -s $GITHUB_WORKSPACE/out/${{matrix.target_platform}}_${{matrix.config}} - shell: bash - - name: Create Archive - run: | - set -x - cd "$GITHUB_WORKSPACE" - python3 $GITHUB_WORKSPACE/tools/create_archive.py --intermediate -d ${{env.ARCHIVE_FILE}} -s out/upload_out - shell: bash - - name: Upload Archive - id: upload-archive - shell: bash - run: | - set -uex - gsutil -d cp "${ARCHIVE_PATH}" "gs://${PROJECT_NAME}-build-artifacts/${WORKFLOW}/${TODAY}/${GITHUB_RUN_NUMBER}/" diff --git a/.github/actions/upload_test_artifacts/action.yaml b/.github/actions/upload_test_artifacts/action.yaml deleted file mode 100644 index 33eaa063fc0f3c..00000000000000 --- a/.github/actions/upload_test_artifacts/action.yaml +++ /dev/null @@ -1,78 +0,0 @@ -name: Test Artifact Upload -description: Uploads test archives to GCS and runs on-device tests. -inputs: - type: - description: "Type of artifacts to upload (ondevice or onhost)" - required: true - os: - description: "Host OS (either linux or windows)." - required: true -runs: - using: "composite" - steps: - - name: Set up Cloud SDK - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Configure Environment - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -x - project_name=$(gcloud config get-value project) - if [ -z ${COBALT_EVERGREEN_LOADER+x} ] - then - PLATFORM=${{matrix.platform}} - echo "TARGET_PLATFORM=${{matrix.target_platform}}" >> $GITHUB_ENV - else - PLATFORM=${COBALT_EVERGREEN_LOADER} - echo "TARGET_PLATFORM=${COBALT_EVERGREEN_LOADER}" >> $GITHUB_ENV - fi - - if [ "${{ inputs.type }}" == 'ondevice' ] - then - echo "ARCHIVE_FILE=artifacts.tar" >> $GITHUB_ENV - echo "ARCHIVE_PATH=$GITHUB_WORKSPACE/artifacts.tar" >> $GITHUB_ENV - echo "DESTINATION=${project_name}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/" >> $GITHUB_ENV - elif [ "${{ inputs.type }}" == 'onhost' ] - then - if [ "${{ inputs.os }}" == 'linux' ] - then - echo "ARCHIVE_FILE=${PLATFORM}_${{matrix.config}}.tar.xz" >> $GITHUB_ENV - echo "ARCHIVE_PATH=$GITHUB_WORKSPACE/${PLATFORM}_${{matrix.config}}.tar.xz" >> $GITHUB_ENV - elif [ "${{ inputs.os }}" == 'windows' ] - then - echo "ARCHIVE_FILE=${PLATFORM}_${{matrix.config}}.tar.gz" >> $GITHUB_ENV - echo "ARCHIVE_PATH=$GITHUB_WORKSPACE/${PLATFORM}_${{matrix.config}}.tar.gz" >> $GITHUB_ENV - fi - echo "DESTINATION=${project_name}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/" >> $GITHUB_ENV - fi - - echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - project_name=$(gcloud config get-value project) - shell: bash - - name: Create Test Files Archive - run: | - set -x - [ -f "${ARCHIVE_PATH}" ] && rm -rf "${ARCHIVE_PATH}" - if [ "${{ inputs.type }}" == 'ondevice' ] - then - outdir="$GITHUB_WORKSPACE/out/${{matrix.target_platform}}_${{matrix.config}}" - if [ -n "${COBALT_EVERGREEN_LOADER}" ] - then - outdir="${outdir} $GITHUB_WORKSPACE/out/${COBALT_EVERGREEN_LOADER}_${{matrix.config}}" - fi - python3 $GITHUB_WORKSPACE/tools/create_archive.py --test_infra -d ${{env.ARCHIVE_FILE}} -s ${outdir} - elif [ "${{ inputs.type }}" == 'onhost' ] - then - parallel= - if [[ "${{inputs.os}}" == 'linux' ]]; then - parallel='--parallel' - fi - python3 $GITHUB_WORKSPACE/tools/create_archive.py --intermediate -d ${{env.ARCHIVE_FILE}} -s $GITHUB_WORKSPACE/out/${TARGET_PLATFORM}_${{matrix.config}} 
$parallel - fi - shell: bash - - name: Copy Test Files to GCS - id: upload-test-archive - shell: bash - run: | - set -eux - gsutil -d cp "${ARCHIVE_PATH}" "gs://${DESTINATION}" diff --git a/.github/codecov.yml b/.github/codecov.yml deleted file mode 100644 index 236a6f1da932b7..00000000000000 --- a/.github/codecov.yml +++ /dev/null @@ -1,5 +0,0 @@ -coverage: - status: - project: - default: - threshold: 1% diff --git a/.github/config/android-arm.json b/.github/config/android-arm.json index 9ca34bb9050966..a6a2067b5af0bd 100644 --- a/.github/config/android-arm.json +++ b/.github/config/android-arm.json @@ -1,25 +1,27 @@ + { - "docker_service": "build-android", - "on_device_test": { - "enabled": true, - "tests": [ - "0", - "1", - "2", - "3", - "black_box_test" + "docker_service": "linux", + "platforms": [ + "android-arm" + ], + "targets": [ + "base_unittests", + "sql_unittests", + "net_unittests", + "url_unittests", + "ipc_tests", + "mojo_unittests", + "gpu_unittests", + "gin_unittests", + "blink_unittests", + "content_shell", + "system_webview_apk", + "system_webview_shell_apk" + ], + "includes": [ + { + "name":"arm", + "platform":"android-arm" + } ] - }, - "platforms": [ - "android-arm" - ], - "includes": [ - { - "name":"arm", - "platform":"android-arm", - "target_platform":"android-arm", - "target_cpu":"target_cpu=\\\"arm\\\"", - "target_os":"target_os=\\\"android\\\"" - } - ] } diff --git a/.github/config/android-arm64.json b/.github/config/android-arm64.json index 92f2e4a7e01125..de5836747ac313 100644 --- a/.github/config/android-arm64.json +++ b/.github/config/android-arm64.json @@ -1,25 +1,26 @@ { - "docker_service": "build-android", - "on_device_test": { - "enabled": true, - "tests": [ - "0", - "1", - "2", - "3", - "black_box_test" + "docker_service": "linux", + "platforms": [ + "android-arm64" + ], + "targets": [ + "base_unittests", + "sql_unittests", + "net_unittests", + "url_unittests", + "ipc_tests", + "mojo_unittests", + "gpu_unittests", + "gin_unittests", + "blink_unittests", + "content_shell", + "system_webview_apk", + "system_webview_shell_apk" + ], + "includes": [ + { + "name":"arm64", + "platform":"android-arm64" + } ] - }, - "platforms": [ - "android-arm64" - ], - "includes": [ - { - "name":"arm64", - "platform":"android-arm64", - "target_platform":"android-arm64", - "target_cpu":"target_cpu=\\\"arm64\\\"", - "target_os": "target_os=\\\"android\\\"" - } - ] } diff --git a/.github/config/android-x86.json b/.github/config/android-x86.json index b22d6bc661c0d1..932c9cd364e8ef 100644 --- a/.github/config/android-x86.json +++ b/.github/config/android-x86.json @@ -1,15 +1,26 @@ { - "docker_service": "build-android", - "platforms": [ - "android-x86" - ], - "includes": [ - { - "name":"x86", - "platform":"android-x86", - "target_platform":"android-x86", - "target_cpu":"target_cpu=\\\"x86\\\"", - "target_os": "target_os=\\\"android\\\"" - } - ] + "docker_service": "linux", + "platforms": [ + "android-x86" + ], + "targets": [ + "base_unittests", + "sql_unittests", + "net_unittests", + "url_unittests", + "ipc_tests", + "mojo_unittests", + "gpu_unittests", + "gin_unittests", + "blink_unittests", + "content_shell", + "system_webview_apk", + "system_webview_shell_apk" + ], + "includes": [ + { + "name":"x86", + "platform":"android-x86" + } + ] } diff --git a/.github/config/evergreen-arm-hardfp.json b/.github/config/evergreen-arm-hardfp.json deleted file mode 100644 index 1be38f06260df3..00000000000000 --- a/.github/config/evergreen-arm-hardfp.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - 
"docker_service": "build-raspi", - "evergreen_loader": "raspi-2", - "on_device_test": { - "enabled": true, - "tests": [ - "evergreen_test", - "0", - "1", - "2", - "3" - ] - }, - "platforms": [ - "evergreen-arm-hardfp", - "evergreen-arm-hardfp-sbversion-17" - ], - "includes": [ - { - "name":"hardfp", - "platform":"evergreen-arm-hardfp", - "target_platform":"evergreen-arm-hardfp", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments":"use_asan=false", - "evergreen_loader_extra_gn_arguments": "use_asan=false is_clang=false", - "dimension": "release_version=regex:10.*" - }, - { - "name":"sbversion-17", - "platform":"evergreen-arm-hardfp-sbversion-17", - "target_platform":"evergreen-arm-hardfp", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments":"use_asan=false", - "evergreen_loader_extra_gn_arguments":"use_asan=false is_clang=false", - "sb_api_version": "17" - } - ] -} diff --git a/.github/config/evergreen-arm-softfp.json b/.github/config/evergreen-arm-softfp.json deleted file mode 100644 index 104809b1983232..00000000000000 --- a/.github/config/evergreen-arm-softfp.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "docker_service": "build-android-evergreen", - "evergreen_loader": "android-arm", - "on_device_test": { - "enabled": false, - "tests": [ - "evergreen_test", - "0", - "1", - "2", - "3" - ] - }, - "platforms": [ - "evergreen-arm-softfp", - "evergreen-arm-softfp-sbversion-17" - ], - "includes": [ - { - "name":"softfp", - "platform":"evergreen-arm-softfp", - "target_platform":"evergreen-arm-softfp", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments":"use_asan=false", - "evergreen_loader_extra_gn_arguments": "target_os=\\\"android\\\" sb_is_evergreen_compatible=true" - }, - { - "name":"sbversion-17", - "platform":"evergreen-arm-softfp-sbversion-17", - "target_platform":"evergreen-arm-softfp", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments":"use_asan=false", - "sb_api_version":"17", - "evergreen_loader_extra_gn_arguments": "target_os=\\\"android\\\" sb_is_evergreen_compatible=true" - } - ] -} diff --git a/.github/config/evergreen-arm64.json b/.github/config/evergreen-arm64.json deleted file mode 100644 index e11fb6f41055c5..00000000000000 --- a/.github/config/evergreen-arm64.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "docker_service": "build-android-evergreen", - "evergreen_loader": "android-arm64", - "platforms": [ - "evergreen-arm64", - "evergreen-arm64-sbversion-17" - ], - "includes": [ - { - "name":"arm64", - "platform":"evergreen-arm64", - "target_platform":"evergreen-arm64", - "target_cpu":"target_cpu=\\\"arm64\\\"", - "extra_gn_arguments":"use_asan=false", - "evergreen_loader_extra_gn_arguments": "target_os=\\\"android\\\" sb_is_evergreen_compatible=true" - }, - { - "name":"sbversion-17", - "platform":"evergreen-arm64-sbversion-17", - "target_platform":"evergreen-arm64", - "target_cpu":"target_cpu=\\\"arm64\\\"", - "extra_gn_arguments":"use_asan=false", - "sb_api_version":"17", - "evergreen_loader_extra_gn_arguments": "target_os=\\\"android\\\" sb_is_evergreen_compatible=true" - } - ] -} diff --git a/.github/config/evergreen-x64.json b/.github/config/evergreen-x64.json deleted file mode 100644 index e8b3bb2bc3923b..00000000000000 --- a/.github/config/evergreen-x64.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "docker_service": "build-linux-evergreen", - "on_host_test": true, - "evergreen_loader": "linux-x64x11", - "on_host_test_shards": ["0", "1", "2", "3", "blackbox", "wpt", "evergreen"], - "platforms": [ - "evergreen-x64", - 
"evergreen-x64-sbversion-17" - ], - "includes": [ - { - "name":"x64", - "platform":"evergreen-x64", - "target_platform":"evergreen-x64", - "target_cpu":"target_cpu=\\\"x64\\\"", - "extra_gn_arguments":"use_asan=false" - }, - { - "name":"sbversion-17", - "platform":"evergreen-x64-sbversion-17", - "target_platform":"evergreen-x64", - "target_cpu":"target_cpu=\\\"x64\\\"", - "extra_gn_arguments":"use_asan=false", - "sb_api_version":"17" - } - ] -} diff --git a/.github/config/linux-clang-3-9.json b/.github/config/linux-clang-3-9.json deleted file mode 100644 index f994506e235205..00000000000000 --- a/.github/config/linux-clang-3-9.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "docker_service": "build-linux-clang-3-9", - "on_host_test": true, - "on_host_test_shards": ["0", "1", "2", "3", "blackbox", "wpt"], - "platforms": [ - "linux-x64x11-clang-3-9" - ], - "includes": [ - { - "name":"clang-3-9", - "platform":"linux-x64x11-clang-3-9", - "target_platform":"linux-x64x11-clang-3-9", - "extra_gn_arguments":"using_old_compiler=true build_with_separate_cobalt_toolchain=true" - } - ] -} diff --git a/.github/config/linux-coverage.json b/.github/config/linux-coverage.json deleted file mode 100644 index f4c90068421ad2..00000000000000 --- a/.github/config/linux-coverage.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "docker_service": "build-linux", - "on_host_test": true, - "on_host_test_shards": ["coverage"], - "platforms": [ - "linux-coverage" - ], - "includes": [ - { - "name":"linux", - "platform":"linux-coverage", - "target_platform":"linux-x64x11", - "extra_gn_arguments":"use_clang_coverage=true" - } - ] -} diff --git a/.github/config/linux-gcc-6-3.json b/.github/config/linux-gcc-6-3.json deleted file mode 100644 index 4223ca8f2d89bb..00000000000000 --- a/.github/config/linux-gcc-6-3.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "docker_service": "build-linux-gcc", - "on_host_test": true, - "on_host_test_shards": ["0", "1", "2", "3", "blackbox", "wpt"], - "platforms": [ - "linux-x64x11-gcc-6-3" - ], - "includes": [ - { - "name":"gcc-6-3", - "platform":"linux-x64x11-gcc-6-3", - "target_platform":"linux-x64x11-gcc-6-3", - "extra_gn_arguments":"using_old_compiler=true build_with_separate_cobalt_toolchain=true" - } - ] -} diff --git a/.github/config/linux-modular.json b/.github/config/linux-modular.json deleted file mode 100644 index e250d62a5868ad..00000000000000 --- a/.github/config/linux-modular.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "docker_service": "build-linux", - "on_host_test": true, - "on_host_test_shards": [ - "0", - "1", - "2", - "3", - "blackbox", - "wpt" - ], - "platforms": [ - "linux-x64x11-modular" - ], - "includes": [ - { - "name":"modular", - "platform":"linux-x64x11-modular", - "target_platform":"linux-x64x11", - "extra_gn_arguments":"build_with_separate_cobalt_toolchain=true use_contrib_cast=true concurrent_links=8" - } - ] -} diff --git a/.github/config/linux.json b/.github/config/linux.json index 3c2b325eaa4678..2d02d498bb97fa 100644 --- a/.github/config/linux.json +++ b/.github/config/linux.json @@ -1,35 +1,24 @@ { - "docker_service": "build-linux", - "evergreen_loader": "linux-x64x11", - "on_host_test": true, - "on_host_test_shards": ["0", "1", "2", "3", "blackbox", "wpt"], - "platforms": [ - "linux-x64x11", - "linux-x64x11-egl", - "linux-x64x11-skia", - "linux-x64x11-sbversion-17" - ], - "includes": [ - { - "name":"x64", - "platform":"linux-x64x11", - "target_platform":"linux-x64x11" - }, - { - "name":"egl", - "platform":"linux-x64x11-egl", - "target_platform":"linux-x64x11-egl" - }, - { - 
"name":"skia", - "platform":"linux-x64x11-skia", - "target_platform":"linux-x64x11-skia" - }, - { - "name":"sbversion-17", - "platform":"linux-x64x11-sbversion-17", - "target_platform":"linux-x64x11", - "sb_api_version":"17" - } - ] -} + "docker_service": "linux", + "platforms": [ + "linux-x64x11" + ], + "targets": [ + "base_unittests", + "sql_unittests", + "net_unittests", + "url_unittests", + "ipc_tests", + "mojo_unittests", + "gpu_unittests", + "gin_unittests", + "blink_unittests", + "content_shell" + ], + "includes": [ + { + "name":"x64", + "platform":"linux-x64x11" + } + ] + } diff --git a/.github/config/raspi-2-skia.json b/.github/config/raspi-2-skia.json deleted file mode 100644 index e83ea1fa8577b1..00000000000000 --- a/.github/config/raspi-2-skia.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "docker_service": "build-raspi", - "platforms": [ - "raspi-2-skia" - ], - "includes": [ - { - "name":"raspi-2-skia", - "platform":"raspi-2-skia", - "target_platform":"raspi-2-skia", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments": "build_with_separate_cobalt_toolchain=true use_asan=false" - } - ] -} diff --git a/.github/config/raspi-2.json b/.github/config/raspi-2.json deleted file mode 100644 index f5b897329872a0..00000000000000 --- a/.github/config/raspi-2.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "docker_service": "build-raspi", - "on_device_test": { - "enabled": true, - "tests": [ - "0", - "1", - "2", - "3", - "4", - "5" - ] - }, - "platforms": [ - "raspi-2", - "raspi-2-sbversion-17" - ], - "includes": [ - { - "name":"raspi", - "platform":"raspi-2", - "target_platform":"raspi-2", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments": "build_with_separate_cobalt_toolchain=true use_asan=false" - }, - { - "name":"sbversion-17", - "platform":"raspi-2-sbversion-17", - "target_platform":"raspi-2", - "target_cpu":"target_cpu=\\\"arm\\\"", - "extra_gn_arguments": "build_with_separate_cobalt_toolchain=true use_asan=false", - "sb_api_version": "17" - } - ] -} diff --git a/.github/config/stub.json b/.github/config/stub.json deleted file mode 100644 index 792466fe3e305a..00000000000000 --- a/.github/config/stub.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "docker_service": "build-linux-stub", - "platforms": [ - "stub" - ], - "includes": [ - { - "name":"stub", - "platform":"stub", - "target_platform":"stub" - } - ] -} diff --git a/.github/release.yml b/.github/release.yml deleted file mode 100644 index d8d6482aff3d12..00000000000000 --- a/.github/release.yml +++ /dev/null @@ -1,4 +0,0 @@ -changelog: - exclude: - labels: - - ignore-for-release diff --git a/.github/workflows/android.yaml b/.github/workflows/android.yaml index 852bbbcc7d76e3..3c226ac8741089 100644 --- a/.github/workflows/android.yaml +++ b/.github/workflows/android.yaml @@ -1,20 +1,15 @@ name: android on: - release: - types: [prereleased, released, published] pull_request: types: [opened, reopened, synchronize, labeled] branches: - main - - feature/* + - experimental/* push: branches: - main - - feature/* - schedule: - # GMT timezone. 
- - cron: '0 9 * * *' + - experimental/* workflow_dispatch: inputs: nightly: @@ -32,77 +27,19 @@ jobs: with: platform: android-arm64 nightly: ${{ github.event.inputs.nightly }} - keep_artifacts: cobalt.apk - android-x86: + android-arm: uses: ./.github/workflows/main.yaml permissions: packages: write pull-requests: write with: - platform: android-x86 + platform: android-arm nightly: ${{ github.event.inputs.nightly }} - keep_artifacts: cobalt.apk - android-arm: + android-x86: uses: ./.github/workflows/main.yaml permissions: packages: write pull-requests: write with: - platform: android-arm + platform: android-x86 nightly: ${{ github.event.inputs.nightly }} - keep_artifacts: cobalt.apk - - upload-release-artifacts: - runs-on: ubuntu-latest - needs: [ android-arm, android-arm64, android-x86 ] - permissions: - actions: write - contents: write - steps: - - name: Download arm-gold apk - uses: actions/download-artifact@v4 - with: - name: android-arm-gold - path: arm-gold - - name: Download arm-qa apk - uses: actions/download-artifact@v4 - with: - name: android-arm-qa - path: arm-qa - - name: Download arm64-gold apk - uses: actions/download-artifact@v4 - with: - name: android-arm64-gold - path: arm64-gold - - name: Download arm64-qa apk - uses: actions/download-artifact@v4 - with: - name: android-arm64-gold - path: arm64-qa - - name: Download x86-gold apk - uses: actions/download-artifact@v4 - with: - name: android-x86-gold - path: x86-gold - - name: Download x86-qa apk - uses: actions/download-artifact@v4 - with: - name: android-x86-qa - path: x86-qa - - name: 'Upload Android APKs' - uses: actions/upload-artifact@v4 - with: - name: Android APKs - path: ./* - retention-days: 90 - compression-level: 0 # We expect kept artifacts to be already compressed - if-no-files-found: error - - name: Make a release zip - run: zip -0 -r "Android APKs.zip" . -i '*.apk' - - name: Upload the APKs to a release - if: ${{ github.event_name == 'release' }} - uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # v2.9.0 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: "Android APKs.zip" - overwrite: true diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml deleted file mode 100644 index 834696dcd1123a..00000000000000 --- a/.github/workflows/coverage.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: coverage - -on: - pull_request: - types: - - opened - - edited - - reopened - - synchronize - push: - branches: - - main - - feature/* - -jobs: - linux-coverage: - # TODO(b/286302961): Move back and re-enable this check once sharding is supported. - # Run on main branch for pushes, PRs and manual invocations. - # if: | - # ${{ github.ref == 'refs/heads/main' && - # (github.event_name == 'push' || - # github.event_name == 'pull_request' || - # (github.event_name == 'workflow_dispatch' && inputs.nightly == 'false')) }} - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: linux-coverage - nightly: ${{ github.event.inputs.nightly }} diff --git a/.github/workflows/evergreen.yaml b/.github/workflows/evergreen.yaml deleted file mode 100644 index f299a90a87f4fd..00000000000000 --- a/.github/workflows/evergreen.yaml +++ /dev/null @@ -1,68 +0,0 @@ -name: evergreen - -on: - pull_request: - types: [opened, reopened, synchronize, labeled] - branches: - - main - - feature/* - push: - branches: - - main - - feature/* - schedule: - # GMT timezone. 
- - cron: '0 9 * * *' - workflow_dispatch: - inputs: - nightly: - description: 'Nightly workflow.' - required: true - type: boolean - default: false - -jobs: - evergreen-x64: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: evergreen-x64 - nightly: ${{ github.event.inputs.nightly }} - run_api_leak_detector: true - keep_artifacts: install/lib/libcobalt.* - artifact_size_increase_thresholds: '{"install/lib/libcobalt.so": 0.02, "install/lib/libcobalt.lz4": 0.02}' - evergreen-arm-hardfp: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: evergreen-arm-hardfp - nightly: ${{ github.event.inputs.nightly }} - run_api_leak_detector: true - keep_artifacts: install/lib/libcobalt.* - artifact_size_increase_thresholds: '{"install/lib/libcobalt.so": 0.02, "install/lib/libcobalt.lz4": 0.02}' - evergreen-arm-softfp: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: evergreen-arm-softfp - nightly: ${{ github.event.inputs.nightly }} - run_api_leak_detector: true - keep_artifacts: install/lib/libcobalt.* - artifact_size_increase_thresholds: '{"install/lib/libcobalt.so": 0.02, "install/lib/libcobalt.lz4": 0.02}' - evergreen-arm64: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: evergreen-arm64 - nightly: ${{ github.event.inputs.nightly }} - run_api_leak_detector: true - keep_artifacts: install/lib/libcobalt.* - artifact_size_increase_thresholds: '{"install/lib/libcobalt.so": 0.02, "install/lib/libcobalt.lz4": 0.02}' diff --git a/.github/workflows/gradle.yaml b/.github/workflows/gradle.yaml deleted file mode 100644 index 7dec25a3958546..00000000000000 --- a/.github/workflows/gradle.yaml +++ /dev/null @@ -1,34 +0,0 @@ -name: Java Tests - -on: - pull_request: - push: - branches: - - main - - feature/* - -concurrency: - group: '${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.pull_request.number || github.sha }}' - cancel-in-progress: true - -permissions: {} - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 11 - - name: Validate Gradle wrapper - uses: gradle/wrapper-validation-action@ccb4328a959376b642e027874838f60f8e596de3 #v1.0.6 - - name: Build with Gradle - uses: gradle/gradle-build-action@749f47bda3e44aa060e82d7b3ef7e40d953bd629 #v2.4.2 - with: - arguments: test - build-root-directory: starboard/android/apk diff --git a/.github/workflows/label-cherry-pick.yaml b/.github/workflows/label-cherry-pick.yaml deleted file mode 100644 index 37dd6d62ab264d..00000000000000 --- a/.github/workflows/label-cherry-pick.yaml +++ /dev/null @@ -1,135 +0,0 @@ -name: Label Cherry Pick - -on: - pull_request_target: - types: - - labeled - - closed - -jobs: - prepare_branch_list: - runs-on: ubuntu-latest - if: | - github.event.pull_request.merged == true && - github.event.pull_request.merge_commit_sha != null - outputs: - target_branch: ${{ steps.set-branches.outputs.target_branch }} - steps: - - name: Set Branches - id: set-branches - env: - PR_LABELS: ${{ toJson(github.event.pull_request.labels) }} - EVENT_ACTION: ${{ github.event.action }} - LABEL_NAME: ${{ github.event.label.name }} - BASE_REF: ${{ github.base_ref }} - run: | - if [[ $EVENT_ACTION == 'closed' ]]; then - # 
Get a list of the labels from the PR. - labels=$(echo "$PR_LABELS" | jq -r '.[].name') - else - # Or get the label that was added on the merged PR. - labels=$LABEL_NAME - fi - - branches=("25.lts.1+" "24.lts.1+" "23.lts.1+" "22.lts.1+" "21.lts.1+" "20.lts.1+" "19.lts.1+" "rc_11" "COBALT_9") - filtered_branches=() - for branch in "${branches[@]}"; do - if [[ $branch == $BASE_REF ]]; then - continue - fi - if [[ ${labels[@]} =~ "cp-$branch" ]]; then - filtered_branches+=("$branch") - fi - done - - echo "target_branch=$(echo -n "${filtered_branches[@]}" | jq -cRs 'split(" ")')" >> $GITHUB_OUTPUT - - cherry_pick: - runs-on: ubuntu-latest - permissions: - issues: write - needs: prepare_branch_list - if: needs.prepare_branch_list.outputs.target_branch != '[]' - strategy: - matrix: - target_branch: ${{ fromJson(needs.prepare_branch_list.outputs.target_branch) }} - env: - ACCESS_TOKEN: ${{ secrets.CHERRY_PICK_TOKEN }} - REPOSITORY: ${{ github.repository }} - GITHUB_REF: ${{ github.ref }} - MERGE_COMMIT_SHA: ${{ github.event.pull_request.merge_commit_sha }} - CHERRY_PICK_BRANCH: cherry-pick-${{ matrix.target_branch }}-${{ github.event.pull_request.number }} - steps: - - name: Checkout repository - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - ref: ${{ matrix.target_branch }} - fetch-depth: 0 - persist-credentials: false - - - name: Setup Git - run: | - git config --global user.name "GitHub Release Automation" - git config --global user.email "github@google.com" - - - name: Cherry pick merge commit - id: cherry-pick - continue-on-error: true - run: | - set -x - git fetch origin ${{ matrix.target_branch }} - - set +e - # Select the first parent as the mainline tree. This is necessary if - # the commit has multiple parents as the cherry-pick will fail otherwise. - git cherry-pick -x --mainline=1 ${MERGE_COMMIT_SHA} - RES=$? - set -e - if [ ${RES} -ne 0 ]; then - # If the cherry pick failed due to a merge conflict we can - # add the conflicting file and create the commit anyway. - git add . - git cherry-pick --continue - fi - exit ${RES} - - - name: Create Pull Request - id: create-pr - continue-on-error: true - uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3 - with: - token: ${{ secrets.CHERRY_PICK_TOKEN }} - draft: ${{ steps.cherry-pick.outcome == 'failure' }} - base: ${{ matrix.target_branch }} - branch: ${{ env.CHERRY_PICK_BRANCH }} - committer: GitHub Release Automation - reviewers: ${{ github.event.pull_request.user.login }} - title: "Cherry pick PR #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}" - body: | - Refer to the original PR: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }} - - ${{ github.event.pull_request.body }} - - - name: Comment on failure - uses: actions/github-script@v6 - with: - github-token: ${{ secrets.CHERRY_PICK_TOKEN }} - script: | - if ('${{ steps.create-pr.outputs.pull-request-number }}' == '') { - // Comment on the originating PR if creating a cherry pick PR failed. - github.rest.issues.createComment({ - issue_number: context.payload.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: '> [!IMPORTANT]\n> Creating the cherry pick PR failed! Check the log at ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} for details.' - }); - } else if ('${{ steps.cherry-pick.outcome }}' == 'failure') { - // Comment on the new PR if the cherry pick failed. 
- github.rest.issues.createComment({ - issue_number: '${{ steps.create-pr.outputs.pull-request-number }}', - owner: context.repo.owner, - repo: context.repo.repo, - body: '> [!IMPORTANT]\n> There were merge conflicts while cherry picking! Check out [${{ env.CHERRY_PICK_BRANCH }}](${{ github.repository }}/tree/${{ env.CHERRY_PICK_BRANCH }}) and fix the conflicts before proceeding. Check the log at ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} for details.' - }); - } diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml deleted file mode 100644 index dde32db942a0ea..00000000000000 --- a/.github/workflows/lint.yaml +++ /dev/null @@ -1,69 +0,0 @@ -name: lint - -on: - pull_request: - types: - - opened - - edited - - reopened - - synchronize - push: - branches: - - main - - feature/* - -concurrency: - group: '${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.pull_request.number || github.sha }}' - cancel-in-progress: true - -permissions: {} - -jobs: - lint: - runs-on: ubuntu-latest - timeout-minutes: 15 - steps: - - name: Install clang-format Dependencies - run: | - sudo apt-get update - sudo apt-get install libncurses5 - - name: Download GN via CIPD - env: - GN_SHA256SUM: 'af7b2dcb3905bca56655e12131b365f1cba8e159db80d2022330c4f522fab2ef /tmp/gn.zip' - GN_HASH: r3styzkFvKVmVeEhMbNl8cuo4VnbgNICIzDE9SL6su8C - run: | - set -e -x - curl --location --silent --output /tmp/gn.zip "https://chrome-infra-packages.appspot.com/dl/gn/gn/linux-amd64/+/${GN_HASH}" - echo ${GN_SHA256SUM} | sha256sum --check - unzip /tmp/gn.zip -d /usr/local/bin - rm /tmp/gn.zip - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - fetch-depth: 0 - persist-credentials: false - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: '3.8' - - name: Install Pip Packages - run: pip install --require-hashes --no-deps -r ${GITHUB_WORKSPACE}/precommit_hooks/requirements.txt - - name: Download Resources - run: python ${GITHUB_WORKSPACE}/download_resources.py - - name: Run pre-commit - uses: ./.github/actions/pre_commit - with: - base_ref: ${{ github.event.pull_request.base.sha && github.event.pull_request.base.sha || github.event.before }} - check-bug-id: - name: Check Bug ID - runs-on: ubuntu-latest - steps: - - name: Check Bug ID Present - # v2 - uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee - with: - accessToken: ${{ secrets.GITHUB_TOKEN }} - pattern: '(b\/\d+|^(Bug|Fixed|Issue): \d+$|(partnerissuetracker\.corp|issuetracker)\.google\.com\/u\/\d+\/issues\/\d+$)' - flags: 'gm' - error: 'PR title or description should include at least one bug ID.' diff --git a/.github/workflows/linux.yaml b/.github/workflows/linux.yaml index f144ef72f4ab32..11615479ac7ddb 100644 --- a/.github/workflows/linux.yaml +++ b/.github/workflows/linux.yaml @@ -5,14 +5,11 @@ on: types: [opened, reopened, synchronize, labeled] branches: - main - - feature/* + - experimental/* push: branches: - main - - feature/* - schedule: - # GMT timezone. 
- - cron: '0 9 * * *' + - experimental/* workflow_dispatch: inputs: nightly: @@ -30,31 +27,3 @@ jobs: with: platform: linux nightly: ${{ github.event.inputs.nightly }} - linux-clang-3-9: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: linux-clang-3-9 - nightly: ${{ github.event.inputs.nightly }} - modular: true - linux-gcc-6-3: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: linux-gcc-6-3 - nightly: ${{ github.event.inputs.nightly }} - modular: true - # TODO(b/285632780): Enable blackbox tests for modular linux workflows. - linux-modular: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: linux-modular - nightly: ${{ github.event.inputs.nightly }} - modular: true diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index f4e76a21ef8255..35bdd66cc05b4f 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -14,49 +14,6 @@ on: required: true type: string default: 'false' - run_api_leak_detector: - description: 'Whether to run the api leak detector.' - required: false - type: boolean - default: false - leak_manifest_filename: - description: 'Path to the leak manifest.' - required: false - type: string - default: "" - modular: - description: 'Whether this is a modular build.' - required: false - type: boolean - default: false - keep_artifacts: - description: 'Which artifacts to keep for releases.' - required: false - type: string - default: '' - artifact_size_increase_thresholds: - description: 'Threshold for artifact binary size increase.' - required: false - type: string - default: "" - -# Global env vars. -env: - REGISTRY: ghcr.io - IPV6_AVAILABLE: 0 - LANG: en_US.UTF-8 - IS_BUILDBOT_DOCKER: 1 - IS_CI: 1 - IS_DOCKER: 1 - NINJA_STATUS: '[%e sec | %f/%t %u remaining | %c/sec | j%r]' - SCCACHE: 1 - SCCACHE_GCS_BUCKET: cobalt-actions-sccache-linux - SCCACHE_GCS_OAUTH_URL: http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token - SCCACHE_GCS_RW_MODE: READ_WRITE - SCCACHE_IDLE_TIMEOUT: 0 # prevent sccache server from shutting down after long idle. - STARBOARD_TOOLCHAINS_DIR: /root/starboard-toolchains - # TODO(b/255654807) Workaround for https://github.com/actions/checkout/issues/1590 - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.label.name || github.event.pull_request.number || github.sha }} @ ${{ github.event.label.name && github.event.pull_request.number || github.event.action }} @@ -78,17 +35,14 @@ jobs: github.event.pull_request.merged == false && ( github.event.action == 'labeled' && - github.event.label.name == 'runtest' || - github.event.label.name == 'on_device' + github.event.label.name == 'runtest' ) timeout-minutes: 10 steps: - - id: checkout - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 + - name: Checkout files + uses: actions/checkout@v4 with: fetch-depth: 1 - persist-credentials: false - name: Remove runtest if exists if: github.event_name == 'pull_request' continue-on-error: true # Ignore this step if we cannot remove the label. 
@@ -105,36 +59,16 @@ jobs: run: | platforms=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -c '.platforms') echo "platforms=${platforms}" >> $GITHUB_ENV + - id: set-targets + shell: bash + run: | + targets=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -c '.targets | join(" ")') + echo "targets=${targets}" >> $GITHUB_ENV - id: set-includes shell: bash run: | includes=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -c '.includes') echo "includes=${includes}" >> $GITHUB_ENV - - id: set-on-device-test - shell: bash - run: | - on_device_test=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -rc '.on_device_test') - echo "on_device_test=${on_device_test}" >> $GITHUB_ENV - - id: set-on-device-test-attempts - shell: bash - run: | - on_device_test_attempts=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -rc '.on_device_test.test_attempts // empty') - echo "on_device_test_attempts=${on_device_test_attempts}" >> $GITHUB_ENV - - id: set-on-host-test - shell: bash - run: | - on_host_test=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -rc '.on_host_test') - echo "on_host_test=${on_host_test}" >> $GITHUB_ENV - - id: set-on-host-test-shards - shell: bash - run: | - on_host_test_shards=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -c '.on_host_test_shards') - echo "on_host_test_shards=${on_host_test_shards}" >> $GITHUB_ENV - - id: set-on-host-test-evergreen-loader - shell: bash - run: | - evergreen_loader=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -rc '.evergreen_loader') - echo "evergreen_loader=${evergreen_loader}" >> $GITHUB_ENV - id: set-docker-service shell: bash run: | @@ -142,93 +76,48 @@ jobs: echo "docker_service=${docker_service}" >> $GITHUB_ENV outputs: platforms: ${{ env.platforms }} + targets: ${{ env.targets }} includes: ${{ env.includes }} - on_device_test: ${{ env.on_device_test }} - on_device_test_attempts: ${{ env.on_device_test_attempts }} - on_host_test: ${{ env.on_host_test }} - on_host_test_shards: ${{ env.on_host_test_shards }} - evergreen_loader: ${{ env.evergreen_loader }} docker_service: ${{ env.docker_service }} # Builds, tags, and pushes Cobalt docker build images to ghr. docker-build-image: needs: [initialize] - runs-on: [self-hosted, linux-runner] - permissions: - packages: write - timeout-minutes: 30 + runs-on: [self-hosted, chrobalt-linux-runner] steps: - - name: Checkout files - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - fetch-depth: 0 - persist-credentials: false - - name: Login to Docker Registry ${{env.REGISTRY}} - uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # v2.1.0 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Build docker image - id: build-docker-image - uses: ./.github/actions/docker - with: - docker_service: ${{ needs.initialize.outputs.docker_service }} - docker_image: cobalt-${{ needs.initialize.outputs.docker_service }} - - name: Set Docker Tag Output - id: set-docker-tag-output - shell: bash - run: | - set -u - echo $DOCKER_TAG - echo "docker_tag=$DOCKER_TAG" | head -n 1 >> $GITHUB_ENV - outputs: - docker_tag: ${{env.docker_tag}} - - # Builds, tags, and pushes Cobalt unit test image to ghr. 
- docker-unittest-image: - if: needs.initialize.outputs.on_host_test == 'true' - needs: [initialize] - permissions: - packages: write - runs-on: [self-hosted, linux-runner] - timeout-minutes: 30 - steps: - - name: Checkout files - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - fetch-depth: 2 - persist-credentials: false - - name: Login to Docker Registry ${{env.REGISTRY}} - uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # v2.1.0 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Build docker image - id: build-docker-image - uses: ./.github/actions/docker - with: - docker_service: linux-x64x11-unittest - docker_image: cobalt-linux-x64x11-unittest - - name: Set Docker Tag Output - id: set-docker-unittest-tag-output - shell: bash - run: | - set -u - echo $DOCKER_TAG - echo "docker_unittest_tag=$DOCKER_TAG" >> $GITHUB_ENV + - name: Checkout files + uses: actions/checkout@v4 + with: + fetch-depth: 1 + # Handle GitHub registry used for everything other than pull requests off forked repos. + - name: Login to GitHub Docker Registry + if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.full_name == github.repository) }} + uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # v2.1.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build docker image + id: build-docker-image + uses: ./.github/actions/docker + with: + docker_service: ${{ needs.initialize.outputs.docker_service }} + - name: Set Docker Tag Output + id: set-docker-tag-output + shell: bash + run: | + set -u + echo $DOCKER_TAG + echo "docker_tag=$DOCKER_TAG" | head -n 1 >> $GITHUB_ENV outputs: - docker_unittest_tag: ${{env.docker_unittest_tag}} + docker_tag: ${{ env.docker_tag }} # Runs builds. build: needs: [initialize, docker-build-image] permissions: {} - runs-on: [self-hosted, linux-runner] - name: ${{matrix.name}}_${{matrix.config}} + runs-on: [self-hosted, chrobalt-linux-runner] + name: ${{ matrix.name }}_${{ matrix.config }} strategy: fail-fast: false matrix: @@ -237,165 +126,26 @@ jobs: config: [devel, debug, qa, gold] container: ${{ needs.docker-build-image.outputs.docker_tag }} env: + DEPOT_TOOLS_UPDATE: 0 + DEPOT_TOOLS_REPORT_BUILD: 0 + DEPOT_TOOLS_COLLECT_METRICS: 0 + DEPOT_TOOLS_METRICS: 0 + SCCACHE: 1 + SCCACHE_GCS_BUCKET: cobalt-actions-sccache-linux + SCCACHE_GCS_SERVICE_ACCOUNT: runner@cobalt-actions-devel.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE: READ_WRITE + SCCACHE_IDLE_TIMEOUT: 0 # prevent sccache server from shutting down after long idle. # We want temp folder to be on tmpfs which makes workloads faster. # However, dind container ends up having / folder mounted on overlay # filesystem, whereas /__w which contains Cobalt source code is on tmpfs. TMPDIR: /__w/_temp - timeout-minutes: 60 steps: - name: Checkout - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 + uses: actions/checkout@v4 with: - # Use fetch depth of 0 to get full history for a valid build id. 
- fetch-depth: 0 - persist-credentials: false - - name: Cache Gradle - uses: actions/cache@v3 - if: startsWith(matrix.target_platform, 'android') || startsWith(needs.initialize.outputs.evergreen_loader, 'android') - with: - key: gradle-cache-${{ hashFiles('starboard/android/apk/**/*gradle*') }} - path: | - /root/.gradle/caches - /root/.gradle/wrapper - - name: GN - uses: ./.github/actions/gn - - name: Build Cobalt - uses: ./.github/actions/build - - name: 'Check Artifact Size' - uses: ./.github/actions/check_artifact_size - if: ${{ false }} # ${{ inputs.artifact_size_increase_thresholds }} - with: - workflow: ${{ github.workflow }} - name: ${{ matrix.platform }}-${{ matrix.config }} - path: out/${{ matrix.target_platform }}_${{ matrix.config }} - thresholds: ${{ inputs.artifact_size_increase_thresholds }} - token: ${{ secrets.GITHUB_TOKEN }} - - name: 'Upload Artifact' - uses: actions/upload-artifact@v4 - if: ${{ inputs.keep_artifacts }} - with: - name: ${{ matrix.platform }}-${{ matrix.config }} - path: out/${{ matrix.target_platform }}_${{ matrix.config }}/${{ inputs.keep_artifacts }} - retention-days: 7 - compression-level: 0 # We expect kept artifacts to be already compressed - if-no-files-found: error - - name: Run API Leak Detector - uses: ./.github/actions/api_leak_detector - if: inputs.run_api_leak_detector - with: - relative_manifest_path: ${{ inputs.leak_manifest_filename }} - - name: Upload On Host Test Artifacts - if: ${{ matrix.config == 'devel' && needs.initialize.outputs.on_host_test == 'true' }} - uses: ./.github/actions/upload_test_artifacts - with: - type: onhost - os: linux - # For some reason passing needs.initialize.outputs.evergreen_loader as parameter to build - # action didn't work, so instead we set an env var. - - name: Set Evergreen loader config - if: ${{ needs.initialize.outputs.evergreen_loader != 'null' }} - shell: bash - run: | - set -u - COBALT_EVERGREEN_LOADER="${{needs.initialize.outputs.evergreen_loader}}" - echo "COBALT_EVERGREEN_LOADER=${COBALT_EVERGREEN_LOADER}" >> $GITHUB_ENV - # Build Evergreen loader for on-host tests if necessary. - - name: Evergreen loader GN - if: ${{ needs.initialize.outputs.evergreen_loader != 'null' && ( matrix.config == 'devel' || matrix.config == 'qa' ) }} - uses: ./.github/actions/gn - - name: Build Evergreen loader - if: ${{ needs.initialize.outputs.evergreen_loader != 'null' && ( matrix.config == 'devel' || matrix.config == 'qa' ) }} - uses: ./.github/actions/build - - name: Upload Nightly Artifacts - if: ${{ ( inputs.nightly == 'true' || github.event_name == 'schedule' ) && matrix.config != 'debug' }} - uses: ./.github/actions/upload_nightly_artifacts - - name: Upload Evergreen loader On Host Test Artifacts - if: ${{ needs.initialize.outputs.evergreen_loader != 'null' && matrix.config == 'devel' && needs.initialize.outputs.on_host_test == 'true'}} - uses: ./.github/actions/upload_test_artifacts - with: - type: onhost - os: linux - - name: Upload On Device Test Artifacts - if: | - matrix.config == 'devel' && - fromJSON(needs.initialize.outputs.on_device_test).enabled == true && - ( - github.event_name != 'pull_request' || - contains(github.event.pull_request.labels.*.name, 'on_device') - ) - uses: ./.github/actions/upload_test_artifacts - with: - type: ondevice - os: linux - - # Runs on-device integration and unit tests. - on-device-test: - needs: [initialize, build] - # Run ODT when on_device label is applied on PR. - # Also, run ODT on push and schedule if not explicitly disabled via repo vars. 
- if: | - fromJSON(needs.initialize.outputs.on_device_test).enabled == true && (( - github.event_name == 'pull_request' && - contains(github.event.pull_request.labels.*.name, 'on_device') ) || (( - inputs.nightly == 'true' || github.event_name == 'schedule') && - vars.RUN_ODT_TESTS_ON_NIGHTLY != 'False') || - ( github.event_name == 'push' && vars.RUN_ODT_TESTS_ON_POSTSUBMIT != 'False' ) ) - runs-on: [self-hosted, odt-runner] - name: ${{ matrix.name }}_on_device_${{ matrix.shard }} - permissions: {} - strategy: - fail-fast: false - matrix: - platform: ${{ fromJson(needs.initialize.outputs.platforms) }} - config: [devel] - shard: ${{ fromJson(needs.initialize.outputs.on_device_test).tests }} - include: ${{ fromJson(needs.initialize.outputs.includes) }} - env: - COBALT_EVERGREEN_LOADER: ${{ needs.initialize.outputs.evergreen_loader }} - ON_DEVICE_TEST_ATTEMPTS: ${{ needs.initialize.outputs.on_device_test_attempts }} - MODULAR_BUILD: ${{ inputs.modular && 1 || 0 }} - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - fetch-depth: 1 - persist-credentials: false - - name: Run Tests (${{ matrix.shard }}) - uses: ./.github/actions/on_device_tests - - # Runs on-host integration and unit tests. - on-host-test: - needs: [initialize, docker-unittest-image, build] - permissions: {} - if: needs.initialize.outputs.on_host_test == 'true' - runs-on: [self-hosted, linux-runner] - name: ${{matrix.name}}_${{matrix.shard}}_test - strategy: - fail-fast: false - matrix: - platform: ${{ fromJson(needs.initialize.outputs.platforms) }} - shard: ${{ fromJson(needs.initialize.outputs.on_host_test_shards) }} - config: [devel] - include: ${{ fromJson(needs.initialize.outputs.includes) }} - container: ${{ needs.docker-unittest-image.outputs.docker_unittest_tag }} - env: - DISPLAY: :99 - # For some reason tests complaining about HOME set to /github/home - # with permission denied error. - HOME: /root - COBALT_EVERGREEN_LOADER: ${{needs.initialize.outputs.evergreen_loader}} - MODULAR_BUILD: ${{ inputs.modular && 1 || 0 }} - timeout-minutes: 90 - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - fetch-depth: 1 - persist-credentials: false - - name: Run Tests - uses: ./.github/actions/on_host_test + path: src + - name: Build Chrobalt + uses: ./src/.github/actions/build with: - os: linux + targets: ${{ needs.initialize.outputs.targets }} + diff --git a/.github/workflows/manual-cherry-pick.yaml b/.github/workflows/manual-cherry-pick.yaml deleted file mode 100644 index f48cb117defcf2..00000000000000 --- a/.github/workflows/manual-cherry-pick.yaml +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 The Cobalt Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Usage: Go to -# https://https://github.com/youtube/cobalt/actions/workflows/manual-cherry-pick.yaml -# and click "Run Workflow." Leave "Use Workflow From" set to "main", then -# input the branch name and paste the cherry-pick commit and click Run. 
A PR -# will be created. - -name: Release Branch Cherrypick -on: - workflow_dispatch: - inputs: - # We use this instead of the "run on branch" argument because GitHub looks - # on that branch for a workflow.yml file, and we'd have to cherry-pick - # this file into those branches. - release_branch: - description: 'Release branch name (e.g. 23.lts.1+)' - required: true - type: string - git_commit: - description: 'Git commit to cherry-pick' - required: true - type: string - -jobs: - cherrypick: - name: Cherrypick to ${{ github.event.inputs.release_branch}} - ${{ github.event.inputs.git_commit }} - runs-on: ubuntu-latest - env: - ACCESS_TOKEN: ${{ secrets.CHERRY_PICK_TOKEN }} - RELEASE_BRANCH: ${{ github.event.inputs.release_branch }} - COMMIT_HASH: ${{ github.event.inputs.git_commit }} - REPOSITORY: ${{ github.repository }} - GITHUB_REF: ${{ github.ref }} - steps: - - name: Checkout code - uses: kaidokert/checkout@v3.5.999 - timeout-minutes: 30 - with: - ref: ${{ env.RELEASE_BRANCH }} - persist-credentials: false - - name: Get some helpful info for formatting - id: cherrypick - run: | - git config --global user.name "GitHub Release Automation" - git config --global user.email "github@google.com" - git fetch origin $GITHUB_REF - git cherry-pick -x $COMMIT_HASH - echo "SHORTSHA=$(git log -1 $COMMIT_HASH --format="%h")" >> "$GITHUB_OUTPUT" - echo "TITLE=$(git log -1 $COMMIT_HASH --format="%s")" >> "$GITHUB_OUTPUT" - - name: Create Pull Request with changes - uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3 - with: - title: '${{ env.RELEASE_BRANCH }} cherry-pick: ${{ steps.cherrypick.outputs.SHORTSHA }} "${{ steps.cherrypick.outputs.TITLE }}"' - committer: GitHub Release Automation - token: ${{ secrets.CHERRY_PICK_TOKEN }} - base: ${{ env.RELEASE_BRANCH }} - branch: ${{ env.RELEASE_BRANCH }}-${{ steps.cherrypick.outputs.SHORTSHA }} - reviewers: ${{ github.actor }} - body: | - Refer to the original commit: https://github.com/${{ github.repository }}/commit/${{ github.event.inputs.git_commit }} diff --git a/.github/workflows/nightly_trigger.yaml b/.github/workflows/nightly_trigger.yaml deleted file mode 100644 index 6bcc16bd27eb67..00000000000000 --- a/.github/workflows/nightly_trigger.yaml +++ /dev/null @@ -1,140 +0,0 @@ -name: nightly_trigger - -on: - schedule: - # GMT timezone. 
- - cron: '0 9 * * *' - workflow_dispatch: - -jobs: - trigger_23: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 23.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run android_23.lts.1+ --ref 23.lts.1+ -f nightly=true - gh workflow run evergreen_23.lts.1+ --ref 23.lts.1+ -f nightly=true - gh workflow run linux_23.lts.1+ --ref 23.lts.1+ -f nightly=true - gh workflow run raspi-2_23.lts.1+ --ref 23.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_22: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 22.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run android_22.lts.1+ --ref 22.lts.1+ -f nightly=true - gh workflow run evergreen_22.lts.1+ --ref 22.lts.1+ -f nightly=true - gh workflow run linux_22.lts.1+ --ref 22.lts.1+ -f nightly=true - gh workflow run raspi-2_22.lts.1+ --ref 22.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_21: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 21.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run evergreen_21.lts.1+ --ref 21.lts.1+ -f nightly=true - gh workflow run linux_21.lts.1+ --ref 21.lts.1+ -f nightly=true - gh workflow run raspi-2_21.lts.1+ --ref 21.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_20: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 20.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run linux_20.lts.1+ --ref 20.lts.1+ -f nightly=true - gh workflow run raspi-2_20.lts.1+ --ref 20.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_19: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 19.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run linux_19.lts.1+ --ref 19.lts.1+ -f nightly=true - gh workflow run raspi-2_19.lts.1+ --ref 19.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_rc_11: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: rc_11 - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run linux_rc_11 --ref rc_11 -f nightly=true - gh workflow run raspi-2_rc_11 --ref rc_11 -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - trigger_cobalt_9: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: COBALT_9 - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run linux_COBALT_9 --ref COBALT_9 -f nightly=true - gh workflow run raspi-2_COBALT_9 --ref COBALT_9 -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/nightly_trigger_24.lts.1+.yaml b/.github/workflows/nightly_trigger_24.lts.1+.yaml deleted file 
mode 100644 index 45ca3996b95861..00000000000000 --- a/.github/workflows/nightly_trigger_24.lts.1+.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: nightly_trigger_24.lts.1+ - -on: - schedule: - # GMT timezone. - - cron: '0 10 * * *' - workflow_dispatch: - -jobs: - trigger_24: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 24.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run android_24.lts.1+ --ref 24.lts.1+ -f nightly=true - gh workflow run evergreen_24.lts.1+ --ref 24.lts.1+ -f nightly=true - gh workflow run linux_24.lts.1+ --ref 24.lts.1+ -f nightly=true - gh workflow run raspi-2_24.lts.1+ --ref 24.lts.1+ -f nightly=true - gh workflow run win32_24.lts.1+ --ref 24.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/nightly_trigger_25.lts.1+.yaml b/.github/workflows/nightly_trigger_25.lts.1+.yaml deleted file mode 100644 index 77175727eb4657..00000000000000 --- a/.github/workflows/nightly_trigger_25.lts.1+.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: nightly_trigger_25.lts.1+ - -on: - schedule: - # GMT timezone. - - cron: '0 10 * * *' - workflow_dispatch: - -jobs: - trigger_24: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: 25.lts.1+ - persist-credentials: false - - name: Trigger Nightly - run: | - set -x - gh workflow run android_25.lts.1+ --ref 25.lts.1+ -f nightly=true - gh workflow run evergreen_25.lts.1+ --ref 25.lts.1+ -f nightly=true - gh workflow run linux_25.lts.1+ --ref 25.lts.1+ -f nightly=true - gh workflow run raspi-2_25.lts.1+ --ref 25.lts.1+ -f nightly=true - gh workflow run win32_25.lts.1+ --ref 25.lts.1+ -f nightly=true - gh workflow run xb1_25.lts.1+ --ref 25.lts.1+ -f nightly=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/outside_collaborator.yaml b/.github/workflows/outside_collaborator.yaml deleted file mode 100644 index a8913e38a4546b..00000000000000 --- a/.github/workflows/outside_collaborator.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Outside Collaborator - -on: - pull_request_target: - types: - - opened - - reopened - -concurrency: - group: '${{ github.workflow }}-${{ github.event_name }} @ ${{ github.event.pull_request.number || github.sha }}' - cancel-in-progress: true - -jobs: - assign-reviewer: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Check if PR author is outside collaborator - env: - PR_AUTHOR_LOGIN: ${{ github.event.pull_request.user.login }} - REPO_NAME: ${{ github.event.repository.full_name }} - PR_NUMBER: ${{ github.event.number }} - run: | - PERMISSION_LEVEL=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ - "https://api.github.com/repos/$REPO_NAME/collaborators/$PR_AUTHOR_LOGIN/permission" | jq -r .role_name) - - if [ "$PERMISSION_LEVEL" == "none" ] || [ "$PERMISSION_LEVEL" == "read" ]; then - echo "PR author is an outside collaborator. Adding label..." 
- - curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ - -H "Accept: application/vnd.github.v3+json" \ - -d '["outside collaborator"]' \ - "https://api.github.com/repos/$REPO_NAME/issues/$PR_NUMBER/labels" - fi diff --git a/.github/workflows/pr_badges.yaml b/.github/workflows/pr_badges.yaml deleted file mode 100644 index 4cba1f75af8d2c..00000000000000 --- a/.github/workflows/pr_badges.yaml +++ /dev/null @@ -1,70 +0,0 @@ -name: PR badges - -on: - pull_request_target: - branches: - - 'feature/*' - - 'main' - - '25.lts.1\+' - - '24.lts.1\+' - - '23.lts.1\+' - - '22.lts.1\+' - - '21.lts.1\+' - - '20.lts.1\+' - - '19.lts.1\+' - - 'rc_11' - - 'COBALT_9' - -concurrency: - group: '${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.pull_request.number || github.sha }}' - cancel-in-progress: true - -permissions: - pull-requests: write - -jobs: - comment: - runs-on: ubuntu-latest - env: - GITHUB_SERVER_URL: ${{github.server_url}} - GITHUB_REPO: ${{github.repository}} - GITHUB_HEAD_REF: ${{ github.head_ref }} - steps: - - uses: actions/github-script@v6 - with: - script: | - // Get env vars. - const { GITHUB_SERVER_URL, GITHUB_REPO, GITHUB_HEAD_REF } = process.env - // Get the existing comments. - const {data: comments} = await github.rest.issues.listComments({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.number, - }) - - // Find any comment already made by the bot. - const botComment = comments.find(comment => { - return comment.user.type === 'Bot' && comment.body.includes('Build Status') - }) - const workflows = ["lint", "android", "evergreen", "linux", "raspi-2", "stub"] - var commentBody = ` - ## Build Status - | Workflow | Status | - | --------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | - ` - for (let i = 0; i < workflows.length; i++) { - commentBody += "| " + workflows[i] + " | [![" + workflows[i] + "](" + `${GITHUB_SERVER_URL}` + "/" + `${GITHUB_REPO}` + "/actions/workflows/" + workflows[i] + ".yaml/badge.svg?branch=" + `${GITHUB_HEAD_REF}` + ")](" + `${GITHUB_SERVER_URL}` + "/" + `${GITHUB_REPO}` + "/actions/workflows/" + workflows[i] + ".yaml?query=branch%3A" + `${GITHUB_HEAD_REF}` + ") |\n" - } - if (botComment) { - await github.rest.issues.deleteComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: botComment.id, - }) - } - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.number, - body: commentBody - }) diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml deleted file mode 100644 index 9f4382462b0acd..00000000000000 --- a/.github/workflows/pytest.yaml +++ /dev/null @@ -1,41 +0,0 @@ -name: python-tests - -on: - pull_request: - push: - branches: - - main - - feature/* - -concurrency: - group: '${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.pull_request.number || github.sha }}' - cancel-in-progress: true - -permissions: {} - -jobs: - python-test: - strategy: - matrix: - os: [ubuntu-latest, windows-latest] - python-version: ['3.8', '3.11'] - fail-fast: false - runs-on: ${{ matrix.os }} - timeout-minutes: 15 - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - persist-credentials: false 
- - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - - name: Install Pip Packages - run: pip install --require-hashes --no-deps -r ${{ github.workspace }}/docker/pytest/requirements.txt - - name: Run Tests - run: coverage run -m pytest - - name: Coverage Report - run: coverage report -m diff --git a/.github/workflows/raspi-2.yaml b/.github/workflows/raspi-2.yaml deleted file mode 100644 index 8613cfaf83d2cf..00000000000000 --- a/.github/workflows/raspi-2.yaml +++ /dev/null @@ -1,40 +0,0 @@ -name: raspi-2 - -on: - pull_request: - types: [opened, reopened, synchronize, labeled] - branches: - - main - - feature/* - push: - branches: - - main - - feature/* - schedule: - # GMT timezone. - - cron: '0 9 * * *' - workflow_dispatch: - inputs: - nightly: - description: 'Nightly workflow.' - required: true - type: boolean - default: false - -jobs: - raspi-2: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: raspi-2 - nightly: ${{ github.event.inputs.nightly }} - raspi-2-skia: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: raspi-2-skia - nightly: ${{ github.event.inputs.nightly }} diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml deleted file mode 100644 index 181c96d6ba674e..00000000000000 --- a/.github/workflows/scorecards.yml +++ /dev/null @@ -1,59 +0,0 @@ -# This workflow uses actions that are not certified by GitHub. They are provided -# by a third-party and are governed by separate terms of service, privacy -# policy, and support documentation. - -name: Scorecards supply-chain security -on: - # For Branch-Protection check. Only the default branch is supported. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection - branch_protection_rule: - # To guarantee Maintained check is occasionally updated. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained - schedule: - - cron: '18 14 * * 1' - push: - branches: [ main ] - pull_request: - branches: [ main ] - -# Declare default permissions as read only. -permissions: read-all - -jobs: - analysis: - name: Scorecards analysis - runs-on: ubuntu-latest - permissions: - # Needed to upload the results to code-scanning dashboard. - security-events: write - # Needed to publish results and get a badge (see publish_results below). - id-token: write - - timeout-minutes: 15 - steps: - - name: "Checkout code" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - persist-credentials: false - - - name: "Run analysis" - uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 - with: - results_file: results.sarif - results_format: sarif - publish_results: true - - # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF - # format to the repository Actions tab. - - name: "Upload artifact" - uses: actions/upload-artifact@97a0fba1372883ab732affbe8f94b823f91727db # v3.pre.node20 - with: - name: SARIF file - path: results.sarif - retention-days: 5 - - # Upload the results to GitHub's code scanning dashboard. 
- - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@1b1aada464948af03b950897e5eb522f92603cc2 # v3.24.9 - with: - sarif_file: results.sarif diff --git a/.github/workflows/stub.yaml b/.github/workflows/stub.yaml deleted file mode 100644 index 17e8b74082f52d..00000000000000 --- a/.github/workflows/stub.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: stub - -on: - pull_request: - types: [opened, reopened, synchronize, labeled] - branches: - - main - - feature/* - push: - branches: - - main - - feature/* - workflow_dispatch: - inputs: - nightly: - description: 'Nightly workflow.' - required: true - type: boolean - default: false - -jobs: - stub: - uses: ./.github/workflows/main.yaml - permissions: - packages: write - pull-requests: write - with: - platform: stub - nightly: ${{ github.event.inputs.nightly }} - leak_manifest_filename: "gn_built_docker_debian11_manifest" diff --git a/.github/workflows/unit_test_report.yaml b/.github/workflows/unit_test_report.yaml deleted file mode 100644 index 2b9b707839a70d..00000000000000 --- a/.github/workflows/unit_test_report.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: Upload Unit Test Results - -on: - workflow_run: - workflows: - - android - - evergreen - - linux - - raspi-2 - types: - - completed - -# TODO(b/293508740): Report failed workflow runs back to triggering PR. - -jobs: - unit-test-report: - permissions: - pull-requests: write - if: ${{ github.event.workflow_run.conclusion == 'success' || github.event.workflow_run.conclusion == 'failure' }} - runs-on: ubuntu-latest - name: Upload Unit Test Reports - steps: - - name: Download '${{ github.event.workflow_run.name }}' Unit Test Results - # The `download-artifact` action can only access artifacts that were uploaded in the same workflow. - # Since it was not this workflow that uploaded the artifacts we must use rest api to download them. - # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#using-data-from-the-triggering-workflow - uses: actions/github-script@v6 - with: - script: | - const name = context.payload.workflow_run.name; - const url = context.payload.workflow_run.html_url; - - console.log(`Downloading artifacts from ${url}`) - context.payload.workflow_run.pull_requests.forEach(pr => { - console.log(`Pull request: ${context.payload.repository.html_url}/pull/${pr.number}`); - }); - - let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.payload.workflow_run.id - }); - - let matchArtifacts = allArtifacts.data.artifacts.filter((artifact) => { - return artifact.name == "unit-test-results" - }); - - if (matchArtifacts.length == 1) { - let download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifacts[0].id, - archive_format: 'zip' - }); - let fs = require('fs'); - const path = `${process.env.GITHUB_WORKSPACE}/unit-test-results.zip`; - fs.writeFileSync(path, Buffer.from(download.data)); - } else { - core.setFailed(`Expected one artifact with name 'unit-test-results'. 
Found ${matchArtifacts.length}.`); - } - - name: Extract Archived Unit Test Results - run: unzip unit-test-results.zip -d unit-test-results - - name: Get Datadog CLI - shell: bash - env: - DD_VERSION: 'v2.18.0' - DD_SHA256SUM: 'adbe9b3a41faaf0b1d9702ba256cf8fa9e474c0cc8216f25e5b489c53d6f0a70 datadog-ci' - run: | - set -e - download_url="https://github.com/DataDog/datadog-ci/releases/download/${DD_VERSION}/datadog-ci_linux-x64" - curl -L --fail $download_url --output datadog-ci - echo ${DD_SHA256SUM} | sha256sum --check - chmod +x datadog-ci - - name: Upload to Datadog - # Unit test results are archived on the following format: - # ├── - # │ ├── - # │ │ └── - # │ └── TAGS - # ├── - # │ ├── - # │ │ └── - # │ └── TAGS - # etc. - shell: bash - env: - DATADOG_API_KEY: ${{ secrets.DD_API_KEY }} - DATADOG_SITE: us5.datadoghq.com - DD_ENV: ci - DD_SERVICE: ${{ github.event.repository.name }} - # Need to populate git info via env vars as we don't have the repo to look at. - DD_GIT_REPOSITORY_URL: ${{ github.event.repository.git_url }} - DD_GIT_COMMIT_SHA: ${{ github.event.workflow_run.head_sha }} - DD_GIT_BRANCH: ${{ github.event.workflow_run.head_branch }} - DD_GIT_COMMIT_MESSAGE: ${{ github.event.workflow_run.head_commit.message }} - DD_GIT_COMMIT_AUTHOR_NAME: ${{ github.event.workflow_run.head_commit.author.name }} - DD_GIT_COMMIT_AUTHOR_EMAIL: ${{ github.event.workflow_run.head_commit.author.email }} - DD_GIT_COMMIT_AUTHOR_DATE: ${{ github.event.workflow_run.head_commit.timestamp }} - DD_GIT_COMMIT_COMMITTER_NAME: ${{ github.event.workflow_run.head_commit.committer.name }} - DD_GIT_COMMIT_COMMITTER_EMAIL: ${{ github.event.workflow_run.head_commit.committer.email }} - DD_GIT_COMMIT_COMMITTER_DATE: ${{ github.event.workflow_run.head_commit.timestamp }} - run: | - # Loop over each platform, extract the tags and upload xml results. - for dir in unit-test-results/*/; do - echo "Uploading $dir test report" - export DD_TAGS=`cat ${dir}TAGS` - ./datadog-ci junit upload \ - $dir/**/*.xml - done diff --git a/.github/workflows/workflow_trigger.yaml b/.github/workflows/workflow_trigger.yaml deleted file mode 100644 index ed326899991c03..00000000000000 --- a/.github/workflows/workflow_trigger.yaml +++ /dev/null @@ -1,49 +0,0 @@ -name: workflow_trigger - -on: - workflow_dispatch: - inputs: - branch: - type: choice - description: Branch - options: - - '25.lts.1+' - - '24.lts.1+' - - '23.lts.1+' - - '22.lts.1+' - - '21.lts.1+' - - '20.lts.1+' - - '19.lts.1+' - - 'rc_11' - - 'COBALT_9' - workflow: - type: choice - description: Workflow name - options: - - 'android' - - 'evergreen' - - 'linux' - - 'raspi' - nightly: - description: 'Nightly workflow.' 
- required: true - type: boolean - default: false - -jobs: - trigger: - permissions: - actions: write - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - ref: ${{ github.event.branch }} - - name: Trigger Workflow - run: | - set -x - gh workflow run ${{ github.event.inputs.workflow }}_${{ github.event.inputs.branch }} --ref ${{ github.event.inputs.branch }} -f nightly=${{ github.event.inputs.nightly }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/chrobalt/build/configs/android-arm/args.gn b/chrobalt/build/configs/android-arm/args.gn new file mode 100644 index 00000000000000..b850dd00a03576 --- /dev/null +++ b/chrobalt/build/configs/android-arm/args.gn @@ -0,0 +1,2 @@ +target_os = "android" +target_cpu = "arm" diff --git a/chrobalt/build/configs/android-arm64/args.gn b/chrobalt/build/configs/android-arm64/args.gn new file mode 100644 index 00000000000000..bf5f6094c59992 --- /dev/null +++ b/chrobalt/build/configs/android-arm64/args.gn @@ -0,0 +1,2 @@ +target_os = "android" +target_cpu = "arm64" diff --git a/chrobalt/build/configs/android-x86/args.gn b/chrobalt/build/configs/android-x86/args.gn new file mode 100644 index 00000000000000..c47c5bee4173c8 --- /dev/null +++ b/chrobalt/build/configs/android-x86/args.gn @@ -0,0 +1,2 @@ +target_os = "android" +target_cpu = "x86" diff --git a/chrobalt/build/configs/linux-x64x11/args.gn b/chrobalt/build/configs/linux-x64x11/args.gn new file mode 100644 index 00000000000000..86ea5fd66c5507 --- /dev/null +++ b/chrobalt/build/configs/linux-x64x11/args.gn @@ -0,0 +1,87 @@ +target_os = "linux" +target_cpu = "x64" + +enable_nacl=false +use_dawn=false +skia_use_dawn=false +# enable_vulkan=false + +use_ozone=true + +# Enterprise stuff +enable_print_content_analysis=false +# enterprise_client_certificates=false +enterprise_cloud_content_analysis=false +enterprise_content_analysis=true +enterprise_data_controls=true +enterprise_local_content_analysis=false +enterprise_watermark=false + + +optional_trace_events_enabled = false +enable_plugins = false +enable_pdf = false +enable_printing = false +enable_cast_receiver = false +enable_paint_preview=false +cast_streaming_enable_remoting = false + +# +enable_background_contents=false +enable_background_mode=false +enable_chrome_notifications=false +enable_click_to_call=false + +enable_system_notifications=false + +enable_webui_certificate_viewer=false + +enable_hidpi = false + +enable_library_cdms = false + +# disable swiftshader +enable_swiftshader = false +enable_swiftshader_vulkan = false +swiftshader_optimized_debug_build=false +use_swiftshader_with_subzero=false +angle_enable_swiftshader=false +dawn_use_swiftshader=false + + +# Disable webnn neural networks | tflite +webnn_use_tflite = false +enable_maldoca = false +enable_reading_list = false +enable_remoting = false +enable_screen_ai_service = true + +# linux system interfaces +use_bluez = false +use_cups = false +use_dbus = false +use_mpris = false +use_udev = false +use_qt = false +use_qt6 = false + +# printing stuff +enable_oop_basic_print_dialog = false +enable_basic_print_dialog = false +enable_basic_printing = false +enable_oop_basic_print_dialog = false +enable_oop_printing = false +enable_oop_printing_no_oop_basic_print_dialog=false +enable_print_content_analysis= false +enable_print_preview=false +enable_printing=false +enable_printing_tests=false +use_cups=false + +enable_vr=false + +# Network stuff +use_kerberos=false + +toolkit_views = true +chrome_pgo_phase = 0 \ No 
newline at end of file diff --git a/chrobalt/build/configs/linux-x64x11/last_working.gn b/chrobalt/build/configs/linux-x64x11/last_working.gn new file mode 100644 index 00000000000000..ba483e10597468 --- /dev/null +++ b/chrobalt/build/configs/linux-x64x11/last_working.gn @@ -0,0 +1,85 @@ +# Use sccache +cc_wrapper = "sccache" + +enable_nacl=false +use_dawn=false +skia_use_dawn=false +# enable_vulkan=false + + +# Enterprise stuff +enable_print_content_analysis=false +# enterprise_client_certificates=false +enterprise_cloud_content_analysis=false +enterprise_content_analysis=true +enterprise_data_controls=true +enterprise_local_content_analysis=false +enterprise_watermark=false + + +optional_trace_events_enabled = false +enable_plugins = false +enable_pdf = false +enable_printing = false +enable_cast_receiver = false +enable_paint_preview=false +cast_streaming_enable_remoting = false + +# +enable_background_contents=false +enable_background_mode=false +enable_chrome_notifications=false +enable_click_to_call=false + +enable_system_notifications=false + +enable_webui_certificate_viewer=false + +enable_hidpi = false + +enable_library_cdms = false + +# disable swiftshader +enable_swiftshader = false +enable_swiftshader_vulkan = false +swiftshader_optimized_debug_build=false +use_swiftshader_with_subzero=false +angle_enable_swiftshader=false +dawn_use_swiftshader=false + + +# Disable webnn neural networks | tflite +webnn_use_tflite = false +enable_maldoca = false +enable_reading_list = false +enable_remoting = false +enable_screen_ai_service = true + +# linux system interfaces +use_bluez = false +use_cups = false +use_dbus = false +use_mpris = false +use_udev = false +use_qt = false +use_qt6 = false + +# printing stuff +enable_oop_basic_print_dialog = false +enable_basic_print_dialog = false +enable_basic_printing = false +enable_oop_basic_print_dialog = false +enable_oop_printing = false +enable_oop_printing_no_oop_basic_print_dialog=false +enable_print_content_analysis= false +enable_print_preview=false +enable_printing=false +enable_printing_tests=false +use_cups=false + +enable_vr=false + +# Network stuff +use_kerberos=false + +toolkit_views = true diff --git a/chrobalt/build/gn.py b/chrobalt/build/gn.py new file mode 100755 index 00000000000000..3d6fef78315570 --- /dev/null +++ b/chrobalt/build/gn.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Cobalt Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=missing-module-docstring,missing-function-docstring + +import argparse +import os +import subprocess +from pathlib import Path +from typing import List + + +# Parsing code Lifted from tools/code_coverage/coverage.py +def get_build_args(build_args_path): + """Parses args.gn file and returns results as original contents, and dictionary.""" + assert os.path.exists(build_args_path), ( + f'{build_args_path} is not a build directory, ' + 'missing args.gn file.') + dict_settings = {} + with open(build_args_path, encoding='utf-8') as build_args_file: + build_args_lines = build_args_file.readlines() + + for build_arg_line in build_args_lines: + build_arg_without_comments = build_arg_line.split('#')[0] + key_value_pair = build_arg_without_comments.split('=') + if len(key_value_pair) == 2: + key = key_value_pair[0].strip() + # Values are wrapped within a pair of double-quotes, so remove the leading + # and trailing double-quotes. + value = key_value_pair[1].strip().strip('"') + dict_settings[key] = value + + return build_args_lines, dict_settings + + +_BUILD_TYPES = { + 'debug': { + 'symbol_level': 2, + 'is_debug': 'false', + 'is_asan': 'false', + 'is_lsan': 'false', + 'is_msan': 'false', + }, + 'devel': { + 'symbol_level': 1, + 'is_debug': 'false', + 'is_asan': 'false', + 'is_lsan': 'false', + 'is_msan': 'false', + }, + 'qa': { + 'symbol_level': 1, + 'is_debug': 'false' + }, + 'gold': { + 'symbol_level': 0, + 'is_official_build': 'true' + } +} + +CC_WRAPPER = "sccache" +CONTROLLED_ARGS = [ + 'cc_wrapper', # See build/toolschain/cc_wrapper.gni + 'is_debug', # See build/config/BUILDCONFIG.GN + 'is_official_build', # mutually exclusive with is_debug + 'symbol_level', # See build/config/compiler/compiler.gni + 'is_asan', # See build/config/sanitizers/sanitizers.gni + 'is_lsan', # -"- + 'is_msan' # -"- +] + + +def write_build_args(build_args_path, original_lines, dict_settings, + build_type): + """ Write args file, modifying settings for config""" + controlled_args = [(k, dict_settings[k]) for k in CONTROLLED_ARGS + if k in dict_settings] + if controlled_args: + raise RuntimeError( + f'The following args cannot be set in configs: {controlled_args}') + gen_comment = '# Set by gn.py' + with open(build_args_path, 'w', encoding='utf-8') as f: + f.write(f'cc_wrapper = "{CC_WRAPPER}" {gen_comment}\n') + f.write(f'build_type = "{build_type}" {gen_comment}\n') + for key, value in _BUILD_TYPES[build_type].items(): + f.write(f'{key} = {value} {gen_comment}\n') + for line in original_lines: + f.write(line) + + +def main(out_directory: str, platform: str, build_type: str, + overwrite_args: bool, gn_gen_args: List[str]): + platform_path = f'chrobalt/build/configs/{platform}' + dst_args_gn_file = os.path.join(out_directory, 'args.gn') + src_args_gn_file = os.path.join(platform_path, 'args.gn') + Path(out_directory).mkdir(parents=True, exist_ok=True) + + if overwrite_args or not os.path.exists(dst_args_gn_file): + build_args = get_build_args(src_args_gn_file) + write_build_args(dst_args_gn_file, build_args[0], build_args[1], + build_type) + else: + print(f'{dst_args_gn_file} already exists.' 
+ + ' Running ninja will regenerate build files automatically.') + + gn_command = ['gn', 'gen', out_directory] + gn_gen_args + print(' '.join(gn_command)) + subprocess.check_call(gn_command) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + builds_directory_group = parser.add_mutually_exclusive_group() + builds_directory_group.add_argument( + 'out_directory', + type=str, + nargs='?', + help='Path to the directory to build in.') + + parser.add_argument( + '-p', + '--platform', + default='linux', + choices=['linux-x64x11', + 'android-arm', + 'android-arm64', + 'android-x86', + 'linux'], + help='The platform to build.') + parser.add_argument('-c', + '-C', + '--build_type', + default='devel', + choices=_BUILD_TYPES.keys(), + help='The build_type (configuration) to build with.') + parser.add_argument( + '--overwrite_args', + default=False, + action='store_true', + help= + 'Whether or not to overwrite an existing args.gn file if one exists ' + 'in the out directory. In general, if the file exists, you should run ' + '`gn args ` to edit it instead.') + parser.add_argument( + '--no-check', + default=False, + action='store_true', + help='Pass this flag to disable the header dependency gn check.') + script_args, gen_args = parser.parse_known_args() + + if script_args.platform == 'linux': + script_args.platform = 'linux-x64x11' + + if not script_args.no_check: + gen_args.append('--check') + + if script_args.out_directory: + builds_out_directory = script_args.out_directory + else: + BUILDS_DIRECTORY = 'out' + builds_out_directory = os.path.join( + BUILDS_DIRECTORY, + f'{script_args.platform}_{script_args.build_type}') + main(builds_out_directory, script_args.platform, script_args.build_type, + script_args.overwrite_args, gen_args) diff --git a/chrobalt/docker/linux/Dockerfile b/chrobalt/docker/linux/Dockerfile new file mode 100644 index 00000000000000..e69bfa7f34b2e2 --- /dev/null +++ b/chrobalt/docker/linux/Dockerfile @@ -0,0 +1,35 @@ +# Use the official Ubuntu image as the base +FROM debian:12 + +# Install any necessary dependencies +# NOTE: From libxcomposite down, these are minimal requirements to run a +# V8 snapshot binary, on X11/GTK builds +# gcc-multilib for cross-builds of v8/32bit arm +RUN apt-get update && apt-get install -y \ + curl python3-dev git \ + xz-utils \ + pkgconf \ + libglib2.0-0 \ + libnss3 \ + gperf \ + libdbus-1-3 \ + libxcomposite1 \ + libxdamage1 \ + libxext6 \ + libxfixes3 \ + libxrandr2 \ + libxtst6 \ + libgbm1 \ + libasound2 \ + libxkbcommon0 \ + libpango-1.0-0 \ + gcc-multilib \ + bzip2 \ + && rm -rf /var/lib/apt/lists/* + +ADD files/sccache /usr/local/bin + +ENV SCCACHE_GCS_BUCKET=githubactions-chrome-sccache +ENV SCCACHE_GCS_RW_MODE=READ_WRITE +ENV SCCACHE_GCS_KEY_PATH=/root/key.json +ENV SCCACHE_GCS_SERVICE_ACCOUNT=github-actions-bucket-access@cobalt-demo-330821.iam.gserviceaccount.com diff --git a/chrobalt/docker/linux/files/sccache b/chrobalt/docker/linux/files/sccache new file mode 100755 index 00000000000000..51521d5b88c838 Binary files /dev/null and b/chrobalt/docker/linux/files/sccache differ diff --git a/chrobalt/linux/args.gn b/chrobalt/linux/args.gn new file mode 100644 index 00000000000000..721348019cfec6 --- /dev/null +++ b/chrobalt/linux/args.gn @@ -0,0 +1,83 @@ +# Use sccache +cc_wrapper = "sccache" + +enable_nacl=false +use_dawn=false +skia_use_dawn=false +# enable_vulkan=false + + +# Enterprise stuff +enable_print_content_analysis=false +# enterprise_client_certificates=false +enterprise_cloud_content_analysis=false 
+enterprise_content_analysis=false
+enterprise_data_controls=false
+enterprise_local_content_analysis=false
+enterprise_watermark=false
+
+
+optional_trace_events_enabled = false
+enable_plugins = false
+enable_pdf = false
+enable_printing = false
+enable_cast_receiver = false
+enable_paint_preview=false
+cast_streaming_enable_remoting = false
+
+#
+enable_background_contents=false
+enable_background_mode=false
+enable_chrome_notifications=false
+enable_click_to_call=false
+
+enable_system_notifications=false
+
+enable_webui_certificate_viewer=false
+
+enable_hidpi = false
+
+enable_library_cdms = false
+
+# disable swiftshader
+enable_swiftshader = false
+enable_swiftshader_vulkan = false
+swiftshader_optimized_debug_build=false
+use_swiftshader_with_subzero=false
+angle_enable_swiftshader=false
+dawn_use_swiftshader=false
+
+
+# Disable webnn neural networks | tflite
+webnn_use_tflite = false
+enable_maldoca = false
+enable_reading_list = false
+enable_remoting = false
+enable_screen_ai_service = false
+
+# linux system interfaces
+use_bluez = false
+use_cups = false
+use_dbus = false
+use_mpris = false
+use_udev = false
+use_qt = false
+use_qt6 = false
+
+# printing stuff
+enable_oop_basic_print_dialog = false
+enable_basic_print_dialog = false
+enable_basic_printing = false
+enable_oop_basic_print_dialog = false
+enable_oop_printing = false
+enable_oop_printing_no_oop_basic_print_dialog=false
+enable_print_content_analysis= false
+enable_print_preview=false
+enable_printing=false
+enable_printing_tests=false
+use_cups=false
+
+enable_vr=false
+
+# Network stuff
+use_kerberos=false
diff --git a/chrobalt/linux/last_working.gn b/chrobalt/linux/last_working.gn
new file mode 100644
index 00000000000000..921ca1e18545d8
--- /dev/null
+++ b/chrobalt/linux/last_working.gn
@@ -0,0 +1,73 @@
+# Last tested args.gn setup for Linux
+
+cc_wrapper = "sccache"
+enable_nacl=false
+use_dawn=false
+skia_use_dawn=false
+# enable_vulkan=false
+
+
+enable_print_content_analysis=false
+# enterprise_client_certificates=false
+enterprise_cloud_content_analysis=false
+enterprise_content_analysis=false
+enterprise_data_controls=false
+enterprise_local_content_analysis=false
+enterprise_watermark=false
+
+
+optional_trace_events_enabled = false
+enable_plugins = false
+enable_pdf = false
+enable_printing = false
+enable_cast_receiver = false
+enable_paint_preview=false
+cast_streaming_enable_remoting = false
+
+#
+enable_background_contents=false
+enable_background_mode=false
+enable_chrome_notifications=false
+enable_click_to_call=false
+
+enable_system_notifications=false
+
+enable_webui_certificate_viewer=false
+
+enable_hidpi = false
+
+enable_library_cdms = false
+
+# swiftshader disable
+enable_swiftshader = false
+enable_swiftshader_vulkan = false
+swiftshader_optimized_debug_build=false
+use_swiftshader_with_subzero=false
+angle_enable_swiftshader=false
+dawn_use_swiftshader=false
+
+
+# Disable webnn | tflite
+webnn_use_tflite = false
+enable_maldoca = false
+enable_oop_printing = false
+enable_reading_list = false
+enable_remoting = false
+enable_screen_ai_service = false
+
+use_bluez = false
+use_cups = false
+use_dbus = false
+use_mpris = false
+
+enable_oop_basic_print_dialog = false
+enable_basic_print_dialog = false
+enable_basic_printing = false
+enable_oop_basic_print_dialog = false
+enable_oop_printing = false
+enable_oop_printing_no_oop_basic_print_dialog=false
+enable_print_content_analysis= false
+enable_print_preview=false
+enable_printing=false
+enable_printing_tests=false
+use_cups=false
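For local use outside the CI jobs, the wrapper might be driven as follows. This is only a sketch built from the argparse flags defined in chrobalt/build/gn.py above; it assumes depot_tools' `gn` and `ninja` are already on PATH, and the out-directory names simply follow the script's defaults.

```bash
# Sketch only: flags and defaults mirror the argparse definitions in
# chrobalt/build/gn.py; depot_tools (gn, ninja) is assumed to be on PATH.
python3 chrobalt/build/gn.py -p linux -c devel                 # 'linux' maps to 'linux-x64x11'
python3 chrobalt/build/gn.py -p android-arm -c qa --no-check   # skip gn's header dependency check
python3 chrobalt/build/gn.py out/custom_dir --overwrite_args   # explicit out dir, rewrite args.gn

# The default out directory is out/<platform>_<build_type>.
ninja -C out/linux-x64x11_devel
```

Note that `--overwrite_args` regenerates args.gn from chrobalt/build/configs/<platform>/args.gn plus the build-type settings injected by write_build_args, so any local edits to the generated file are lost.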
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 00000000000000..cde85959ccf2f2
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,11 @@
+version: '3.8'
+services:
+  linux:
+    build:
+      context: chrobalt/docker/linux
+      cache_from:
+        - ghcr.io/youtube/cobalt_sandbox/linux:latest
+    image: ghcr.io/youtube/cobalt_sandbox/linux:latest
+    platform: linux/amd64
+    environment:
+      - DEPOT_TOOLS_UPDATE=0
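The compose file only defines the `linux` build image; it does not say how the source tree is mounted or what command runs inside. A possible local flow is sketched below, where the mount point, working directory, and interactive shell are assumptions rather than anything specified by this patch.

```bash
# Sketch only: the volume mount, working directory, and shell are assumptions;
# docker-compose.yaml above defines just the image, cache source, and env.
docker compose build linux        # builds chrobalt/docker/linux, reusing the ghcr.io cache image
docker compose run --rm -v "$PWD":/cobalt -w /cobalt linux bash
```

Setting DEPOT_TOOLS_UPDATE=0 in the service environment keeps depot_tools from self-updating inside the container.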