diff --git a/.circleci/config.yml b/.circleci/config.yml
index 3ebbf3c1e5d5..89d7e99d12fd 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -106,7 +106,7 @@ references:
neo4j_conf_file="/etc/neo4j/neo4j.conf"
sudo echo "dbms.security.procedures.unrestricted=apoc.*" >> $neo4j_conf_file
sudo echo "dbms.security.procedures.allowlist=apoc.*" >> $neo4j_conf_file
- sudo echo "dbms.memory.transaction.total.max=600m" >> $neo4j_conf_file
+ sudo echo "dbms.memory.transaction.total.max=2000m" >> $neo4j_conf_file
apoc_conf_file="/etc/neo4j/apoc.conf"
sudo echo "apoc.export.file.enabled=true" > $apoc_conf_file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4be5b9e92957..c4dba753fe2c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -82,18 +82,19 @@
.gitlab/ci/* @yucohen
.gitlab/* @yucohen
.gitlab-ci.yml @yucohen
-/Tests/scripts/wait_in_line_for_cloud_env.sh @daryakoval
+/Tests/scripts/wait_in_line_for_cloud_env.sh @yucohen
.gitlab/ci/.gitlab-ci.staging.yml @ilaner
-/Tests/scripts/uninstall_packs_and_reset_bucket_cloud.sh @daryakoval
-/Tests/Marketplace/search_and_uninstall_pack.py @daryakoval
-/Tests/scripts/install_content_and_test_integrations.sh @daryakoval
-/Tests/configure_and_test_integration_instances.py @daryakoval
-/Tests/scripts/print_cloud_machine_details.sh @daryakoval
-/Tests/scripts/run_tests.sh @daryakoval
-/Tests/scripts/download_demisto_conf.sh @daryakoval
-Tests/scripts/test_modeling_rules.sh @daryakoval
-Tests/scripts/lock_cloud_machines.py @daryakoval
+/Tests/scripts/uninstall_packs_and_reset_bucket_cloud.sh @yucohen
+/Tests/Marketplace/search_and_uninstall_pack.py @yucohen
+/Tests/scripts/install_content_and_test_integrations.sh @yucohen
+/Tests/configure_and_test_integration_instances.py @yucohen
+/Tests/scripts/print_cloud_machine_details.sh @yucohen
+/Tests/scripts/run_tests.sh @yucohen
+/Tests/scripts/download_demisto_conf.sh @yucohen
+Tests/scripts/test_modeling_rules.sh @AradCarmi
+Tests/scripts/lock_cloud_machines.py @yucohen
Tests/Marketplace/server_content_items.json @dantavori
+validation_config.toml @YuvHayun @JudahSchwartz @GuyAfik @anara123
# SDK Related
.gitlab/ci/.gitlab-ci.sdk-nightly.yml @dorschw
@@ -113,7 +114,7 @@ poetry.lock @ilaner @dorschw
.devcontainer/* @ilaner
# Demisto Class
-Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py @daryakoval
+Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py @dantavori
# TIM Related
/Packs/TAXIIServer/Integrations/* @MLainer1
@@ -139,4 +140,4 @@ Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py @daryako
/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/ @jlevypaloalto
/Packs/Base/Scripts/DBotSuggestClassifierMapping/ @jlevypaloalto
/Packs/Base/Scripts/GetMLModelEvaluation/ @jlevypaloalto
-/Packs/Base/Scripts/DBotMLFetchData/ @jlevypaloalto
\ No newline at end of file
+/Packs/Base/Scripts/DBotMLFetchData/ @jlevypaloalto
diff --git a/.github/workflows/check-nightly-ok-label.yml b/.github/workflows/check-nightly-ok-label.yml
new file mode 100644
index 000000000000..f2a52c95c594
--- /dev/null
+++ b/.github/workflows/check-nightly-ok-label.yml
@@ -0,0 +1,51 @@
+name: Check nightly-ok label
+
+on:
+ pull_request:
+ types: [opened, synchronize, labeled, unlabeled]
+
+jobs:
+ check_label:
+ runs-on: ubuntu-latest
+ if: github.repository == 'demisto/content' && github.event.pull_request.head.repo.fork == false
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Check if files under .gitlab directory are changed
+ id: check-changes
+ run: |
+ CHANGED_FILES=$(git diff --name-only origin/master origin/${{ github.head_ref || github.ref_name }})
+ echo "All changed files:"
+ echo "${CHANGED_FILES}"
+ GITLAB_CHANGED_FILES=$( [[ $CHANGED_FILES == *".gitlab/ci"* ]] && echo true || echo false)
+ echo "Files in the .gitlab folder have changed: ${GITLAB_CHANGED_FILES}"
+ echo "gitlab_changed_files=$GITLAB_CHANGED_FILES" >> $GITHUB_OUTPUT
+ if [[ $GITLAB_CHANGED_FILES == true ]]; then
+ echo 'Files under .gitlab folder has changed, Will check if the PR has the `nightly-ok` label.'
+ else
+ echo 'Files in the .gitlab folder have not been changed.'
+ fi
+
+ - name: Check if PR has the nightly-ok label
+ uses: actions/github-script@v7
+ id: check-label
+ with:
+ script: |
+ const gitlabChangedFiles = ${{ steps.check-changes.outputs.gitlab_changed_files }};
+ if(gitlabChangedFiles) {
+ console.log('Files under .gitlab folder has changed, Will check if the PR has the `nightly-ok` label.');
+ const labels = context.payload.pull_request.labels.map(label => label.name);
+ const hasLabel = labels.includes('nightly-ok');
+ if (hasLabel) {
+ console.log('All good, the PR has the `nightly-ok` label.');
+ } else {
+ console.log('PR does not have the `nightly-ok` label. It is required when changing files under the `.gitlab` directory. Please run nightly using the Utils/gitlab_triggers/trigger_content_nightly_build.sh script, check that succeeded, and add the `nightly-ok` label');
+ process.exit(1); // Exit with failure status if label is missing
+ }
+ } else {
+ console.log('Files in the .gitlab folder have not been changed.');
+ }
diff --git a/.github/workflows/create-internal-pr-from-external.yml b/.github/workflows/create-internal-pr-from-external.yml
index 65c0847544f0..dfc8f072d0ea 100644
--- a/.github/workflows/create-internal-pr-from-external.yml
+++ b/.github/workflows/create-internal-pr-from-external.yml
@@ -19,11 +19,11 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Poetry
- uses: Gr1N/setup-poetry@v8
+ uses: Gr1N/setup-poetry@v9
- name: Print Context
run: |
echo "$GITHUB_CONTEXT"
diff --git a/.github/workflows/handle-new-external-pr.yml b/.github/workflows/handle-new-external-pr.yml
index 50ad5129c5c4..63cab0acc72e 100644
--- a/.github/workflows/handle-new-external-pr.yml
+++ b/.github/workflows/handle-new-external-pr.yml
@@ -17,11 +17,11 @@ jobs:
fetch-depth: 2
- name: Setup Python
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Poetry
- uses: Gr1N/setup-poetry@v8
+ uses: Gr1N/setup-poetry@v9
- name: Print Context
run: |
echo "$GITHUB_CONTEXT"
diff --git a/.github/workflows/pre-commit-reuse.yml b/.github/workflows/pre-commit-reuse.yml
index 954133224066..c8375e31a4b6 100644
--- a/.github/workflows/pre-commit-reuse.yml
+++ b/.github/workflows/pre-commit-reuse.yml
@@ -19,6 +19,9 @@ jobs:
with:
fetch-depth: 0
+ - name: Set PYTHONPATH
+ run: echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
+
- name: Setup python
uses: actions/setup-python@v4
with:
@@ -45,13 +48,44 @@ jobs:
- name: "Check coverage.xml exists"
if: always()
- id: check_files
+ id: check-coverage-xml-exists
uses: andstor/file-existence-action@v2
with:
files: "coverage_report/coverage.xml"
+ - name: "Check pytest report exists"
+ if: always()
+ id: check-pytest-junit-exists
+ uses: andstor/file-existence-action@v2
+ with:
+ files: ".report_pytest.xml"
+
+ - name: Create pack-wise pytest report
+ run: poetry run python Utils/github_workflow_scripts/parse_junit_per_pack.py
+ if: |
+ always() &&
+ steps.check-pytest-junit-exists.outputs.files_exists == 'true' &&
+ github.event.pull_request.head.repo.fork == false
+
+ - name: Upload junit & pack-wise pytest report
+ uses: actions/upload-artifact@v4
+ if: |
+ always() &&
+ steps.check-pytest-junit-exists.outputs.files_exists == 'true' &&
+ github.event.pull_request.head.repo.fork == false
+ with:
+ name: pytest
+ path: |
+ packwise_pytest_time.csv
+ .report_pytest.xml
+ if-no-files-found: error
+
- name: Pytest coverage comment
- if: always() && steps.check_files.outputs.files_exists == 'true' && github.event.pull_request.head.repo.fork == false
+ if: |
+ always() &&
+ steps.check-coverage-xml-exists.outputs.files_exists == 'true' &&
+ steps.check-pytest-junit-exists.outputs.files_exists == 'false' &&
+ github.event.pull_request.head.repo.fork == false
uses: MishaKav/pytest-coverage-comment@main
with:
pytest-xml-coverage-path: coverage_report/coverage.xml
diff --git a/.github/workflows/trigger-contribution-build.yml b/.github/workflows/trigger-contribution-build.yml
index b51464014b9f..018ff010b997 100644
--- a/.github/workflows/trigger-contribution-build.yml
+++ b/.github/workflows/trigger-contribution-build.yml
@@ -24,7 +24,7 @@ jobs:
PR_NUMBER: ${{ github.event.pull_request.number }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref }}
CONTRIB_BRANCH: ${{ github.event.pull_request.head.label }}
- CONTRIB_REPO: ${{ github.event.repository.name }}
+ CONTRIB_REPO: ${{ github.event.pull_request.head.repo.name }}
USERNAME: ${{ secrets.SECRET_CHECK_USER_NG }}
PASSWORD: ${{ secrets.SECRET_CHECK_PASS_NG }}
GOLD_SERVER_URL: ${{ secrets.GOLD_SERVER_URL_NG }}
diff --git a/.github/workflows/update-demisto-sdk-version.yml b/.github/workflows/update-demisto-sdk-version.yml
index 8dfd8e845052..1d07304f6b29 100644
--- a/.github/workflows/update-demisto-sdk-version.yml
+++ b/.github/workflows/update-demisto-sdk-version.yml
@@ -54,6 +54,7 @@ jobs:
run: |
poetry add --group dev demisto-sdk@${{inputs.release_version}}
poetry lock --no-update
+ git add .
source .venv/bin/activate
demisto-sdk pre-commit --mode=ci
git add .
diff --git a/.gitlab/ci/.gitlab-ci.bucket-upload.yml b/.gitlab/ci/.gitlab-ci.bucket-upload.yml
index b865b571ccdb..daf520d9ea16 100644
--- a/.gitlab/ci/.gitlab-ci.bucket-upload.yml
+++ b/.gitlab/ci/.gitlab-ci.bucket-upload.yml
@@ -54,6 +54,11 @@ run-validations-upload-flow:
- .run-validations
- .bucket-upload-rule
+run-validations-upload-flow-new-validate-flow:
+ extends:
+ - .run-validations-new-validate-flow
+ - .bucket-upload-rule
+ allow_failure: true
run-unittests-and-lint-upload-flow:
cache:
diff --git a/.gitlab/ci/.gitlab-ci.global.yml b/.gitlab/ci/.gitlab-ci.global.yml
index c7df44c12e4a..8b53bce409c5 100644
--- a/.gitlab/ci/.gitlab-ci.global.yml
+++ b/.gitlab/ci/.gitlab-ci.global.yml
@@ -201,7 +201,7 @@
.get_last_upload_commit: &get_last_upload_commit
- section_start "Getting last bucket upload commit" --collapsed
- gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1
- - gsutil cp "gs://$GCS_MARKET_BUCKET/content/packs/index.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/previous_index.json"
+ - gsutil cp "gs://$GCS_PRODUCTION_BUCKET/content/packs/index.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/previous_index.json"
- export LAST_UPLOAD_COMMIT=$(cat "${ARTIFACTS_FOLDER_SERVER_TYPE}/previous_index.json" | jq -r ".\"commit\"")
- section_end "Getting last bucket upload commit"
@@ -501,8 +501,6 @@
stage: unittests-and-validations
extends:
- .default-job-settings
- variables:
- KUBERNETES_CPU_REQUEST: 1000m
artifacts:
expire_in: 30 days
paths:
@@ -545,6 +543,25 @@
- !reference [ .validate_content_test_conf_branch_merged ] # This section should be the last one in the script, do not move it.
- job-done
+.run-validations-new-validate-flow:
+ stage: unittests-and-validations
+ extends:
+ - .default-job-settings
+ artifacts:
+ expire_in: 30 days
+ paths:
+ - ${CI_PROJECT_DIR}/artifacts/*
+ - ${CI_PROJECT_DIR}/pipeline_jobs_folder/*
+ when: always
+ script:
+ - section_start "Validate Files and Yaml"
+ - |
+ ./Tests/scripts/linters_runner.sh
+ ./Tests/scripts/new_validate.sh
+ - section_end "Validate Files and Yaml"
+ - !reference [ .validate_content_test_conf_branch_merged ] # This section should be the last one in the script, do not move it.
+ - job-done
+
.jobs-done-check:
stage: are-jobs-really-done
extends:
diff --git a/.gitlab/ci/.gitlab-ci.on-push.yml b/.gitlab/ci/.gitlab-ci.on-push.yml
index 791fd1599d06..bdb26f78d67b 100644
--- a/.gitlab/ci/.gitlab-ci.on-push.yml
+++ b/.gitlab/ci/.gitlab-ci.on-push.yml
@@ -107,6 +107,16 @@ run-validations:
- if: '$NIGHTLY'
- if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/'
+# runs in gitlab for the on-push flow (except for contributors)
+run-validations-new-validate-flow:
+ extends:
+ - .run-validations-new-validate-flow
+ rules:
+ - if: '$NIGHTLY'
+ - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/'
+ allow_failure: true
+
+
# runs in gitlab for the on-push flow, on every new commit pushed to the branch.
validate-content-conf:
tags:
@@ -132,7 +142,6 @@ validate-content-conf:
cache:
policy: pull-push
variables:
- KUBERNETES_CPU_REQUEST: 2000m
EXTRACT_PRIVATE_TESTDATA: "true"
stage: prepare-testing-bucket
script:
@@ -140,6 +149,11 @@ validate-content-conf:
- !reference [.create-release-notes-and-common-docs]
- !reference [.secrets-fetch]
- section_start "Create or update content graph" --collapsed
+ - |
+ echo "set DEMISTO_SDK_GRAPH_FORCE_CREATE to true to create graph from scratch"
+ export DEMISTO_SDK_GRAPH_FORCE_CREATE=true
+ echo "DEMISTO_SDK_GRAPH_FORCE_CREATE was set to true to create graph from scratch"
+ echo $DEMISTO_SDK_GRAPH_FORCE_CREATE
- echo "Staging the repo to include the private packs in the graph"
- git add Packs
- echo "Updating the content graph"
@@ -210,6 +224,10 @@ validate-content-conf:
- ./Tests/scripts/prepare_content_packs_for_testing.sh "$MARKETPLACE_BUCKET" "$STORAGE_BASE_PATH" "$MARKETPLACE_VERSION"
- section_end "Prepare Content Packs for Testing"
+ - section_start "Override and upload core packs versions"
+ - ./Tests/Marketplace/upload_versions_core_files.sh "$MARKETPLACE_BUCKET" "$STORAGE_BASE_PATH" "$MARKETPLACE_VERSION" "$LAST_UPLOAD_COMMIT"
+ - section_end "Override and upload core packs versions"
+
- section_start "Create Instances for XSOAR"
- |
if [[ ${MARKETPLACE_VERSION} = "xsoar" ]]; then
@@ -376,9 +394,6 @@ tests_xsoar_server:
- !reference [.filter-non-nightly-docker-updates-rule, rules]
- if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/'
- if: '$NIGHTLY'
- when: always
- variables:
- KUBERNETES_CPU_REQUEST: 2000m
parallel:
matrix:
- INSTANCE_ROLE:
diff --git a/.gitlab/ci/.gitlab-ci.sdk-nightly.yml b/.gitlab/ci/.gitlab-ci.sdk-nightly.yml
index 263e70e179e1..e242289163e1 100644
--- a/.gitlab/ci/.gitlab-ci.sdk-nightly.yml
+++ b/.gitlab/ci/.gitlab-ci.sdk-nightly.yml
@@ -96,6 +96,12 @@ demisto-sdk-nightly:run-validations:
- .run-validations
- .sdk-nightly-schedule-rule
+demisto-sdk-nightly:run-validations-new-validate-flow:
+ extends:
+ - .run-validations-new-validate-flow
+ - .sdk-nightly-schedule-rule
+ allow_failure: true
+
demisto_sdk_nightly:check_idset_dependent_commands:
tags:
- gke
@@ -108,7 +114,6 @@ demisto_sdk_nightly:check_idset_dependent_commands:
variables: true
variables:
IS_NIGHTLY: "false"
- KUBERNETES_CPU_REQUEST: 1000m
PRODUCT_TYPE: "XSOAR"
SERVER_TYPE: "XSOAR"
INSTANCE_ROLE: "Server Master"
@@ -165,7 +170,6 @@ demisto-sdk-nightly:xsoar-prepare-testing-bucket:
ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_XSOAR}/instance_${INSTANCE_ROLE}"
ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}"
IFRA_ENV_TYPE: "Server Master"
- KUBERNETES_CPU_REQUEST: 2000m
MARKETPLACE_VERSION: "xsoar"
MARKETPLACE_BUCKET: "$GCS_MARKET_BUCKET"
cache:
@@ -173,7 +177,6 @@ demisto-sdk-nightly:xsoar-prepare-testing-bucket:
needs: []
stage: prepare-testing-bucket
script:
- - unset DEMISTO_SDK_GRAPH_FORCE_CREATE
- !reference [.generic-prepare-testing-bucket, script]
- job-done
@@ -194,13 +197,11 @@ demisto-sdk-nightly:mpv2-prepare-testing-bucket:
MARKETPLACE_BUCKET: "$GCS_MARKET_V2_BUCKET"
PRODUCT_NAME: "Cortex XSIAM"
IFRA_ENV_TYPE: "Server Master"
- KUBERNETES_CPU_REQUEST: 2000m
cache:
policy: pull-push
needs: []
stage: prepare-testing-bucket
script:
- - unset DEMISTO_SDK_GRAPH_FORCE_CREATE
- !reference [.generic-prepare-testing-bucket, script]
- job-done
@@ -219,13 +220,11 @@ demisto-sdk-nightly:xpanse-prepare-testing-bucket:
PRODUCT_NAME: "Cortex XPANSE"
MARKETPLACE_BUCKET: "$GCS_MARKET_XPANSE_BUCKET"
IFRA_ENV_TYPE: "Server Master"
- KUBERNETES_CPU_REQUEST: 2000m
cache:
policy: pull-push
needs: []
stage: prepare-testing-bucket
script:
- - unset DEMISTO_SDK_GRAPH_FORCE_CREATE
- !reference [.generic-prepare-testing-bucket, script]
- job-done
@@ -237,7 +236,6 @@ demisto-sdk-nightly:xsoar-saas-prepare-testing-bucket:
- .sdk-nightly-schedule-rule
variables:
IFRA_ENV_TYPE: "Server Master"
- KUBERNETES_CPU_REQUEST: 2000m
PRODUCT_TYPE: "XSOAR"
SERVER_TYPE: "XSOAR SAAS"
ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}"
@@ -249,7 +247,6 @@ demisto-sdk-nightly:xsoar-saas-prepare-testing-bucket:
needs: []
stage: prepare-testing-bucket
script:
- - unset DEMISTO_SDK_GRAPH_FORCE_CREATE
- !reference [.generic-prepare-testing-bucket, script]
- job-done
diff --git a/.gitlab/ci/.gitlab-ci.variables.yml b/.gitlab/ci/.gitlab-ci.variables.yml
index 9f5d98af61b5..a4af7160ca66 100644
--- a/.gitlab/ci/.gitlab-ci.variables.yml
+++ b/.gitlab/ci/.gitlab-ci.variables.yml
@@ -38,7 +38,6 @@ variables:
OVERRIDE_ALL_PACKS: "false"
TEST_UPLOAD: "true"
NATIVE_CANDIDATE_IMAGE: "latest"
- DEMISTO_SDK_GRAPH_FORCE_CREATE: "true" # change this when the demisto-sdk update-graph command is stable
DEMISTO_SDK_LOG_FILE_PATH: "${ARTIFACTS_FOLDER}/logs"
CONTENT_GITLAB_CI: "true"
POETRY_VIRTUALENVS_OPTIONS_ALWAYS_COPY: "true"
@@ -46,3 +45,5 @@ variables:
POETRY_VERSION: "1.6.1"
INSTALL_POETRY: "true"
DOCKER_IO: "docker.io" # defined in the project level CI/CD variables
+ KUBERNETES_CPU_REQUEST: 2
+ KUBERNETES_MEMORY_REQUEST: 2Gi
diff --git a/.pre-commit-config_template.yaml b/.pre-commit-config_template.yaml
index 711b2748d49b..f6218f3e3c4f 100644
--- a/.pre-commit-config_template.yaml
+++ b/.pre-commit-config_template.yaml
@@ -58,6 +58,14 @@ repos:
- community
- repo: local
hooks:
+ - id: xsoar-lint
+ name: xsoar-lint
+ description: Run xsoar-linter on the code in content packs
+ entry: demisto-sdk xsoar-lint
+ files: ^Packs\/.*\.py$
+ exclude: _test\.py|\.vulture_whitelist\.py|test_data|tests_data|TestData
+ require_serial: true
+ language: system
- id: pylint-in-docker
name: pylint-in-docker
description: Run pylint on the code in content packs
@@ -204,8 +212,8 @@ repos:
pass_filenames: false
needs:
- pytest-in-docker
- - id: coverage-analyze
- name: coverage-analyze
+ - id: coverage-pytest-analyze
+ name: coverage-pytest-analyze
entry: demisto-sdk coverage-analyze
description: Running demisto-sdk coverage-analyze and showing a coverage report.
language: system
@@ -289,7 +297,7 @@ repos:
- decorator==5.1.1 ; python_version >= "3.8" and python_version < "3.11"
- defusedxml==0.7.1 ; python_version >= "3.8" and python_version < "3.11"
- demisto-py==3.2.13 ; python_version >= "3.8" and python_version < "3.11"
- - demisto-sdk==1.26.2 ; python_version >= "3.8" and python_version < "3.11"
+ - demisto-sdk==1.27.4 ; python_version >= "3.8" and python_version < "3.11"
- dictdiffer==0.9.0 ; python_version >= "3.8" and python_version < "3.11"
- dictor==0.1.12 ; python_version >= "3.8" and python_version < "3.11"
- distlib==0.3.7 ; python_version >= "3.8" and python_version < "3.11"
diff --git a/Packs/AHA/Integrations/AHA/AHA.yml b/Packs/AHA/Integrations/AHA/AHA.yml
index 5de0da5cc2eb..8a168b7dfea0 100644
--- a/Packs/AHA/Integrations/AHA/AHA.yml
+++ b/Packs/AHA/Integrations/AHA/AHA.yml
@@ -171,7 +171,7 @@ script:
script: "-"
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
fromversion: 6.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/AHA/ReleaseNotes/1_0_26.md b/Packs/AHA/ReleaseNotes/1_0_26.md
new file mode 100644
index 000000000000..e93cd27725e0
--- /dev/null
+++ b/Packs/AHA/ReleaseNotes/1_0_26.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Aha
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AHA/TestPlaybooks/AHA_TestPlaybook.yml b/Packs/AHA/TestPlaybooks/AHA_TestPlaybook.yml
index 36a17a78c3c2..19ab66f05827 100644
--- a/Packs/AHA/TestPlaybooks/AHA_TestPlaybook.yml
+++ b/Packs/AHA/TestPlaybooks/AHA_TestPlaybook.yml
@@ -1,7 +1,7 @@
-id: 73d65261-a8d2-45d0-8a62-000edfdace6b
+id: AHA_TestPlaybook
version: 16
vcShouldKeepItemLegacyProdMachine: false
-name: AHA
+name: AHA_TestPlaybook
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/AHA/pack_metadata.json b/Packs/AHA/pack_metadata.json
index e6b6324cfef5..1eb4c2036297 100644
--- a/Packs/AHA/pack_metadata.json
+++ b/Packs/AHA/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AHA",
"description": "Use the Aha! integration to edit name/title description and status of features in Aha! according to their status in Jira",
"support": "xsoar",
- "currentVersion": "1.0.25",
+ "currentVersion": "1.0.26",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AMP/Integrations/AMPv2/AMPv2.yml b/Packs/AMP/Integrations/AMPv2/AMPv2.yml
index 87d1f1091592..eb010f640a01 100644
--- a/Packs/AMP/Integrations/AMPv2/AMPv2.yml
+++ b/Packs/AMP/Integrations/AMPv2/AMPv2.yml
@@ -1753,7 +1753,7 @@ script:
- contextPath: DBotScore.Score
description: The actual score.
type: Number
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/AMP/Integrations/CiscoAMPEventCollector/CiscoAMPEventCollector.yml b/Packs/AMP/Integrations/CiscoAMPEventCollector/CiscoAMPEventCollector.yml
index d0cdc0643334..363f6ea37861 100644
--- a/Packs/AMP/Integrations/CiscoAMPEventCollector/CiscoAMPEventCollector.yml
+++ b/Packs/AMP/Integrations/CiscoAMPEventCollector/CiscoAMPEventCollector.yml
@@ -65,7 +65,7 @@ script:
description: Gets events from Cisco AMP.
execution: false
name: cisco-amp-get-events
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
isfetchevents: true
runonce: false
script: '-'
diff --git a/Packs/AMP/ReleaseNotes/2_1_2.md b/Packs/AMP/ReleaseNotes/2_1_2.md
new file mode 100644
index 000000000000..71b54c323cb4
--- /dev/null
+++ b/Packs/AMP/ReleaseNotes/2_1_2.md
@@ -0,0 +1,5 @@
+#### Integrations
+##### Cisco AMP v2
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
+##### Cisco AMP Event Collector
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AMP/pack_metadata.json b/Packs/AMP/pack_metadata.json
index 7b8afa3d6db2..3394c125e84e 100644
--- a/Packs/AMP/pack_metadata.json
+++ b/Packs/AMP/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco AMP",
"description": "Uses CISCO AMP Endpoint",
"support": "xsoar",
- "currentVersion": "2.1.1",
+ "currentVersion": "2.1.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/AWS-AccessAnalyzer.yml b/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/AWS-AccessAnalyzer.yml
index 1b305b17f041..5f18c8c282c5 100755
--- a/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/AWS-AccessAnalyzer.yml
+++ b/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/AWS-AccessAnalyzer.yml
@@ -301,7 +301,7 @@ script:
name: roleSessionDuration
description: Updates findings with the new values provided in the request.
name: aws-access-analyzer-update-findings
- dockerimage: demisto/boto3py3:1.0.0.87582
+ dockerimage: demisto/boto3py3:1.0.0.88855
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/AWS-AccessAnalyzer/ReleaseNotes/1_1_29.md b/Packs/AWS-AccessAnalyzer/ReleaseNotes/1_1_29.md
new file mode 100644
index 000000000000..a1a9dc7af0ee
--- /dev/null
+++ b/Packs/AWS-AccessAnalyzer/ReleaseNotes/1_1_29.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - AccessAnalyzer
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-AccessAnalyzer/pack_metadata.json b/Packs/AWS-AccessAnalyzer/pack_metadata.json
index c5a3df65fc69..b032aa31693f 100644
--- a/Packs/AWS-AccessAnalyzer/pack_metadata.json
+++ b/Packs/AWS-AccessAnalyzer/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - AccessAnalyzer",
"description": "Amazon Web Services IAM Access Analyzer",
"support": "xsoar",
- "currentVersion": "1.1.28",
+ "currentVersion": "1.1.29",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.py b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.py
index 322f55a157b7..b16f45fea552 100644
--- a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.py
+++ b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.py
@@ -1,6 +1,7 @@
-import demistomock as demisto
-from CommonServerPython import *
-from CommonServerUserPython import *
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
import boto3
from botocore.config import Config
from botocore.parsers import ResponseParserError
@@ -104,8 +105,8 @@ def aws_session(service='cloudtrail', region=None, roleArn=None, roleSessionName
def handle_returning_date_to_string(date_obj: datetime | str) -> str:
"""Gets date object to string"""
- # if the returning date is a string leave it as is.
- if isinstance(date_obj, str):
+ # if the returning date is a string or None, leave it as is.
+ if date_obj is None or isinstance(date_obj, str):
return date_obj
# if event time is datetime object - convert it to string.
@@ -238,6 +239,40 @@ def describe_trails(args: dict) -> CommandResults:
)
+def get_trail_status(args: dict) -> CommandResults:
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ kwargs = {'Name': args.get('name')}
+
+ response = client.get_trail_status(**kwargs)
+
+ data = {
+ 'IsLogging': response.get('IsLogging'),
+ 'LatestDeliveryTime': handle_returning_date_to_string(response.get('LatestDeliveryTime')),
+ 'LatestCloudWatchLogsDeliveryError': response.get('LatestCloudWatchLogsDeliveryError'),
+ 'LatestDeliveryErrorDetails': response.get('LatestDeliveryErrorDetails'),
+ 'LatestNotificationError': response.get('LatestNotificationError'),
+ 'LatestNotificationTime': handle_returning_date_to_string(response.get('LatestNotificationTime')),
+ 'StartLoggingTime': handle_returning_date_to_string(response.get('StartLoggingTime')),
+ 'StopLoggingTime': handle_returning_date_to_string(response.get('StopLoggingTime')),
+ 'LatestCloudWatchLogsDeliveryTime': handle_returning_date_to_string(response.get('LatestCloudWatchLogsDeliveryTime')),
+ 'LatestDigestDeliveryTime': handle_returning_date_to_string(response.get('LatestDigestDeliveryTime')),
+ 'LatestDigestDeliveryError': response.get('LatestDigestDeliveryError')
+ }
+
+ return CommandResults(
+ outputs_prefix="AWS.CloudTrail.TrailStatus",
+ outputs_key_field="Name",
+ outputs=data,
+ readable_output=tableToMarkdown('AWS CloudTrail TrailStatus', data),
+ )
+
+
def update_trail(args: dict) -> CommandResults:
client = aws_session(
region=args.get('region'),
@@ -409,6 +444,8 @@ def main():
return_results(stop_logging(args))
if command == 'aws-cloudtrail-lookup-events':
return_results(lookup_events(args))
+ if command == 'aws-cloudtrail-get-trail-status':
+ return_results(get_trail_status(args))
except Exception as e:
err = "Error has occurred in the AWS CloudTrail Integration."
diff --git a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.yml b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.yml
index 898dc1bc0ead..dde5a179614c 100644
--- a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.yml
+++ b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail.yml
@@ -371,9 +371,60 @@ script:
- contextPath: AWS.CloudTrail.Events.CloudTrailEvent
description: A JSON string that contains a representation of the event returned.
type: string
- dockerimage: demisto/boto3py3:1.0.0.86958
+ - arguments:
+ - description: Specifies the names of multiple trails.
+ name: trailNameList
+ - description: Specifies the region of the trail.
+ name: region
+ required: true
+ - description: The Amazon Resource Name (ARN) of the role to assume.
+ name: roleArn
+ - description: An identifier for the assumed role session.
+ name: roleSessionName
+ - description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
+ name: roleSessionDuration
+ - description: Specifies the name of the trail.
+ name: name
+ required: true
+ description: Returns a JSON-formatted list of information about the specified trail. Fields include information on delivery errors, Amazon SNS and Amazon S3 errors, and start and stop logging times for each trail.
+ name: aws-cloudtrail-get-trail-status
+ outputs:
+ - contextPath: AWS.CloudTrail.TrailStatus.IsLogging
+ description: Whether the CloudTrail trail is currently logging Amazon Web Services API calls.
+ type: boolean
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestDeliveryError
+ description: Displays any Amazon S3 error that CloudTrail encountered when attempting to deliver log files to the designated bucket.
+ type: string
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestNotificationError
+ description: Displays any Amazon SNS error that CloudTrail encountered when attempting to send a notification.
+ type: string
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestDeliveryTime
+ description: Specifies the date and time that CloudTrail last delivered log files to an account’s Amazon S3 bucket.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestNotificationTime
+ description: Specifies the date and time of the most recent Amazon SNS notification that CloudTrail has written a new log file to an account’s Amazon S3 bucket.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.StartLoggingTime
+ description: Specifies the most recent date and time when CloudTrail started recording API calls for an Amazon Web Services account.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.StopLoggingTime
+ description: Specifies the most recent date and time when CloudTrail stopped recording API calls for an Amazon Web Services account.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestCloudWatchLogsDeliveryError
+ description: Displays any CloudWatch Logs error that CloudTrail encountered when attempting to deliver logs to CloudWatch Logs.
+ type: string
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestCloudWatchLogsDeliveryTime
+ description: Displays the most recent date and time when CloudTrail delivered logs to CloudWatch Logs.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestDigestDeliveryTime
+ description: Specifies the date and time that CloudTrail last delivered a digest file to an account’s Amazon S3 bucket.
+ type: date
+ - contextPath: AWS.CloudTrail.TrailStatus.LatestDigestDeliveryError
+ description: Displays any Amazon S3 error that CloudTrail encountered when attempting to deliver a digest file to the designated bucket.
+ type: string
+ dockerimage: demisto/boto3py3:1.0.0.89556
runonce: false
- script: '-'
+ script: ''
subtype: python3
type: python
tests:
diff --git a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_description.md b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_description.md
index 45a7420390ac..a3cec4b1f0cb 100644
--- a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_description.md
+++ b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_description.md
@@ -11,4 +11,4 @@ on your AWS environment.
- Attach a Role to the Instance Profile.
- Configure the Necessary IAM Roles that the AWS Integration Can Assume.
-For detailed instructions, see the [AWS Integrations - Authentication](https://xsoar.pan.dev/docs/reference/articles/aws-integrations---authentication).
+For detailed instructions, see the [AWS Integrations - Authentication](https://xsoar.pan.dev/docs/reference/articles/aws-integrations---authentication).
\ No newline at end of file
diff --git a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_test.py b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_test.py
index c8b5ace4239f..4c3eee8c0ba4 100644
--- a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_test.py
+++ b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/AWS-CloudTrail_test.py
@@ -93,6 +93,9 @@ def stop_logging(self, **kwargs):
def lookup_events(self, **kwargs):
return None
+ def get_trail_status(self, **kwargs):
+ return {"IsLogging": True}
+
def get_paginator(self, _):
class Paginator:
def paginate(self, **kwargs):
@@ -297,3 +300,20 @@ def test_cloudtrail_lookup_events(mocker, aws_cloudtrail, return_results_func):
command_result: CommandResults = return_results_func.call_args[0][0]
outputs: list[dict] = command_result.outputs
assert outputs[0]["Username"] == "user"
+
+
+def test_cloudtrail_get_trail_status(mocker, aws_cloudtrail, return_results_func):
+ """
+ Given
+ - demisto args
+ When
+ - running aws-cloudtrail-get-trail-status command
+ Then
+ - Ensure the command result is returned as expected
+ """
+ args = {"name": "name"}
+ mock_command(mocker, aws_cloudtrail, "aws-cloudtrail-get-trail-status", args)
+ aws_cloudtrail.main()
+ command_result: CommandResults = return_results_func.call_args[0][0]
+ outputs: dict = command_result.outputs
+ assert "IsLogging" in outputs
diff --git a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/README.md b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/README.md
index d482f8aed812..42c7017dd569 100644
--- a/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/README.md
+++ b/Packs/AWS-CloudTrail/Integrations/AWS-CloudTrail/README.md
@@ -1,745 +1,289 @@
-
-
AWS CloudTrail is a service that enables governance, compliance, operational auditing, and risk auditing of your AWS account. With CloudTrail, you can log, continuously monitor, and retain account activity related to actions across your AWS infrastructure. CloudTrail provides event history of your AWS account activity, including actions taken through the AWS Management Console, AWS SDKs, command line tools, and other AWS services. This event history simplifies security analysis, resource change tracking, and troubleshooting. For more information, see the AWS CloudTrail documentation.
-
Configure AWS CloudTrail on Cortex XSOAR
-
-
Navigate to Settings > Integrations > Servers & Services.
-
Search for AWS - CloudTrail.
-
Click Add instance to create and configure a new integration instance.
-
-
-Name: a textual name for the integration instance.
-
-Default Region:
-
Role Arn
-
Role Session Name
-
Role Session Duration
-
-
-
Click Test to validate the URLs, token, and connection.
-
-
Commands
-
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook. After you successfully execute a command, a DBot message appears in the War Room with the command details.
Creates a trail that specifies the settings for delivery of log data to an Amazon S3 bucket. A maximum of five trails can exist in a region, irrespective of the region in which they were created.
-
Base Command
-
aws-cloudtrail-create-trail
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
name
-
Specifies the name of the trail
-
Required
-
-
-
s3BucketName
-
Specifies the name of the Amazon S3 bucket designated for publishing log files
-
Required
-
-
-
s3KeyPrefix
-
Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery
-
Optional
-
-
-
snsTopicName
-
Specifies the name of the Amazon SNS topic defined for notification of log file delivery
-
Optional
-
-
-
includeGlobalServiceEvents
-
Specifies whether the trail is publishing events from global services, such as IAM, to the log files
-
Optional
-
-
-
isMultiRegionTrail
-
Specifies whether the trail is created in the current region or in all regions. The default is false.
-
Optional
-
-
-
enableLogFileValidation
-
Specifies whether log file integrity validation is enabled. The default is false.
-
Optional
-
-
-
cloudWatchLogsLogGroupArn
-
Specifies a log group name using an Amazon Resource Name (ARN), a unique identifier that represents the log group to which CloudTrail logs will be delivered. Not required unless you specify CloudWatchLogsRoleArn.
-
Optional
-
-
-
cloudWatchLogsRoleArn
-
Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group
-
Optional
-
-
-
kmsKeyId
-
Specifies the KMS key ID to use to encrypt the logs delivered by CloudTrail. The value can be an alias name prefixed by "alias/", a fully specified ARN to an alias, a fully specified ARN to a key, or a globally unique identifier.
-
Optional
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
-
Optional
-
-
-
-
-
Context Output
-
-
-
-
Path
-
Type
-
Description
-
-
-
-
-
AWS.CloudTrail.Trails.Name
-
string
-
Specifies the name of the trail
-
-
-
AWS.CloudTrail.Trails.S3BucketName
-
string
-
Specifies the name of the Amazon S3 bucket designated for publishing log files
-
-
-
AWS.CloudTrail.Trails.IncludeGlobalServiceEvents
-
boolean
-
Specifies whether the trail is publishing events from global services such as IAM to the log files
-
-
-
AWS.CloudTrail.Trails.IsMultiRegionTrail
-
boolean
-
Specifies whether the trail exists in one region or in all regions
-
-
-
AWS.CloudTrail.Trails.TrailARN
-
string
-
Specifies the ARN of the trail that was created
-
-
-
AWS.CloudTrail.Trails.LogFileValidationEnabled
-
boolean
-
Specifies whether log file integrity validation is enabled
-
-
-
AWS.CloudTrail.Trails.SnsTopicARN
-
string
-
Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered
-
-
-
AWS.CloudTrail.Trails.S3KeyPrefix
-
string
-
Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn
-
string
-
Specifies the Amazon Resource Name (ARN) of the log group to which CloudTrail logs will be delivered
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsRoleArn
-
string
-
Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group
-
-
-
AWS.CloudTrail.Trails.KmsKeyId
-
string
-
Specifies the KMS key ID that encrypts the logs delivered by CloudTrail
Deletes a trail. This operation must be called from the region in which the trail was created. DeleteTrail cannot be called on the shadow trails (replicated trails in other regions) of a trail that is enabled in all regions.
-
Base Command
-
aws-cloudtrail-delete-trail
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
name
-
Specifies the name or the CloudTrail ARN of the trail to be deleted. The format of a trail ARN is: arn:aws:cloudtrail:us-east-1:123456789012:trail/MyTrail
-
Required
-
-
-
-
-
Context Output
-
There is no context output for this command.
-
Command Example
-
!aws-cloudtrail-delete-trail name=test
-
Human Readable Output
-
-
3. Get the settings of a trail
-
-
Retrieves settings for the trail associated with the current region for your account.
-
Base Command
-
aws-cloudtrail-describe-trails
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
trailNameList
-
Specifies a list of trail names, trail ARNs, or both, of the trails to describe. If an empty list is specified, information for the trail in the current region is returned.
-
False
-
-
-
includeShadowTrails
-
Specifies whether to include shadow trails in the response. A shadow trail is the replication in a region of a trail that was created in a different region. The default is "true".
-
Optional
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
-
Optional
-
-
-
-
-
Context Output
-
-
-
-
Path
-
Type
-
Description
-
-
-
-
-
AWS.CloudTrail.Trails.Name
-
string
-
Name of the trail set by calling CreateTrail
-
-
-
AWS.CloudTrail.Trails.S3BucketName
-
string
-
Name of the Amazon S3 bucket into which CloudTrail delivers your trail files
-
-
-
AWS.CloudTrail.Trails.S3KeyPrefix
-
string
-
Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery
-
-
-
AWS.CloudTrail.Trails.SnsTopicARN
-
string
-
Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered
-
-
-
AWS.CloudTrail.Trails.IncludeGlobalServiceEvents
-
boolean
-
Set to "True" to include AWS API calls from AWS global services such as IAM. Otherwise, "False".
-
-
-
AWS.CloudTrail.Trails.IsMultiRegionTrail
-
boolean
-
Specifies whether the trail belongs only to one region or exists in all regions
-
-
-
AWS.CloudTrail.Trails.HomeRegion
-
string
-
The region in which the trail was created
-
-
-
AWS.CloudTrail.Trails.TrailARN
-
string
-
Specifies the ARN of the trail
-
-
-
AWS.CloudTrail.Trails.LogFileValidationEnabled
-
boolean
-
Specifies whether log file validation is enabled
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn
-
string
-
Specifies an Amazon Resource Name (ARN), a unique identifier that represents the log group to which CloudTrail logs will be delivered
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsRoleArn
-
string
-
Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group
-
-
-
AWS.CloudTrail.KmsKeyId
-
string
-
Specifies the KMS key ID that encrypts the logs delivered by CloudTrail
-
-
-
AWS.CloudTrail.HasCustomEventSelectors
-
boolean
-
Specifies if the trail has custom event selectors
-
-
-
-
-
Command Example
-
!aws-cloudtrail-describe-trails
-
Context Example
-
-
Human Readable Output
-
-
4. Update a trail
-
-
Updates the settings that specify delivery of log files. Changes to a trail do not require stopping the CloudTrail service.
-
Base Command
-
aws-cloudtrail-update-trail
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
name
-
Specifies the name of the trail or trail ARN
-
Required
-
-
-
s3BucketName
-
Specifies the name of the Amazon S3 bucket designated for publishing log files
-
Optional
-
-
-
s3KeyPrefix
-
Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery
-
Optional
-
-
-
snsTopicName
-
Specifies the name of the Amazon SNS topic defined for notification of log file delivery
-
Optional
-
-
-
includeGlobalServiceEvents
-
Specifies whether the trail is publishing events from global services such as IAM to the log files
-
Optional
-
-
-
isMultiRegionTrail
-
Specifies whether the trail applies only to the current region or to all regions. The default is false. If the trail exists only in the current region and this value is set to true, shadow trails (replications of the trail) will be created in the other regions. If the trail exists in all regions and this value is set to false, the trail will remain in the region where it was created, and its shadow trails in other regions will be deleted.
-
Optional
-
-
-
enableLogFileValidation
-
Specifies whether log file validation is enabled. The default is false.
-
Optional
-
-
-
cloudWatchLogsLogGroupArn
-
Specifies a log group name using an Amazon Resource Name (ARN), a unique identifier that represents the log group to which CloudTrail logs will be delivered. Not required unless you specify CloudWatchLogsRoleArn.
-
Optional
-
-
-
cloudWatchLogsRoleArn
-
Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group
-
Optional
-
-
-
kmsKeyId
-
Specifies the KMS key ID to use to encrypt the logs delivered by CloudTrail
-
Optional
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
-
Optional
-
-
-
-
-
Context Output
-
-
-
-
Path
-
Type
-
Description
-
-
-
-
-
AWS.CloudTrail.Trails.Name
-
string
-
Specifies the name of the trail
-
-
-
AWS.CloudTrail.Trails.S3BucketName
-
string
-
Specifies the name of the Amazon S3 bucket designated for publishing log files
-
-
-
AWS.CloudTrail.Trails.IncludeGlobalServiceEvents
-
boolean
-
Specifies whether the trail is publishing events from global services such as IAM to the log files
-
-
-
AWS.CloudTrail.Trails.IsMultiRegionTrail
-
boolean
-
Specifies whether the trail exists in one region or in all regions
-
-
-
AWS.CloudTrail.Trails.TrailARN
-
string
-
Specifies the ARN of the trail that was created
-
-
-
AWS.CloudTrail.Trails.LogFileValidationEnabled
-
boolean
-
Specifies whether log file integrity validation is enabled
-
-
-
AWS.CloudTrail.Trails.SnsTopicARN
-
string
-
Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered
-
-
-
AWS.CloudTrail.Trails.S3KeyPrefix
-
string
-
Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn
-
string
-
Specifies the Amazon Resource Name (ARN) of the log group to which CloudTrail logs will be delivered
-
-
-
AWS.CloudTrail.Trails.CloudWatchLogsRoleArn
-
string
-
Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group
-
-
-
AWS.CloudTrail.Trails.KmsKeyId
-
string
-
Specifies the KMS key ID that encrypts the logs delivered by CloudTrail
Starts the recording of AWS API calls and log file delivery for a trail. For a trail that is enabled in all regions, this operation must be called from the region in which the trail was created. This operation cannot be called on the shadow trails (replicated trails in other regions) of a trail that is enabled in all regions.
-
Base Command
-
aws-cloudtrail-start-logging
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
name
-
Specifies the name or the CloudTrail ARN of the trail for which CloudTrail logs AWS API calls
-
Required
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
-
Optional
-
-
-
-
-
Context Output
-
There is no context output for this command.
-
Command Example
-
!aws-cloudtrail-start-logging name=test
-
Context Example
-
There is no context output for this command.
-
Human Readable Output
-
-
6. Stop recording logs
-
-
Suspends the recording of AWS API calls and log file delivery for the specified trail. Under most circumstances, there is no need to use this action. You can update a trail without stopping it first. This action is the only way to stop recording. For a trail enabled in all regions, this operation must be called from the region in which the trail was created, or an InvalidHomeRegionException will occur. This operation cannot be called on the shadow trails (replicated trails in other regions) of a trail enabled in all regions.
-
Base Command
-
aws-cloudtrail-stop-logging
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
name
-
Specifies the name or the CloudTrail ARN of the trail for which CloudTrail logs AWS API calls
-
Required
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role
-
Optional
-
-
-
-
-
Context Output
-
There is no context output for this command.
-
Command Example
-
!aws-cloudtrail-stop-logging name=test
-
Context Example
-
There is no context output for this command.
-
Human Readable Output
-
-
7. Search API activity events
-
-
Looks up API activity events captured by CloudTrail that create, update, or delete resources in your account. Events for a region can be looked up for the times in which you had CloudTrail turned on in that region during the last seven days.
-
Base Command
-
aws-cloudtrail-lookup-events
-
Input
-
-
-
-
Argument Name
-
Description
-
Required
-
-
-
-
-
attributeKey
-
Specifies an attribute on which to filter the returned events
-
Required
-
-
-
attributeValue
-
Specifies a value for the specified AttributeKey
-
-
Required
-
-
-
startTime
-
Specifies that only events that occur on or after the specified time are returned
-
Optional
-
-
-
endTime
-
Specifies that only events that occur on or before the specified time are returned
-
Optional
-
-
-
region
-
The AWS Region, if not specified the default region will be used
-
Optional
-
-
-
roleArn
-
The Amazon Resource Name (ARN) of the role to assume
-
Optional
-
-
-
roleSessionName
-
An identifier for the assumed role session
-
Optional
-
-
-
roleSessionDuration
-
The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
-
Optional
-
-
-
-
-
Context Output
-
-
-
-
Path
-
Type
-
Description
-
-
-
-
-
AWS.CloudTrail.Trails.Events.EventId
-
string
-
The CloudTrail ID of the returned event
-
-
-
AWS.CloudTrail.Trails.Events.EventName
-
string
-
The name of the returned event
-
-
-
AWS.CloudTrail.Trails.Events.EventTime
-
date
-
The date and time of the returned event
-
-
-
AWS.CloudTrail.Trails.Events.EventSource
-
string
-
The AWS service that the request was made to
-
-
-
AWS.CloudTrail.Trails.Events.Username
-
string
-
User name or role name of the requester that called the API in the event returned
-
-
-
AWS.CloudTrail.Trails.Events.ResourceName
-
string
-
The type of a resource referenced by the event returned. When the resource type cannot be determined, null is returned. Some examples of resource types are: Instance for EC2, Trail for CloudTrail, DBInstance for RDS, and AccessKey for IAM.
-
-
-
AWS.CloudTrail.Trails.Events.ResourceType
-
string
-
The name of the resource referenced by the event returned. These are user-created names whose values will depend on the environment. For example, the resource name might be "auto-scaling-test-group" for an Auto Scaling Group or "i-1234567" for an EC2 Instance.
-
-
-
AWS.CloudTrail.Trails.Events.CloudTrailEvent
-
string
-
A JSON string that contains a representation of the returned event
\ No newline at end of file
+Amazon Web Services CloudTrail.
+This integration was integrated and tested with version 1.0.11 of AWS - CloudTrail.
+
+## Configure AWS - CloudTrail on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for AWS - CloudTrail.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Required** |
+ | --- | --- |
+ | AWS Default Region | False |
+ | Role Arn | False |
+ | Role Session Name | False |
+ | Role Session Duration | False |
+ | Access Key | False |
+ | Secret Key | False |
+ | Access Key | False |
+ | Secret Key | False |
+ | Trust any certificate (not secure) | False |
+ | Use system proxy settings | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### aws-cloudtrail-create-trail
+
+***
+Creates a trail that specifies the settings for delivery of log data to an Amazon S3 bucket. A maximum of five trails can exist in a region, irrespective of the region in which they were created.
+
+#### Base Command
+
+`aws-cloudtrail-create-trail`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| name | Specifies the name of the trail. | Required |
+| s3BucketName | Specifies the name of the Amazon S3 bucket designated for publishing log files. | Required |
+| s3KeyPrefix | Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery. | Optional |
+| snsTopicName | Specifies the name of the Amazon SNS topic defined for notification of log file delivery. | Optional |
+| includeGlobalServiceEvents | Specifies whether the trail is publishing events from global services such as IAM to the log files. Possible values are: True, False. | Optional |
+| isMultiRegionTrail | Specifies whether the trail is created in the current region or in all regions. The default is false. Possible values are: True, False. | Optional |
+| enableLogFileValidation | Specifies whether log file integrity validation is enabled. The default is false. Possible values are: True, False. | Optional |
+| cloudWatchLogsLogGroupArn | Specifies a log group name using an Amazon Resource Name (ARN), a unique identifier that represents the log group to which CloudTrail logs will be delivered. Not required unless you specify CloudWatchLogsRoleArn. | Optional |
+| cloudWatchLogsRoleArn | Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group. | Optional |
+| kmsKeyId | Specifies the KMS key ID to use to encrypt the logs delivered by CloudTrail. The value can be an alias name prefixed by "alias/", a fully specified ARN to an alias, a fully specified ARN to a key, or a globally unique identifier. | Optional |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.CloudTrail.Trails.Name | string | Specifies the name of the trail. |
+| AWS.CloudTrail.Trails.S3BucketName | string | Specifies the name of the Amazon S3 bucket designated for publishing log files. |
+| AWS.CloudTrail.Trails.IncludeGlobalServiceEvents | boolean | Specifies whether the trail is publishing events from global services such as IAM to the log files. |
+| AWS.CloudTrail.Trails.IsMultiRegionTrail | boolean | Specifies whether the trail exists in one region or in all regions. |
+| AWS.CloudTrail.Trails.TrailARN | string | Specifies the ARN of the trail that was created. |
+| AWS.CloudTrail.Trails.LogFileValidationEnabled | boolean | Specifies whether log file integrity validation is enabled. |
+| AWS.CloudTrail.Trails.SnsTopicARN | string | Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered. |
+| AWS.CloudTrail.Trails.S3KeyPrefix | string | Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery. |
+| AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn | string | Specifies the Amazon Resource Name \(ARN\) of the log group to which CloudTrail logs will be delivered. |
+| AWS.CloudTrail.Trails.CloudWatchLogsRoleArn | string | Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group. |
+| AWS.CloudTrail.Trails.KmsKeyId | string | Specifies the KMS key ID that encrypts the logs delivered by CloudTrail. |
+| AWS.CloudTrail.Trails.HomeRegion | string | The region in which the trail was created. |
+
+### aws-cloudtrail-delete-trail
+
+***
+Deletes a trail. This operation must be called from the region in which the trail was created. DeleteTrail cannot be called on the shadow trails (replicated trails in other regions) of a trail that is enabled in all regions.
+
+#### Base Command
+
+`aws-cloudtrail-delete-trail`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| name | Specifies the name or the CloudTrail ARN of the trail to be deleted. The format of a trail ARN is: arn:aws:cloudtrail:us-east-1:123456789012:trail/MyTrail. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+### aws-cloudtrail-describe-trails
+
+***
+Retrieves settings for the trail associated with the current region for your account.
+
+#### Base Command
+
+`aws-cloudtrail-describe-trails`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| trailNameList | Specifies a list of trail names, trail ARNs, or both, of the trails to describe. If an empty list is specified, information for the trail in the current region is returned. | Optional |
+| includeShadowTrails | Specifies whether to include shadow trails in the response. A shadow trail is the replication in a region of a trail that was created in a different region. The default is true. Possible values are: True, False. | Optional |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.CloudTrail.Trails.Name | string | Name of the trail set by calling CreateTrail. |
+| AWS.CloudTrail.Trails.S3BucketName | string | Name of the Amazon S3 bucket into which CloudTrail delivers your trail files. |
+| AWS.CloudTrail.Trails.S3KeyPrefix | string | Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery. |
+| AWS.CloudTrail.Trails.SnsTopicARN | string | Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered. |
+| AWS.CloudTrail.Trails.IncludeGlobalServiceEvents | boolean | Set to True to include AWS API calls from AWS global services such as IAM. Otherwise, False. |
+| AWS.CloudTrail.Trails.IsMultiRegionTrail | boolean | Specifies whether the trail belongs only to one region or exists in all regions. |
+| AWS.CloudTrail.Trails.HomeRegion | string | The region in which the trail was created. |
+| AWS.CloudTrail.Trails.TrailARN | string | Specifies the ARN of the trail. |
+| AWS.CloudTrail.Trails.LogFileValidationEnabled | boolean | Specifies whether log file validation is enabled. |
+| AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn | string | Specifies an Amazon Resource Name \(ARN\), a unique identifier that represents the log group to which CloudTrail logs will be delivered. |
+| AWS.CloudTrail.Trails.CloudWatchLogsRoleArn | string | Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group. |
+| AWS.CloudTrail.KmsKeyId | string | Specifies the KMS key ID that encrypts the logs delivered by CloudTrail. |
+| AWS.CloudTrail.HasCustomEventSelectors | boolean | Specifies if the trail has custom event selectors. |
+
+### aws-cloudtrail-update-trail
+
+***
+Updates the settings that specify delivery of log files. Changes to a trail do not require stopping the CloudTrail service.
+
+#### Base Command
+
+`aws-cloudtrail-update-trail`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| name | Specifies the name of the trail or trail ARN. | Required |
+| s3BucketName | Specifies the name of the Amazon S3 bucket designated for publishing log files. | Optional |
+| s3KeyPrefix | Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery. | Optional |
+| snsTopicName | Specifies the name of the Amazon SNS topic defined for notification of log file delivery. | Optional |
+| includeGlobalServiceEvents | Specifies whether the trail is publishing events from global services such as IAM to the log files. | Optional |
+| isMultiRegionTrail | Specifies whether the trail applies only to the current region or to all regions. The default is false. If the trail exists only in the current region and this value is set to true, shadow trails (replications of the trail) will be created in the other regions. If the trail exists in all regions and this value is set to false, the trail will remain in the region where it was created, and its shadow trails in other regions will be deleted. | Optional |
+| enableLogFileValidation | Specifies whether log file validation is enabled. The default is false. | Optional |
+| cloudWatchLogsLogGroupArn | Specifies a log group name using an Amazon Resource Name (ARN), a unique identifier that represents the log group to which CloudTrail logs will be delivered. Not required unless you specify CloudWatchLogsRoleArn. | Optional |
+| cloudWatchLogsRoleArn | Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group. | Optional |
+| kmsKeyId | Specifies the KMS key ID to use to encrypt the logs delivered by CloudTrail. | Optional |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.CloudTrail.Trails.Name | string | Specifies the name of the trail. |
+| AWS.CloudTrail.Trails.S3BucketName | string | Specifies the name of the Amazon S3 bucket designated for publishing log files. |
+| AWS.CloudTrail.Trails.IncludeGlobalServiceEvents | boolean | Specifies whether the trail is publishing events from global services such as IAM to the log files. |
+| AWS.CloudTrail.Trails.IsMultiRegionTrail | boolean | Specifies whether the trail exists in one region or in all regions. |
+| AWS.CloudTrail.Trails.TrailARN | string | Specifies the ARN of the trail that was created. |
+| AWS.CloudTrail.Trails.LogFileValidationEnabled | boolean | Specifies whether log file integrity validation is enabled. |
+| AWS.CloudTrail.Trails.SnsTopicARN | string | Specifies the ARN of the Amazon SNS topic that CloudTrail uses to send notifications when log files are delivered. |
+| AWS.CloudTrail.Trails.S3KeyPrefix | string | Specifies the Amazon S3 key prefix that comes after the name of the bucket you have designated for log file delivery. |
+| AWS.CloudTrail.Trails.CloudWatchLogsLogGroupArn | string | Specifies the Amazon Resource Name \(ARN\) of the log group to which CloudTrail logs will be delivered. |
+| AWS.CloudTrail.Trails.CloudWatchLogsRoleArn | string | Specifies the role for the CloudWatch Logs endpoint to assume to write to a user's log group. |
+| AWS.CloudTrail.Trails.KmsKeyId | string | Specifies the KMS key ID that encrypts the logs delivered by CloudTrail. |
+| AWS.CloudTrail.Trails.HomeRegion | string | The region in which the trail was created. |
+
+### aws-cloudtrail-start-logging
+
+***
+Starts the recording of AWS API calls and log file delivery for a trail. For a trail that is enabled in all regions, this operation must be called from the region in which the trail was created. This operation cannot be called on the shadow trails (replicated trails in other regions) of a trail that is enabled in all regions.
+
+#### Base Command
+
+`aws-cloudtrail-start-logging`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| name | Specifies the name or the CloudTrail ARN of the trail for which CloudTrail logs AWS API calls. | Required |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### aws-cloudtrail-stop-logging
+
+***
+Suspends the recording of AWS API calls and log file delivery for the specified trail. Under most circumstances, there is no need to use this action. You can update a trail without stopping it first. This action is the only way to stop recording. For a trail enabled in all regions, this operation must be called from the region in which the trail was created, or an InvalidHomeRegionException will occur. This operation cannot be called on the shadow trails (replicated trails in other regions) of a trail enabled in all regions.
+
+#### Base Command
+
+`aws-cloudtrail-stop-logging`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| name | Specifies the name or the CloudTrail ARN of the trail for which CloudTrail logs AWS API calls. | Required |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### aws-cloudtrail-lookup-events
+
+***
+Looks up API activity events captured by CloudTrail that create, update, or delete resources in your account. Events for a region can be looked up for the times in which you had CloudTrail turned on in that region during the last seven days.
+
+#### Base Command
+
+`aws-cloudtrail-lookup-events`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| attributeKey | Specifies an attribute on which to filter the events returned. Possible values are: AccessKeyId, EventId, EventName, Username, ResourceType, ResourceName, EventSource, ReadOnly. | Required |
+| attributeValue | Specifies a value for the specified AttributeKey. | Required |
+| startTime | Specifies that only events that occur after or at the specified time are returned. | Optional |
+| endTime | Specifies that only events that occur before or at the specified time are returned. | Optional |
+| region | The AWS Region, if not specified the default region will be used. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.CloudTrail.Events.EventId | string | The CloudTrail ID of the event returned. |
+| AWS.CloudTrail.Events.EventName | string | The name of the event returned. |
+| AWS.CloudTrail.Events.EventTime | date | The date and time of the event returned. |
+| AWS.CloudTrail.Events.EventSource | string | The AWS service that the request was made to. |
+| AWS.CloudTrail.Events.Username | string | A user name or role name of the requester that called the API in the event returned. |
+| AWS.CloudTrail.Events.ResourceName | string | The name of the resource referenced by the event returned. These are user-created names whose values will depend on the environment. For example, the resource name might be "auto-scaling-test-group" for an Auto Scaling Group or "i-1234567" for an EC2 Instance. |
+| AWS.CloudTrail.Events.ResourceType | string | The type of a resource referenced by the event returned. When the resource type cannot be determined, null is returned. Some examples of resource types are: Instance for EC2, Trail for CloudTrail, DBInstance for RDS, and AccessKey for IAM. |
+| AWS.CloudTrail.Events.CloudTrailEvent | string | A JSON string that contains a representation of the event returned. |
+
+### aws-cloudtrail-get-trail-status
+
+***
+Returns a JSON-formatted list of information about the specified trail. Fields include information on delivery errors, Amazon SNS and Amazon S3 errors, and start and stop logging times for each trail.
+
+#### Base Command
+
+`aws-cloudtrail-get-trail-status`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| trailNameList | Specifies the names of multiple trails. | Optional |
+| region | Specifies the region of the trail. | Required |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+| name | Specifies the name of the trail. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.CloudTrail.TrailStatus.IsLogging | boolean | Whether the CloudTrail trail is currently logging Amazon Web Services API calls. |
+| AWS.CloudTrail.TrailStatus.LatestDeliveryError | string | Displays any Amazon S3 error that CloudTrail encountered when attempting to deliver log files to the designated bucket. |
+| AWS.CloudTrail.TrailStatus.LatestNotificationError | string | Displays any Amazon SNS error that CloudTrail encountered when attempting to send a notification. |
+| AWS.CloudTrail.TrailStatus.LatestDeliveryTime | date | Specifies the date and time that CloudTrail last delivered log files to an account’s Amazon S3 bucket. |
+| AWS.CloudTrail.TrailStatus.LatestNotificationTime | date | Specifies the date and time of the most recent Amazon SNS notification that CloudTrail has written a new log file to an account’s Amazon S3 bucket. |
+| AWS.CloudTrail.TrailStatus.StartLoggingTime | date | Specifies the most recent date and time when CloudTrail started recording API calls for an Amazon Web Services account. |
+| AWS.CloudTrail.TrailStatus.StopLoggingTime | date | Specifies the most recent date and time when CloudTrail stopped recording API calls for an Amazon Web Services account. |
+| AWS.CloudTrail.TrailStatus.LatestCloudWatchLogsDeliveryError | string | Displays any CloudWatch Logs error that CloudTrail encountered when attempting to deliver logs to CloudWatch Logs. |
+| AWS.CloudTrail.TrailStatus.LatestCloudWatchLogsDeliveryTime | date | Displays the most recent date and time when CloudTrail delivered logs to CloudWatch Logs. |
+| AWS.CloudTrail.TrailStatus.LatestDigestDeliveryTime | date | Specifies the date and time that CloudTrail last delivered a digest file to an account’s Amazon S3 bucket. |
+| AWS.CloudTrail.TrailStatus.LatestDigestDeliveryError | string | Displays any Amazon S3 error that CloudTrail encountered when attempting to deliver a digest file to the designated bucket. |
diff --git a/Packs/AWS-CloudTrail/ReleaseNotes/1_0_11.md b/Packs/AWS-CloudTrail/ReleaseNotes/1_0_11.md
new file mode 100644
index 000000000000..9885337e7ce1
--- /dev/null
+++ b/Packs/AWS-CloudTrail/ReleaseNotes/1_0_11.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - CloudTrail
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88114*.
diff --git a/Packs/AWS-CloudTrail/ReleaseNotes/1_0_12.md b/Packs/AWS-CloudTrail/ReleaseNotes/1_0_12.md
new file mode 100644
index 000000000000..0d5d1f211a1f
--- /dev/null
+++ b/Packs/AWS-CloudTrail/ReleaseNotes/1_0_12.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - CloudTrail
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.89556*.
diff --git a/Packs/AWS-CloudTrail/ReleaseNotes/1_1_0.md b/Packs/AWS-CloudTrail/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..093334440231
--- /dev/null
+++ b/Packs/AWS-CloudTrail/ReleaseNotes/1_1_0.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AWS - CloudTrail
+
+- Added the **aws-cloudtrail-get-trail-status** command. Use this command to get information about the trail status.
\ No newline at end of file
diff --git a/TestPlaybooks/NonCircleTests/playbook-AWS-CloudTrail_Test_.yml b/Packs/AWS-CloudTrail/TestPlaybooks/playbook-AWS-CloudTrail_Test_.yml
similarity index 99%
rename from TestPlaybooks/NonCircleTests/playbook-AWS-CloudTrail_Test_.yml
rename to Packs/AWS-CloudTrail/TestPlaybooks/playbook-AWS-CloudTrail_Test_.yml
index 43d31bee1fc0..867a548137a6 100644
--- a/TestPlaybooks/NonCircleTests/playbook-AWS-CloudTrail_Test_.yml
+++ b/Packs/AWS-CloudTrail/TestPlaybooks/playbook-AWS-CloudTrail_Test_.yml
@@ -1,4 +1,4 @@
-id: 3da2e31b-f114-4d7f-8702-117f3b498de9
+id: AWS - CloudTrail Test Playbook
version: -1
fromversion: 5.0.0
name: AWS - CloudTrail Test Playbook
diff --git a/Packs/AWS-CloudTrail/pack_metadata.json b/Packs/AWS-CloudTrail/pack_metadata.json
index 15c81b4fd6e0..1471a042792a 100644
--- a/Packs/AWS-CloudTrail/pack_metadata.json
+++ b/Packs/AWS-CloudTrail/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - CloudTrail",
"description": "Amazon Web Services CloudTrail.",
"support": "xsoar",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.1.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-CloudWatchLogs/CONTRIBUTORS.json b/Packs/AWS-CloudWatchLogs/CONTRIBUTORS.json
new file mode 100644
index 000000000000..ce9d08a30890
--- /dev/null
+++ b/Packs/AWS-CloudWatchLogs/CONTRIBUTORS.json
@@ -0,0 +1,3 @@
+[
+ "Fabio Dias"
+]
diff --git a/Packs/AWS-CloudWatchLogs/Integrations/AWS-CloudWatchLogs/AWS-CloudWatchLogs.yml b/Packs/AWS-CloudWatchLogs/Integrations/AWS-CloudWatchLogs/AWS-CloudWatchLogs.yml
index ec96f2a7f08a..5d9018f63cc4 100644
--- a/Packs/AWS-CloudWatchLogs/Integrations/AWS-CloudWatchLogs/AWS-CloudWatchLogs.yml
+++ b/Packs/AWS-CloudWatchLogs/Integrations/AWS-CloudWatchLogs/AWS-CloudWatchLogs.yml
@@ -65,6 +65,11 @@ configuration:
section: Connect
advanced: true
required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+ section: Connect
script:
script: ''
type: python
@@ -335,7 +340,7 @@ script:
description: The name of the log stream.
- name: timestamp
required: true
- description: The time the event occurred, expressed as the number of milliseconds fter Jan 1, 1970 00:00:00 UTC. (Unix Time)
+ description: The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. (Unix Time).
- name: message
required: true
description: The raw event message.
@@ -447,7 +452,7 @@ script:
description: The name of the log group.
type: string
description: Lists the specified metric filters. You can list all the metric filters or filter the results by log name, prefix, metric name, or metric namespace.
- dockerimage: demisto/boto3py3:1.0.0.52713
+ dockerimage: demisto/boto3py3:1.0.0.88114
tests:
- No Tests
fromversion: 5.0.0
diff --git a/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_19.md b/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_19.md
new file mode 100644
index 000000000000..1a451b1b6985
--- /dev/null
+++ b/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_19.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### AWS - CloudWatchLogs
+
+- Updated the Docker image to: demisto/boto3py3:1.0.0.87655.
+
diff --git a/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_20.md b/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_20.md
new file mode 100644
index 000000000000..deb4c79ae5aa
--- /dev/null
+++ b/Packs/AWS-CloudWatchLogs/ReleaseNotes/1_2_20.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### AWS - CloudWatchLogs
+
+- Added the *Use system proxy settings* parameter.
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88114*.
diff --git a/Packs/AWS-CloudWatchLogs/pack_metadata.json b/Packs/AWS-CloudWatchLogs/pack_metadata.json
index 5a44101bb0a2..8168a7f79e79 100644
--- a/Packs/AWS-CloudWatchLogs/pack_metadata.json
+++ b/Packs/AWS-CloudWatchLogs/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - CloudWatchLogs",
"description": "Amazon Web Services CloudWatch Logs (logs).",
"support": "xsoar",
- "currentVersion": "1.2.18",
+ "currentVersion": "1.2.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -18,4 +18,4 @@
"marketplacev2",
"xpanse"
]
-}
\ No newline at end of file
+}
diff --git a/Packs/AWS-Enrichment-Remediation/ReleaseNotes/1_1_16.md b/Packs/AWS-Enrichment-Remediation/ReleaseNotes/1_1_16.md
new file mode 100644
index 000000000000..627b0169c684
--- /dev/null
+++ b/Packs/AWS-Enrichment-Remediation/ReleaseNotes/1_1_16.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### AWSRecreateSG
+
+- Fixed an issue where the script failed with the latest version of the `AWS - EC2` integration.
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
+
+**NOTE:** The `AWS - EC2` integration version must be greater than `1.4.0` for this script to run.
diff --git a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.py b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.py
index 2890f2ebae7a..c221214a2a79 100644
--- a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.py
+++ b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.py
@@ -1,7 +1,6 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-
from typing import Any
import traceback
from random import randint
@@ -9,28 +8,6 @@
ROLE_SESSION_NAME = "xsoar-session"
-def get_context_path(context: dict, path: str):
- """Get a context output ignoring the DT suffix.
-
- Args:
- context (dict): The context output with DT paths as keys.
- path (str): The outputs prefix path without the DT transform under which the required data is held.
-
- Return:
- (Any): The context data under the prefix.
-
- Example:
- >>> output = demisto.executeCommand('aws-ec2-describe-addresses')
- >>> output
- {'Contents': {'path.to.data(val.Id && val.Id == obj.Id)': [1, 2, 3, 4]}}
- >>> get_context_path(output, 'path.to.data')
- [1, 2, 3, 4]
- """
- return context.get(
- next((key for key in context if key.partition('(')[0] == path), None)
- )
-
-
def split_rule(rule: dict, port: int, protocol: str) -> list[dict]:
"""
If there are rules with ranges of ports, split them up
@@ -93,7 +70,7 @@ def sg_fix(sg_info: list, port: int, protocol: str, assume_role: str, instance_t
Returns:
Dict: Dict of the new SG to be used
"""
- info = get_context_path(sg_info[0]['Contents'], 'AWS.EC2.SecurityGroups')[0] # type: ignore
+ info = dict_safe_get(sg_info, (0, 'Contents', 0))
recreate_list = []
# Keep track of change in SG or not.
change = False
@@ -109,13 +86,6 @@ def sg_fix(sg_info: list, port: int, protocol: str, assume_role: str, instance_t
and rule['IpProtocol'] == protocol
):
change = True
- elif (
- rule["FromPort"] == port and port == rule["ToPort"]
- and any(d["CidrIp"] == "0.0.0.0/0" for d in rule["IpRanges"])
- and rule["IpProtocol"] == protocol
- ):
- # If condition to check for Quad 0 in the rules list for matching port.
- change = True
elif (
rule['FromPort'] <= port and port <= rule['ToPort']
and any(d["CidrIp"] == "0.0.0.0/0" for d in rule["IpRanges"])
@@ -160,7 +130,7 @@ def sg_fix(sg_info: list, port: int, protocol: str, assume_role: str, instance_t
new_sg = demisto.executeCommand("aws-ec2-create-security-group", cmd_args)
if isError(new_sg):
raise ValueError('Error on creating new security group')
- new_id = new_sg[0]['Contents']['AWS.EC2.SecurityGroups']['GroupId']
+ new_id = dict_safe_get(new_sg, (0, 'Contents', 'GroupId'))
for item in recreate_list:
cmd_args = {"groupId": new_id, "IpPermissionsFull": item, "using": instance_to_use}
if assume_role:
@@ -293,8 +263,7 @@ def instance_info(instance_id: str, public_ip: str, assume_role: str, region: st
# Need a for loop in case multiple AWS-EC2 integrations are configured.
match = False
for instance in instance_info:
- # Check if returned error, in the case of multiple integration instances only one should pass.
- interfaces = get_context_path(instance.get('Contents'), 'AWS.EC2.Instances')[0].get('NetworkInterfaces') # type: ignore
+ interfaces = dict_safe_get(instance, ('Contents', 0, 'NetworkInterfaces'))
if not isError(instance) and interfaces:
mapping_dict = {}
for interface in interfaces:
diff --git a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.yml b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.yml
index edccc4cc15b3..f9f15a05f2c9 100644
--- a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.yml
+++ b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG.yml
@@ -51,7 +51,7 @@ dependson:
- AWS - EC2|||aws-ec2-authorize-security-group-egress-rule
- AWS - EC2|||aws-ec2-revoke-security-group-ingress-rule
- AWS - EC2|||aws-ec2-revoke-security-group-egress-rule
-dockerimage: demisto/python3:3.10.13.84405
+dockerimage: demisto/python3:3.10.13.87159
enabled: true
name: AWSRecreateSG
runas: DBotWeakRole
diff --git a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG_test.py b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG_test.py
index e18a3162abc1..0597bb3ba21f 100644
--- a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG_test.py
+++ b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/AWSRecreateSG_test.py
@@ -72,9 +72,8 @@ def test_sg_fix(mocker):
- Checks the output of the helper function with the expected output.
"""
from AWSRecreateSG import sg_fix
- from test_data.sample import SG_INFO
- new_sg = [{'Type': 1, 'Contents': {'AWS.EC2.SecurityGroups': {'GroupId': 'sg-00000000000000001'}}}]
- mocker.patch.object(demisto, "executeCommand", return_value=new_sg)
+ from test_data.sample import SG_INFO, NEW_SG
+ mocker.patch.object(demisto, "executeCommand", return_value=NEW_SG)
args = {"sg_info": SG_INFO, "port": 22, "protocol": "tcp", "assume_role": "test_role", "instance_to_use": "AWS - EC2",
"region": "us-east-1"}
result = sg_fix(**args)
@@ -92,15 +91,13 @@ def test_determine_excessive_access(mocker):
- Checks the output of the helper function with the expected output.
"""
from AWSRecreateSG import determine_excessive_access
- from test_data.sample import SG_INFO
- new_sg = [{'Type': 1, 'Contents': {'AWS.EC2.SecurityGroups': {'GroupId': 'sg-00000000000000001'}}}]
+ from test_data.sample import SG_INFO, NEW_SG
- def executeCommand(name, args):
- if name == "aws-ec2-describe-security-groups":
- return SG_INFO
- elif name == "aws-ec2-create-security-group":
- return new_sg
- return None
+ def executeCommand(name, *_):
+ return {
+ "aws-ec2-describe-security-groups": SG_INFO,
+ "aws-ec2-create-security-group": NEW_SG
+ }.get(name)
mocker.patch.object(demisto, "executeCommand", side_effect=executeCommand)
args = {"int_sg_mapping": {'eni-00000000000000000': ['sg-00000000000000000']}, "port": 22,
@@ -120,38 +117,18 @@ def test_aws_recreate_sg(mocker):
- Checks the output of the function with the expected output.
"""
from AWSRecreateSG import aws_recreate_sg
- from test_data.sample import SG_INFO, INSTANCE_INFO
- new_sg = [{'Type': 1, 'Contents': {'AWS.EC2.SecurityGroups': {'GroupId': 'sg-00000000000000001'}}}]
-
- def executeCommand(name, args):
- if name == "aws-ec2-describe-security-groups":
- return SG_INFO
- elif name == "aws-ec2-create-security-group":
- return new_sg
- elif name == "aws-ec2-describe-instances":
- return INSTANCE_INFO
- return None
+ from test_data.sample import SG_INFO, INSTANCE_INFO, NEW_SG
- mocker.patch.object(demisto, "executeCommand", side_effect=executeCommand)
+ def execute_command(command, *_):
+ return {
+ "aws-ec2-describe-security-groups": SG_INFO,
+ "aws-ec2-create-security-group": NEW_SG,
+ "aws-ec2-describe-instances": INSTANCE_INFO
+ }.get(command)
+
+ mocker.patch.object(demisto, "executeCommand", side_effect=execute_command)
args = {"instance_id": "fake-instance-id", "public_ip": "1.1.1.1", "port": "22", "protocol": "tcp"}
command_results = aws_recreate_sg(args)
readable_output = command_results.readable_output
correct_output = "For interface eni-00000000000000000: \r\nreplaced SG sg-00000000000000000 with sg-00000000000000001 \r\n"
assert readable_output == correct_output
-
-
-def test_get_context_path():
- """
- Given:
- An output from demisto.excuteCommand('some-command')['Context']
- When:
- Calling demisto.excuteCommand.
- Then:
- Get the context output.
- """
- from AWSRecreateSG import get_context_path
-
- outputs = {'path.to.data(dt_path)': [1, 2, 3, 4]}
-
- assert get_context_path(outputs, 'path.to.data') == [1, 2, 3, 4]
- assert get_context_path(outputs, 'wrong.path') is None
diff --git a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/test_data/sample.py b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/test_data/sample.py
index ccd2e3d7340b..acbaacb9ecd6 100644
--- a/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/test_data/sample.py
+++ b/Packs/AWS-Enrichment-Remediation/Scripts/AWSRecreateSG/test_data/sample.py
@@ -1,105 +1,61 @@
SG_INFO = [
{
"Type": 1,
- "Contents": {
- "AWS.EC2.SecurityGroups(val.GroupId === obj.GroupId)": [
+ "Contents": [
+ {
+ "Description": "sldkjdlskfjs",
+ "GroupId": "sg-00000000000000001",
+ "GroupName": "demo-sg",
+ "IpPermissions": [
{
- "Description": "sldkjdlskfjs",
- "GroupId": "sg-00000000000000001",
- "GroupName": "demo-sg",
- "IpPermissions": [
+ "FromPort": 0,
+ "IpProtocol": "tcp",
+ "IpRanges": [
{
- "FromPort": 0,
- "IpProtocol": "tcp",
- "IpRanges": [
- {
- "CidrIp": "0.0.0.0/0"
- }
- ],
- "Ipv6Ranges": [],
- "PrefixListIds": [],
- "ToPort": 65535,
- "UserIdGroupPairs": []
- },
+ "CidrIp": "0.0.0.0/0"
+ }
+ ],
+ "Ipv6Ranges": [],
+ "PrefixListIds": [],
+ "ToPort": 65535,
+ "UserIdGroupPairs": []
+ },
+ {
+ "FromPort": 22,
+ "IpProtocol": "tcp",
+ "IpRanges": [
{
- "FromPort": 22,
- "IpProtocol": "tcp",
- "IpRanges": [
- {
- "CidrIp": "0.0.0.0/0"
- }
- ],
- "Ipv6Ranges": [],
- "PrefixListIds": [],
- "ToPort": 22,
- "UserIdGroupPairs": []
+ "CidrIp": "0.0.0.0/0"
}
],
- "IpPermissionsEgress": [
+ "Ipv6Ranges": [],
+ "PrefixListIds": [],
+ "ToPort": 22,
+ "UserIdGroupPairs": []
+ }
+ ],
+ "IpPermissionsEgress": [
+ {
+ "FromPort": 0,
+ "IpProtocol": "tcp",
+ "IpRanges": [
{
- "FromPort": 0,
- "IpProtocol": "tcp",
- "IpRanges": [
- {
- "CidrIp": "0.0.0.0/0"
- }
- ],
- "Ipv6Ranges": [],
- "PrefixListIds": [],
- "ToPort": 65535,
- "UserIdGroupPairs": []
+ "CidrIp": "0.0.0.0/0"
}
],
- "OwnerId": "717007404259",
- "Region": "us-east-1",
- "VpcId": "vpc-061c242911e464170"
+ "Ipv6Ranges": [],
+ "PrefixListIds": [],
+ "ToPort": 65535,
+ "UserIdGroupPairs": []
}
- ]
- },
+ ],
+ "OwnerId": "717007404259",
+ "Region": "us-east-1",
+ "VpcId": "vpc-061c242911e464170"
+ }
+ ],
"HumanReadable": "### AWS EC2 SecurityGroups\n|Description|GroupId|GroupName|OwnerId|Region|VpcId|\n|---|---|---|---|---|---|\n| sldkjdlskfjs | sg-0408c2745d3d13b15 | demo-sg | 717007404259 | us-east-1 | vpc-061c242911e464170 |\n",
"ImportantEntryContext": "None",
- "EntryContext": {
- "AWS.EC2.SecurityGroups(val.GroupId === obj.GroupId)": [
- {
- "Description": "sldkjdlskfjs",
- "GroupId": "sg-0408c2745d3d13b15",
- "GroupName": "demo-sg",
- "IpPermissions": [
- {
- "FromPort": 0,
- "IpProtocol": "tcp",
- "IpRanges": [
- {
- "CidrIp": "0.0.0.0/0"
- }
- ],
- "Ipv6Ranges": "None",
- "PrefixListIds": "None",
- "ToPort": 65535,
- "UserIdGroupPairs": "None"
- }
- ],
- "IpPermissionsEgress": [
- {
- "FromPort": 0,
- "IpProtocol": "tcp",
- "IpRanges": [
- {
- "CidrIp": "0.0.0.0/0"
- }
- ],
- "Ipv6Ranges": "None",
- "PrefixListIds": "None",
- "ToPort": 65535,
- "UserIdGroupPairs": "None"
- }
- ],
- "OwnerId": "717007404259",
- "Region": "us-east-1",
- "VpcId": "vpc-061c242911e464170"
- }
- ]
- }
}
]
INSTANCE_INFO = [
@@ -108,25 +64,24 @@
"instance": "AWS - EC2"
},
"Type": 1,
- "Contents": {
- "AWS.EC2.Instances(val.InstanceId === obj.InstanceId)": [
+ "Contents": [
+ {
+ "NetworkInterfaces": [
{
- "NetworkInterfaces": [
+ "Association": {
+ "PublicIp": "1.1.1.1"
+ },
+ "Groups": [
{
- "Association": {
- "PublicIp": "1.1.1.1"
- },
- "Groups": [
- {
- "GroupId": "sg-00000000000000000",
- "GroupName": "sg-name"
- }
- ],
- "NetworkInterfaceId": "eni-00000000000000000"
+ "GroupId": "sg-00000000000000000",
+ "GroupName": "sg-name"
}
- ]
+ ],
+ "NetworkInterfaceId": "eni-00000000000000000"
}
- ]
- }
+ ]
+ }
+ ]
}
]
+NEW_SG = [{'Type': 1, 'Contents': {'GroupId': 'sg-00000000000000001'}}]
diff --git a/Packs/AWS-Enrichment-Remediation/pack_metadata.json b/Packs/AWS-Enrichment-Remediation/pack_metadata.json
index 02e202cd9549..dafe86614af4 100644
--- a/Packs/AWS-Enrichment-Remediation/pack_metadata.json
+++ b/Packs/AWS-Enrichment-Remediation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS Enrichment and Remediation",
"description": "Playbooks using multiple AWS content packs for enrichment and remediation purposes",
"support": "xsoar",
- "currentVersion": "1.1.15",
+ "currentVersion": "1.1.16",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-GuardDuty/Integrations/AWSGuardDuty/AWSGuardDuty.yml b/Packs/AWS-GuardDuty/Integrations/AWSGuardDuty/AWSGuardDuty.yml
index 8236d16ebe09..922ed0b64cc9 100644
--- a/Packs/AWS-GuardDuty/Integrations/AWSGuardDuty/AWSGuardDuty.yml
+++ b/Packs/AWS-GuardDuty/Integrations/AWSGuardDuty/AWSGuardDuty.yml
@@ -871,7 +871,7 @@ script:
- contextPath: AWS.GuardDuty.Members.UpdatedAt
description: The time a member was last updated.
type: string
- dockerimage: demisto/boto3py3:1.0.0.87088
+ dockerimage: demisto/boto3py3:1.0.0.88855
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/AWSGuardDutyEventCollector.yml b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/AWSGuardDutyEventCollector.yml
index 41dd0d4cc361..fedc79815004 100644
--- a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/AWSGuardDutyEventCollector.yml
+++ b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/AWSGuardDutyEventCollector.yml
@@ -112,7 +112,7 @@ script:
name: limit
description: Manual command used to fetch events and display them.
name: aws-gd-get-events
- dockerimage: demisto/boto3py3:1.0.0.86592
+ dockerimage: demisto/boto3py3:1.0.0.88855
isfetchevents: true
subtype: python3
marketplaces:
diff --git a/Packs/AWS-GuardDuty/ReleaseNotes/1_3_46.md b/Packs/AWS-GuardDuty/ReleaseNotes/1_3_46.md
new file mode 100644
index 000000000000..9ac947ae2ae8
--- /dev/null
+++ b/Packs/AWS-GuardDuty/ReleaseNotes/1_3_46.md
@@ -0,0 +1,5 @@
+#### Integrations
+##### AWS - GuardDuty Event Collector
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
+##### AWS - GuardDuty
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-GuardDuty/pack_metadata.json b/Packs/AWS-GuardDuty/pack_metadata.json
index 547a6da013c4..a44c9fcb7f30 100644
--- a/Packs/AWS-GuardDuty/pack_metadata.json
+++ b/Packs/AWS-GuardDuty/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - GuardDuty",
"description": "Amazon Web Services Guard Duty Service (gd)",
"support": "xsoar",
- "currentVersion": "1.3.45",
+ "currentVersion": "1.3.46",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-IAM/Integrations/AWS-IAM/AWS-IAM.yml b/Packs/AWS-IAM/Integrations/AWS-IAM/AWS-IAM.yml
index 5fc0ffcfe8a7..6286dea67803 100644
--- a/Packs/AWS-IAM/Integrations/AWS-IAM/AWS-IAM.yml
+++ b/Packs/AWS-IAM/Integrations/AWS-IAM/AWS-IAM.yml
@@ -1536,7 +1536,7 @@ script:
- contextPath: AWS.IAM.Roles.AttachedPolicies.Query.Marker
description: When IsTruncated is true, this element is present and contains the value to use for the Marker parameter in a subsequent pagination request.
type: string
- dockerimage: demisto/boto3py3:1.0.0.87582
+ dockerimage: demisto/boto3py3:1.0.0.88855
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AWS-IAM/ReleaseNotes/1_1_58.md b/Packs/AWS-IAM/ReleaseNotes/1_1_58.md
new file mode 100644
index 000000000000..f12193b6e067
--- /dev/null
+++ b/Packs/AWS-IAM/ReleaseNotes/1_1_58.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - Identity and Access Management
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-IAM/pack_metadata.json b/Packs/AWS-IAM/pack_metadata.json
index 5237c9dc7e7d..2601dfc03925 100644
--- a/Packs/AWS-IAM/pack_metadata.json
+++ b/Packs/AWS-IAM/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Amazon Web Services Identity and Access Management (IAM)",
"support": "xsoar",
"author": "Cortex XSOAR",
- "currentVersion": "1.1.57",
+ "currentVersion": "1.1.58",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
"created": "2020-04-14T00:00:00Z",
diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
index 9dd72d74fab6..4c365ed165ef 100755
--- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
+++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
@@ -1512,7 +1512,7 @@ script:
- contextPath: AWS-NetworkFirewall.SubnetChangeProtection
description: A setting indicating whether the firewall is protected against changes to the subnet associations. Use this setting to protect against accidentally modifying the subnet associations for a firewall that is in use. When you create a firewall, the operation initializes this setting to TRUE.
type: Unknown
- dockerimage: demisto/boto3py3:1.0.0.41082
+ dockerimage: demisto/boto3py3:1.0.0.87655
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_5.md b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..0b9f87dc8ebb
--- /dev/null
+++ b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AWS Network Firewall
+
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.87655*.
diff --git a/Packs/AWS-NetworkFirewall/pack_metadata.json b/Packs/AWS-NetworkFirewall/pack_metadata.json
index fbf61ab80e13..68716b84325e 100644
--- a/Packs/AWS-NetworkFirewall/pack_metadata.json
+++ b/Packs/AWS-NetworkFirewall/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - Network Firewall",
"description": "Amazon Web Services Network Firewall",
"support": "xsoar",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-Route53/Integrations/AWSRoute53/AWSRoute53.yml b/Packs/AWS-Route53/Integrations/AWSRoute53/AWSRoute53.yml
index f5af6ea8e69f..01c159722749 100644
--- a/Packs/AWS-Route53/Integrations/AWSRoute53/AWSRoute53.yml
+++ b/Packs/AWS-Route53/Integrations/AWSRoute53/AWSRoute53.yml
@@ -401,7 +401,7 @@ script:
- contextPath: AWS.Route53.RecordSetsChange.Comment
description: A complex type that describes change information about changes made to your hosted zone.
type: string
- dockerimage: demisto/boto3py3:1.0.0.87537
+ dockerimage: demisto/boto3py3:1.0.0.88855
runonce: false
script: ''
subtype: python3
diff --git a/Packs/AWS-Route53/ReleaseNotes/1_1_29.md b/Packs/AWS-Route53/ReleaseNotes/1_1_29.md
new file mode 100644
index 000000000000..9ea530491169
--- /dev/null
+++ b/Packs/AWS-Route53/ReleaseNotes/1_1_29.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - Route53
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-Route53/pack_metadata.json b/Packs/AWS-Route53/pack_metadata.json
index 89c97071f046..615486de5627 100644
--- a/Packs/AWS-Route53/pack_metadata.json
+++ b/Packs/AWS-Route53/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - Route53",
"description": "Amazon Web Services Managed Cloud DNS Service.",
"support": "xsoar",
- "currentVersion": "1.1.28",
+ "currentVersion": "1.1.29",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-SNS-Listener/.pack-ignore b/Packs/AWS-SNS-Listener/.pack-ignore
new file mode 100644
index 000000000000..b0151bdf4b20
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/.pack-ignore
@@ -0,0 +1,2 @@
+[file:AWSSNSListener.yml]
+ignore=BA124
\ No newline at end of file
diff --git a/Packs/AWS-SNS-Listener/.secrets-ignore b/Packs/AWS-SNS-Listener/.secrets-ignore
new file mode 100644
index 000000000000..1a0f000daf66
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/.secrets-ignore
@@ -0,0 +1,3 @@
+https://sns.eu-central-1.amazonaws.com
+https://user:pass@ext-myxsoar-address/xsoar/instance/execute/My-AWS-SNS-Listener/sns_ep
+https://link.pem
\ No newline at end of file
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.py b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.py
new file mode 100644
index 000000000000..5471d3105487
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.py
@@ -0,0 +1,317 @@
+from CommonServerPython import * # noqa: F401
+from CommonServerUserPython import *
+from tempfile import NamedTemporaryFile
+from traceback import format_exc
+from collections import deque
+import uvicorn
+from secrets import compare_digest
+from fastapi import Depends, FastAPI, Request, Response, status
+from fastapi.security import HTTPBasic, HTTPBasicCredentials
+from fastapi.security.api_key import APIKeyHeader
+from fastapi.openapi.models import APIKey
+import base64
+from M2Crypto import X509
+
+
+PARAMS: dict = demisto.params()
+sample_events_to_store = deque(maxlen=20) # type: ignore[var-annotated]
+
+app = FastAPI(docs_url=None, redoc_url=None, openapi_url=None)
+basic_auth = HTTPBasic(auto_error=False)
+token_auth = APIKeyHeader(auto_error=False, name='Authorization')
+
+
+PROXIES, USE_SSL = handle_proxy_for_long_running()
+
+
+class AWS_SNS_CLIENT(BaseClient): # pragma: no cover
+ def __init__(self, base_url=None):
+ if PROXIES:
+ self.proxies = PROXIES
+ elif PARAMS.get('proxy'):
+ self.proxies = handle_proxy()
+ headers = {'Accept': 'application/json'}
+ super().__init__(base_url=base_url, proxy=bool(PROXIES), verify=USE_SSL, headers=headers)
+
+ def get(self, full_url, resp_type='json'):
+ return self._http_request(method='GET', full_url=full_url, proxies=PROXIES, resp_type=resp_type)
+
+
+client = AWS_SNS_CLIENT()
+
+
+class ServerConfig(): # pragma: no cover
+ def __init__(self, certificate_path, private_key_path, log_config, ssl_args):
+ self.certificate_path = certificate_path
+ self.private_key_path = private_key_path
+ self.log_config = log_config
+ self.ssl_args = ssl_args
+
+
+def is_valid_sns_message(sns_payload):
+ """
+ Validates an incoming Amazon Simple Notification Service (SNS) message.
+
+ Args:
+ sns_payload (dict): The SNS payload containing relevant fields.
+
+ Returns:
+ bool: True if the message is valid, False otherwise.
+ """
+ # taken from https://github.com/boto/boto3/issues/2508
+ demisto.debug('In is_valid_sns_message')
+ # Can only be one of these types.
+ if sns_payload["Type"] not in ["SubscriptionConfirmation", "Notification", "UnsubscribeConfirmation"]:
+ demisto.error('Not a valid SNS message')
+ return False
+
+ # Amazon SNS currently supports signature version 1 or 2.
+ if sns_payload.get("SignatureVersion") not in ["1", "2"]:
+ demisto.error('Not using the supported AWS-SNS SignatureVersion 1 or 2')
+ return False
+ demisto.debug(f'Handling Signature Version: {sns_payload.get("SignatureVersion")}')
+ # Fields for a standard notification.
+ fields = ["Message", "MessageId", "Subject", "Timestamp", "TopicArn", "Type"]
+
+ # Determine the required fields based on message type
+ if sns_payload["Type"] in ["SubscriptionConfirmation", "UnsubscribeConfirmation"]:
+ fields = ["Message", "MessageId", "SubscribeURL", "Timestamp", "Token", "TopicArn", "Type"]
+
+ # Build the string to be signed.
+ string_to_sign = ""
+ for field in fields:
+ string_to_sign += field + "\n" + sns_payload[field] + "\n"
+
+ # Verify the signature
+ decoded_signature = base64.b64decode(sns_payload["Signature"])
+ try:
+ response = client.get(full_url=sns_payload["SigningCertURL"], resp_type='response')
+ response.raise_for_status()
+ certificate = X509.load_cert_string(response.text)
+ except Exception as e:
+ demisto.error(f'Exception validating sign cert url: {e}')
+ return False
+
+ public_key = certificate.get_pubkey()
+ # Verify the signature based on SignatureVersion
+ if sns_payload["SignatureVersion"] == "1":
+ public_key.reset_context(md="sha1")
+ else: # version2
+ public_key.reset_context(md="sha256")
+
+ public_key.verify_init()
+ public_key.verify_update(string_to_sign.encode())
+ verification_result = public_key.verify_final(decoded_signature)
+
+ if verification_result != 1:
+ demisto.error('Signature verification failed.')
+ return False
+
+ demisto.debug('Signature verification succeeded.')
+ return True
+
+
+def is_valid_integration_credentials(credentials, request_headers, token):
+ credentials_param = PARAMS.get('credentials')
+ auth_failed = False
+ header_name = None
+ if credentials_param and (username := credentials_param.get('identifier')):
+ password = credentials_param.get('password', '')
+ if username.startswith('_header'):
+ header_name = username.split(':')[1]
+ token_auth.model.name = header_name
+ if not token or not compare_digest(token, password):
+ auth_failed = True
+ elif (not credentials) or (not (compare_digest(credentials.username, username)
+ and compare_digest(credentials.password, password))):
+ auth_failed = True
+ if auth_failed:
+ secret_header = (header_name or 'Authorization').lower()
+ if secret_header in request_headers:
+ request_headers[secret_header] = '***'
+ demisto.debug(f'Authorization failed - request headers {request_headers}')
+ if auth_failed: # auth failed not valid credentials
+ return False, header_name
+ else:
+ return True, header_name
+
+
+def handle_subscription_confirmation(subscribe_url) -> Response: # pragma: no cover
+ demisto.debug('SubscriptionConfirmation request')
+ try:
+ return client.get(full_url=subscribe_url)
+ except Exception as e:
+ demisto.error(f'Failed handling SubscriptionConfirmation: {e}')
+ return Response(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ content='Failed handling SubscriptionConfirmation')
+
+
+def handle_notification(payload, raw_json):
+ message = payload['Message']
+ demisto.debug(f'Notification request msg: {message}')
+ return {
+ 'name': payload['Subject'],
+ 'labels': [],
+ 'rawJSON': raw_json,
+ 'occurred': payload['Timestamp'],
+ 'details': f'ExternalID:{payload["MessageId"]} TopicArn:{payload["TopicArn"]} Message:{message}',
+ 'type': 'AWS-SNS Notification'
+ }
+
+
+def store_samples(incident): # pragma: no cover
+ try:
+ sample_events_to_store.append(incident)
+ integration_context = get_integration_context()
+ sample_events = deque(json.loads(integration_context.get('sample_events', '[]')), maxlen=20)
+ sample_events += sample_events_to_store
+ integration_context['sample_events'] = list(sample_events)
+ set_to_integration_context_with_retries(integration_context)
+ except Exception as e:
+ demisto.error(f'Failed storing sample events - {e}')
+
+
+@app.post(f'/{PARAMS.get("endpoint","")}')
+async def handle_post(request: Request,
+ credentials: HTTPBasicCredentials = Depends(basic_auth),
+ token: APIKey = Depends(token_auth)): # pragma: no cover
+ """
+ Handles incoming AWS-SNS POST requests.
+ Supports SubscriptionConfirmation, Notification and UnsubscribeConfirmation.
+
+ Args:
+ request (Request): The incoming HTTP request.
+ credentials (HTTPBasicCredentials): Basic authentication credentials.
+ token (APIKey): API key for authentication.
+
+ Returns:
+ Union[Response, str]: Response data or error message.
+ """
+ data = ''
+ request_headers = dict(request.headers)
+ is_valid_credentials = False
+ try:
+ is_valid_credentials, header_name = is_valid_integration_credentials(credentials, request_headers, token)
+ except Exception as e:
+ demisto.error(f'Error handling auth failure: {e}')
+ if not is_valid_credentials:
+ return Response(status_code=status.HTTP_401_UNAUTHORIZED, content='Authorization failed.')
+
+ secret_header = (header_name or 'Authorization').lower()
+ request_headers.pop(secret_header, None)
+
+ try:
+ type = request_headers['x-amz-sns-message-type']
+ payload = await request.json()
+ raw_json = json.dumps(payload)
+ except Exception as e:
+ demisto.error(f'Error in request parsing: {e}')
+ return Response(status_code=status.HTTP_400_BAD_REQUEST, content='Failed parsing request.')
+ if not is_valid_sns_message(payload):
+ return 'Validation of SNS message failed.'
+
+ if type == 'SubscriptionConfirmation':
+ demisto.debug('SubscriptionConfirmation request')
+ subscribe_url = payload['SubscribeURL']
+ try:
+ response = handle_subscription_confirmation(subscribe_url=subscribe_url)
+ response.raise_for_status()
+ except Exception as e:
+ demisto.error(f'Failed handling SubscriptionConfirmation: {e}')
+ return 'Failed handling SubscriptionConfirmation'
+ demisto.debug(f'Response from subscribe url: {response}')
+ return response
+ elif type == 'Notification':
+ incident = handle_notification(payload, raw_json)
+ data = demisto.createIncidents(incidents=[incident])
+ demisto.debug(f'Created incident: {incident}')
+ if PARAMS.get('store_samples'):
+ store_samples(incident)
+ if not data:
+ demisto.error('Failed creating incident')
+ data = 'Failed creating incident'
+ return data
+ elif type == 'UnsubscribeConfirmation':
+ message = payload['Message']
+ demisto.debug(f'UnsubscribeConfirmation request msg: {message}')
+ return f'UnsubscribeConfirmation request msg: {message}'
+ else:
+ demisto.error(f'Failed handling AWS SNS request, unknown type: {payload["Type"]}')
+ return f'Failed handling AWS SNS request, unknown type: {payload["Type"]}'
+
+
+def unlink_certificate(certificate_path, private_key_path): # pragma: no cover
+ if certificate_path:
+ os.unlink(certificate_path)
+ if private_key_path:
+ os.unlink(private_key_path)
+ time.sleep(5)
+
+
+def setup_server(): # pragma: no cover
+ certificate = PARAMS.get('certificate', '')
+ private_key = PARAMS.get('key', '')
+
+ certificate_path = ''
+ private_key_path = ''
+ ssl_args = {}
+ if certificate and private_key:
+ certificate_file = NamedTemporaryFile(delete=False)
+ certificate_path = certificate_file.name
+ certificate_file.write(bytes(certificate, 'utf-8'))
+ certificate_file.close()
+ ssl_args['ssl_certfile'] = certificate_path
+
+ private_key_file = NamedTemporaryFile(delete=False)
+ private_key_path = private_key_file.name
+ private_key_file.write(bytes(private_key, 'utf-8'))
+ private_key_file.close()
+ ssl_args['ssl_keyfile'] = private_key_path
+
+ demisto.debug('Starting HTTPS Server')
+ else:
+ demisto.debug('Starting HTTP Server')
+
+ integration_logger = IntegrationLogger()
+ integration_logger.buffering = False
+ log_config = dict(uvicorn.config.LOGGING_CONFIG)
+ log_config['handlers']['default']['stream'] = integration_logger
+ log_config['handlers']['access']['stream'] = integration_logger
+ return ServerConfig(log_config=log_config, ssl_args=ssl_args,
+ certificate_path=certificate_path, private_key_path=private_key_path)
+
+
+''' MAIN FUNCTION '''
+
+
+def main(): # pragma: no cover
+ demisto.debug(f'Command being called is {demisto.command()}')
+ try:
+ try:
+ port = PARAMS.get('longRunningPort')
+ except ValueError as e:
+ raise ValueError(f'Invalid listen port - {e}')
+ if demisto.command() == 'test-module':
+ return_results("ok")
+ elif demisto.command() == 'long-running-execution':
+ demisto.debug('Started long-running-execution.')
+ while True:
+ server_config = setup_server()
+ if not server_config:
+ raise DemistoException('Failed to configure server.')
+ try:
+ uvicorn.run(app, host='0.0.0.0', port=port, log_config=server_config.log_config, **server_config.ssl_args)
+ except Exception as e:
+ demisto.error(f'An error occurred in the long running loop: {str(e)} - {format_exc()}')
+ demisto.updateModuleHealth(f'An error occurred: {str(e)}')
+ finally:
+ unlink_certificate(server_config.certificate_path, server_config.private_key_path)
+ else:
+ raise NotImplementedError(f'Command {demisto.command()} is not implemented.')
+ except Exception as e:
+ demisto.error(format_exc())
+ return_error(f'Failed to execute {demisto.command()} command. Error: {e}')
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml
new file mode 100644
index 000000000000..f554c661c8f7
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml
@@ -0,0 +1,73 @@
+category: Messaging and Conferencing
+sectionOrder:
+- Connect
+- Collect
+commonfields:
+ id: AWS-SNS-Listener
+ version: -1
+configuration:
+- display: Long running instance
+ defaultvalue: 'true'
+ name: longRunning
+ type: 8
+ hidden: true
+ section: Connect
+ advanced: true
+ required: false
+- additionalinfo: "Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use. (For Cortex XSOAR 8 and Cortex XSIAM) If you do not enter a Listen Port, an unused port for AWS SNS Listener will automatically be generated when the instance is saved. However, if using an engine, you must enter a Listen Port."
+ display: Listen Port
+ name: longRunningPort
+ type: 0
+ required: false
+ section: Connect
+- additionalinfo: Uses basic authentication for accessing the list. If empty, no authentication is enforced. (For Cortex XSOAR 8 and Cortex XSIAM) Optional for engines, otherwise mandatory.
+ display: Username
+ name: credentials
+ type: 9
+ section: Connect
+ required: false
+- additionalinfo: "Set the endpoint of your listener. example: /snsv2"
+ display: Endpoint
+ name: endpoint
+ type: 0
+ section: Connect
+ required: false
+- display: Certificate (Required for HTTPS)
+ additionalinfo: "(For Cortex XSOAR 6.x) For use with HTTPS - the certificate that the service should use. (For Cortex XSOAR 8 and Cortex XSIAM) Custom certificates are not supported."
+ name: certificate
+ type: 12
+ section: Connect
+ required: false
+- display: Private Key (Required for HTTPS)
+ additionalinfo: "(For Cortex XSOAR 6.x) For use with HTTPS - the private key that the service should use. (For Cortex XSOAR 8 and Cortex XSIAM) When using an engine, configure a private API key. Not supported on the Cortex XSOAR or Cortex XSIAM server."
+ name: key
+ type: 14
+ section: Connect
+ required: false
+- additionalinfo: "Because this is a push-based integration, it cannot fetch sample events in the mapping wizard. After you finish mapping, it is recommended to turn off the sample events storage to reduce performance overhead."
+ display: Store sample events for mapping
+ name: store_samples
+ type: 8
+ section: Connect
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+description: 'Amazon Simple Notification Service (SNS) is a managed service that provides message delivery from publishers to subscribers.'
+display: AWS-SNS-Listener
+name: AWS-SNS-Listener
+script:
+ commands: []
+ dockerimage: demisto/fastapi:1.0.0.87576
+ longRunning: true
+ longRunningPort: true
+ script: '-'
+ subtype: python3
+ type: python
+ isFetchSamples: true
+fromversion: 6.10.0
+tests:
+- AWS SNS Listener - Test
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_description.md b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_description.md
new file mode 100644
index 000000000000..8d4d6f80c3b9
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_description.md
@@ -0,0 +1,32 @@
+## AWS-SNS-Listener Help
+
+In order to configure the AWS-SNS-Listener
+
+XSOAR6
+
+* http: configure an endpoint and a free port for the internal long running server.
+* https: In addition to the http configuration, add a CA certificate and a private key.
+* Note: AWS-SNS works only with CA certificates.
+* Another option is via engine.
+
+Configuring the subscriber on AWS-SNS UI is straightforward:
+http/https://:/
+For more general information on long running integrations on XSOAR6:
+https://xsoar.pan.dev/docs/reference/articles/long-running-invoke
+
+XSOAR8 or XSIAM:
+
+* The instance should be configured to run only on HTTP.
+* The instance is using the HTTPS certificate of the server.
+* Please set a user and password (can be set globally via the long running integrations configuration,
+* or locally for this integration only).
+
+Configuring the subscriber on AWS-SNS UI:
+https://&lt;user&gt;:&lt;password&gt;@ext-&lt;xsoar-address&gt;/xsoar/instance/execute/&lt;instance-name&gt;/&lt;endpoint&gt;
+
+example:
+https://user:pass@ext-myxsoar-address/xsoar/instance/execute/My-AWS-SNS-Listener/sns_ep
+
+For more info on long running integrations on XSOAR8 or XSIAM:
+https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/8/Cortex-XSOAR-Administrator-Guide/Forward-Requests-to-Long-Running-Integrations
+
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_image.png b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_image.png
new file mode 100644
index 000000000000..e2af23f59a4e
Binary files /dev/null and b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_image.png differ
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_test.py b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_test.py
new file mode 100644
index 000000000000..9a6f91da2418
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener_test.py
@@ -0,0 +1,113 @@
+import pytest
+from AWSSNSListener import handle_notification, is_valid_sns_message, is_valid_integration_credentials
+from unittest.mock import patch
+import requests
+
+VALID_PAYLOAD = {
+ "Type": "Notification",
+ "MessageId": "uuid",
+ "TopicArn": "topicarn",
+ "Subject": "NotificationSubject",
+ "Message": "NotificationMessage",
+ "Timestamp": "2024-02-13T18:03:27.239Z",
+ "SignatureVersion": "1",
+ "Signature": b"sign",
+ "SigningCertURL": "https://link.pem",
+}
+
+
+@pytest.fixture
+def mock_params(mocker):
+ return mocker.patch('AWSSNSListener.PARAMS', new={'credentials': {'identifier': 'foo', 'password': 'bar'}},
+ autospec=False)
+
+
+def test_handle_notification_valid():
+ '''
+ Given a valid SNS notification message
+ When handle_notification is called with the message and raw json
+ Then should parse to a valid incident
+ '''
+ raw_json = {}
+ expected_notification = {
+ 'name': 'NotificationSubject',
+ 'labels': [],
+ 'rawJSON': raw_json,
+ 'occurred': '2024-02-13T18:03:27.239Z',
+ 'details': 'ExternalID:uuid TopicArn:topicarn Message:NotificationMessage',
+ 'type': 'AWS-SNS Notification'
+ }
+
+ actual_incident = handle_notification(VALID_PAYLOAD, raw_json)
+
+ assert actual_incident == expected_notification
+
+
+@patch("AWSSNSListener.client")
+@patch("AWSSNSListener.X509")
+@patch("M2Crypto.EVP.PKey")
+def test_is_valid_sns_message(mock_client, mock_x509, mock_PKey):
+ mock_resp = requests.models.Response()
+ mock_resp.status_code = 200
+ response_content = '''-----BEGIN VALID CERTIFICATE-----
+ -----END CERTIFICATE-----'''
+ mock_resp._content = str.encode(response_content)
+ mock_client.get.return_value = mock_resp
+ mock_PKey.verify_final.return_value = 1
+ mock_x509.get_pubkey.return_value = mock_PKey
+ mock_x509.load_cert_string.return_value = mock_x509
+ is_valid = is_valid_sns_message(VALID_PAYLOAD)
+ assert is_valid
+
+
+@patch("AWSSNSListener.client")
+@patch("AWSSNSListener.X509")
+@patch("M2Crypto.EVP.PKey")
+def test_not_valid_sns_message(mock_client, mock_x509, mock_PKey, capfd):
+ mock_resp = requests.models.Response()
+ mock_resp.status_code = 200
+ response_content = '''-----BEGIN INVALID CERTIFICATE-----
+ -----END CERTIFICATE-----'''
+ mock_resp._content = str.encode(response_content)
+ mock_client.get.return_value = mock_resp
+ mock_PKey.verify_final.return_value = 2
+ mock_x509.get_pubkey.return_value = mock_PKey
+ mock_x509.load_cert_string.return_value = mock_x509
+ with capfd.disabled():
+ is_valid = is_valid_sns_message(VALID_PAYLOAD)
+ assert is_valid is False
+
+
+@patch('fastapi.security.http.HTTPBasicCredentials')
+def test_valid_credentials(mock_httpBasicCredentials, mock_params):
+ """
+ Given valid credentials, request headers and token
+ When is_valid_integration_credentials is called
+ Then it should return True, header_name
+ """
+ mock_httpBasicCredentials.username = 'foo'
+ mock_httpBasicCredentials.password = 'bar'
+ request_headers = {}
+ token = "sometoken"
+ result, header_name = is_valid_integration_credentials(
+ mock_httpBasicCredentials, request_headers, token
+ )
+ assert result is True
+ assert header_name is None
+
+
+@patch('fastapi.security.http.HTTPBasicCredentials')
+def test_invalid_credentials(mock_httpBasicCredentials, mock_params):
+ """
+ Given invalid credentials, request headers and token
+ When is_valid_integration_credentials is called
+    Then it should return False
+ """
+ mock_httpBasicCredentials.username = 'foot'
+ mock_httpBasicCredentials.password = 'bark'
+ request_headers = {}
+ token = "sometoken"
+ result, header_name = is_valid_integration_credentials(
+ mock_httpBasicCredentials, request_headers, token
+ )
+ assert result is False
diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md
new file mode 100644
index 000000000000..2afae159959c
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md
@@ -0,0 +1,27 @@
+Amazon Simple Notification Service (SNS) is a managed service that provides message delivery from publishers to subscribers.
+This integration was integrated and tested with version January 2024 of AWS-SNS-Listener.
+
+## Configure AWS-SNS-Listener on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for AWS-SNS-Listener.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Long running instance | | False |
+ | Listen Port | Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use. \(For Cortex XSOAR 8 and Cortex XSIAM\) If you do not enter a Listen Port, an unused port for AWS SNS Listener will automatically be generated when the instance is saved. However, if using an engine, you must enter a Listen Port. | False |
+ | Username | Uses basic authentication for accessing the list. If empty, no authentication is enforced. \(For Cortex XSOAR 8 and Cortex XSIAM\) Optional for engines, otherwise mandatory. | False |
+ | Password | | False |
+ | Endpoint | Set the endpoint of your listener. example: /snsv2 | False |
+ | Certificate (Required for HTTPS) | \(For Cortex XSOAR 6.x\) For use with HTTPS - the certificate that the service should use. \(For Cortex XSOAR 8 and Cortex XSIAM\) Custom certificates are not supported. | False |
+ | Private Key (Required for HTTPS) | \(For Cortex XSOAR 6.x\) For use with HTTPS - the private key that the service should use. \(For Cortex XSOAR 8 and Cortex XSIAM\) When using an engine, configure a private API key. Not supported on the Cortex XSOAR or Cortex XSIAM server. | False |
+ | Store sample events for mapping | Because this is a push-based integration, it cannot fetch sample events in the mapping wizard. After you finish mapping, it is recommended to turn off the sample events storage to reduce performance overhead. | False |
+ | Use system proxy settings | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
diff --git a/Packs/AWS-SNS-Listener/README.md b/Packs/AWS-SNS-Listener/README.md
new file mode 100644
index 000000000000..246c33a23f0f
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/README.md
@@ -0,0 +1,6 @@
+Amazon Simple Notification Service (SNS) is a managed service that provides message delivery from publishers to subscribers. Publishers communicate asynchronously with subscribers by sending messages to a topic, which is a logical access point and communication channel. Clients can subscribe to the SNS topic and receive published messages using a supported endpoint type, such as Amazon Kinesis Data Firehose, Amazon SQS, AWS Lambda, HTTP, email, mobile push notifications, and mobile text messages (SMS).
+
+## What does this pack do
+The AWS SNS Listener supports two types of POST requests:
+* SubscriptionConfirmation: Extracts the subscription URL and sends a subscription confirmation.
+* Notification: Extracts the subject and message body and creates a Cortex XSOAR / Cortex XSIAM incident.
\ No newline at end of file
diff --git a/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml b/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml
new file mode 100644
index 000000000000..11365dfe5b51
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml
@@ -0,0 +1,385 @@
+id: AWS SNS Listener - Test
+version: -1
+name: AWS SNS Listener - Test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 976ffcfd-467b-4c2a-82ee-9285ddb6d84a
+ type: start
+ task:
+ id: 976ffcfd-467b-4c2a-82ee-9285ddb6d84a
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "6"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 89106f4b-7057-44e3-81d0-5715f937de6d
+ type: regular
+ task:
+ id: 89106f4b-7057-44e3-81d0-5715f937de6d
+ version: -1
+ name: Post a msg to SNS-Listener
+ description: Sends http request. Returns the response as json.
+ scriptName: http
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "7"
+ scriptarguments:
+ body:
+ simple: |-
+ {"Type": "Notification",
+ "MessageId": "afe031bb-5ef5-53b1-b1ad-6c4a4288defb",
+ "TopicArn": "arn:aws:sns:eu-central-1:test:test",
+ "Subject": "SNS-test-subject",
+ "Message": "SNS-test-message body",
+ "Timestamp": "2023-12-11T14:18:37.923Z",
+ "SignatureVersion": "1",
+ "Signature": "Signature_test",
+ "SigningCertURL": "https://sns.eu-central-1.amazonaws.com/SimpleNotificationService-01d088a6f77103d0fe307c0069e40ed6.pem",
+ "UnsubscribeURL": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe"
+ }
+ headers:
+ simple: Authorization:token
+ method:
+ simple: POST
+ url:
+ simple: http://localhost:9000/incident/aws/
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 545
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 38bf8674-3546-47e1-8d8f-8c921e45733a
+ type: regular
+ task:
+ id: 38bf8674-3546-47e1-8d8f-8c921e45733a
+ version: -1
+ name: Search the incident
+ description: Searches Demisto incidents
+ scriptName: SearchIncidentsV2
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ name:
+ simple: SNS-test-subject
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 0ee4c9b7-8a4b-4c59-8064-d01b5214d888
+ type: condition
+ task:
+ id: 0ee4c9b7-8a4b-4c59-8064-d01b5214d888
+ version: -1
+ name: Verify incident was created successfully
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "4"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: foundIncidents.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1245
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 3daf5a1f-bf67-474d-8a4e-c63f02eb544c
+ type: regular
+ task:
+ id: 3daf5a1f-bf67-474d-8a4e-c63f02eb544c
+ version: -1
+ name: Close webhook triggered incident
+ description: commands.local.cmd.close.inv
+ script: Builtin|||closeInvestigation
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ id:
+ simple: ${foundIncidents.id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1420
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 5bf72787-3920-4dd6-87c2-11e307629d7c
+ type: regular
+ task:
+ id: 5bf72787-3920-4dd6-87c2-11e307629d7c
+ version: -1
+ name: DeleteContext
+ description: Delete field from context
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: 813fcb90-ff23-4e50-80df-48cc4890cf29
+ type: condition
+ task:
+ id: 813fcb90-ff23-4e50-80df-48cc4890cf29
+ version: -1
+ name: Verify Success HTTP Response
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "9"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualNumber
+ left:
+ value:
+ simple: HttpRequest.Response.StatusCode
+ iscontext: true
+ right:
+ value:
+ simple: "200"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 813e76da-af96-4d95-8d93-fe10e0a71e56
+ type: regular
+ task:
+ id: 813e76da-af96-4d95-8d93-fe10e0a71e56
+ version: -1
+ name: Sleep 10 seconds to let the webserver spin up
+ description: Sleep for X seconds
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ scriptarguments:
+ seconds:
+ simple: "10"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: 2bb0bae1-4241-4c88-8d85-ebe7c1586580
+ type: regular
+ task:
+ id: 2bb0bae1-4241-4c88-8d85-ebe7c1586580
+ version: -1
+ name: Sleep 10 seconds before searching the incident
+ description: Sleep for X seconds
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ seconds:
+ simple: "10"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 895
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: d5fbc51b-96bc-4723-8c71-ff960b4eab70
+ type: title
+ task:
+ id: d5fbc51b-96bc-4723-8c71-ff960b4eab70
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1580
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 1595,
+ "width": 380,
+ "x": 50,
+ "y": 50
+ }
+ }
+ }
+inputs: []
+outputs: []
+fromversion: 6.10.0
+description: ''
diff --git a/Packs/AWS-SNS-Listener/pack_metadata.json b/Packs/AWS-SNS-Listener/pack_metadata.json
new file mode 100644
index 000000000000..b506aa119a11
--- /dev/null
+++ b/Packs/AWS-SNS-Listener/pack_metadata.json
@@ -0,0 +1,22 @@
+{
+ "name": "AWS-SNS-Listener",
+ "description": "A long running AWS SNS Listener service that can subscribe to an SNS topic and create incidents from the messages received.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "created": "2023-01-12T00:00:00Z",
+ "categories": [
+ "Cloud Services"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [
+ "Amazon"
+ ],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/AWS-SNS/Integrations/AWSSNS/AWSSNS.yml b/Packs/AWS-SNS/Integrations/AWSSNS/AWSSNS.yml
index 6c3ac3eee0be..2dad1ba0fcb8 100644
--- a/Packs/AWS-SNS/Integrations/AWSSNS/AWSSNS.yml
+++ b/Packs/AWS-SNS/Integrations/AWSSNS/AWSSNS.yml
@@ -225,7 +225,7 @@ script:
outputs:
- contextPath: AWS.SNS.Subscriptions.SubscriptionArn
description: The Subscription Arn.
- dockerimage: demisto/boto3py3:1.0.0.87582
+ dockerimage: demisto/boto3py3:1.0.0.88855
script: ''
subtype: python3
type: python
diff --git a/Packs/AWS-SNS/ReleaseNotes/1_0_13.md b/Packs/AWS-SNS/ReleaseNotes/1_0_13.md
new file mode 100644
index 000000000000..0d839c12861c
--- /dev/null
+++ b/Packs/AWS-SNS/ReleaseNotes/1_0_13.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS - SNS
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-SNS/pack_metadata.json b/Packs/AWS-SNS/pack_metadata.json
index 0e0680011fed..5121c9451efa 100644
--- a/Packs/AWS-SNS/pack_metadata.json
+++ b/Packs/AWS-SNS/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - SNS",
"description": "This is the integration content pack which can create or delete topic/subscription on AWS Simple Notification System and send the message via SNS as well.",
"support": "xsoar",
- "currentVersion": "1.0.12",
+ "currentVersion": "1.0.13",
"author": "Jie Liau",
"url": "",
"email": "",
diff --git a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.yml b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.yml
index 48b9339e71eb..d11b384dc844 100644
--- a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.yml
+++ b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.yml
@@ -116,7 +116,7 @@ script:
name: limit
description: Fetch events from AWS Security Hub.
name: aws-securityhub-get-events
- dockerimage: demisto/boto3py3:1.0.0.87537
+ dockerimage: demisto/boto3py3:1.0.0.88855
isfetchevents: true
script: '-'
subtype: python3
diff --git a/Packs/AWS-SecurityHub/ReleaseNotes/1_3_30.md b/Packs/AWS-SecurityHub/ReleaseNotes/1_3_30.md
new file mode 100644
index 000000000000..9e8db6494069
--- /dev/null
+++ b/Packs/AWS-SecurityHub/ReleaseNotes/1_3_30.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS Security Hub Event Collector
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-SecurityHub/pack_metadata.json b/Packs/AWS-SecurityHub/pack_metadata.json
index b60e34a7015d..05ad3b206835 100644
--- a/Packs/AWS-SecurityHub/pack_metadata.json
+++ b/Packs/AWS-SecurityHub/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - Security Hub",
"description": "Amazon Web Services Security Hub Service.",
"support": "xsoar",
- "currentVersion": "1.3.29",
+ "currentVersion": "1.3.30",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml
index 162ead1c1ad7..1ec4fb92862b 100644
--- a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml
+++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml
@@ -79,7 +79,7 @@ configuration:
type: 8
advanced: true
description: "Amazon Security Lake is a fully managed security data lake service."
-display: AWS-SecurityLake
+display: Amazon Security Lake
name: AWS Security Lake
script:
commands:
@@ -1202,7 +1202,7 @@ script:
script: "-"
type: python
subtype: python3
- dockerimage: demisto/boto3py3:1.0.0.87582
+ dockerimage: demisto/boto3py3:1.0.0.88855
feed: false
isfetch: false
fromversion: 6.10.0
diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md
index 8ce81cccc4cc..e8105ca12abf 100644
--- a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md
+++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md
@@ -1,7 +1,7 @@
Amazon Security Lake is a fully managed security data lake service.
This integration was integrated and tested with version 1.34.20 of AWS Security Lake SDK (boto3).
-## Configure AWS-SecurityLake on Cortex XSOAR
+## Configure Amazon Security Lake on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
2. Search for AWS-SecurityLake.
diff --git a/Packs/AWS-SecurityLake/README.md b/Packs/AWS-SecurityLake/README.md
index a63416b0dc79..3dd3f2d7fea2 100644
--- a/Packs/AWS-SecurityLake/README.md
+++ b/Packs/AWS-SecurityLake/README.md
@@ -1,4 +1,4 @@
-### AWS Security Lake
+### Amazon Security Lake
Amazon Security Lake is a fully managed security data lake service. You can use Security Lake to automatically centralize security data from AWS environments, SaaS providers, on premises, cloud sources, and third-party sources into a purpose-built data lake that's stored in your AWS account.
Security Lake helps you analyze security data, so you can get a more complete understanding of your security posture across the entire organization. With Security Lake, you can also improve the protection of your workloads, applications, and data.
diff --git a/Packs/AWS-SecurityLake/ReleaseNotes/1_0_5.md b/Packs/AWS-SecurityLake/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..e03304586455
--- /dev/null
+++ b/Packs/AWS-SecurityLake/ReleaseNotes/1_0_5.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS-SecurityLake
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/AWS-SecurityLake/ReleaseNotes/1_0_6.md b/Packs/AWS-SecurityLake/ReleaseNotes/1_0_6.md
new file mode 100644
index 000000000000..a78d88954114
--- /dev/null
+++ b/Packs/AWS-SecurityLake/ReleaseNotes/1_0_6.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Amazon Security Lake
+
+- Renamed the integration to ***Amazon Security Lake***.
+
diff --git a/Packs/AWS-SecurityLake/pack_metadata.json b/Packs/AWS-SecurityLake/pack_metadata.json
index 2c06244ac607..3bdc19224008 100644
--- a/Packs/AWS-SecurityLake/pack_metadata.json
+++ b/Packs/AWS-SecurityLake/pack_metadata.json
@@ -1,8 +1,8 @@
{
- "name": "AWS - Security Lake",
+ "name": "Amazon - Security Lake",
"description": "Amazon Security Lake is a fully managed security data lake service.",
"support": "xsoar",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS_DynamoDB/Integrations/AWS_DynamoDB/AWS_DynamoDB.yml b/Packs/AWS_DynamoDB/Integrations/AWS_DynamoDB/AWS_DynamoDB.yml
index 171cc8150f60..a788462d90be 100644
--- a/Packs/AWS_DynamoDB/Integrations/AWS_DynamoDB/AWS_DynamoDB.yml
+++ b/Packs/AWS_DynamoDB/Integrations/AWS_DynamoDB/AWS_DynamoDB.yml
@@ -2989,7 +2989,7 @@ script:
description: The name of the TTL attribute used to store the expiration time for items in the table.
- contextPath: AWS-DynamoDB.TimeToLiveSpecification
description: Represents the output of an UpdateTimeToLive operation.
- dockerimage: demisto/boto3py3:1.0.0.41926
+ dockerimage: demisto/boto3py3:1.0.0.87655
script: ''
subtype: python3
type: python
diff --git a/Packs/AWS_DynamoDB/ReleaseNotes/1_0_32.md b/Packs/AWS_DynamoDB/ReleaseNotes/1_0_32.md
new file mode 100644
index 000000000000..4f053df45092
--- /dev/null
+++ b/Packs/AWS_DynamoDB/ReleaseNotes/1_0_32.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Amazon DynamoDB
+
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.87655*.
diff --git a/Packs/AWS_DynamoDB/pack_metadata.json b/Packs/AWS_DynamoDB/pack_metadata.json
index 3eeacd1d9d23..31e515f5d478 100644
--- a/Packs/AWS_DynamoDB/pack_metadata.json
+++ b/Packs/AWS_DynamoDB/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Amazon DynamoDB",
"description": "Amazon DynamoDB Amazon DynamoDB is a fully managed NoSQL database service that provides fast and predictable performance with seamless scalability. DynamoDB lets you offload the administrative burdens of operating and scaling a distributed database, so that you don't have to worry about hardware provisioning, setup and configuration, replication, software patching, or cluster scaling. With DynamoDB, you can create database tables that can store and retrieve any amount of data, and serve any level of request traffic. You can scale up or scale down your tables' throughput capacity without downtime or performance degradation, and use the AWS Management Console to monitor resource utilization and performance metrics. DynamoDB automatically spreads the data and traffic for your tables over a sufficient number of servers to handle your throughput and storage requirements, while maintaining consistent and fast performance. All of your data is stored on solid state disks (SSDs) and automatically replicated across multiple Availability Zones in an AWS region, providing built-in high availability and data durability. ",
"support": "xsoar",
- "currentVersion": "1.0.31",
+ "currentVersion": "1.0.32",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS_WAF/Integrations/AWSWAF/AWSWAF.yml b/Packs/AWS_WAF/Integrations/AWSWAF/AWSWAF.yml
index 09312563e557..35b967335714 100644
--- a/Packs/AWS_WAF/Integrations/AWSWAF/AWSWAF.yml
+++ b/Packs/AWS_WAF/Integrations/AWSWAF/AWSWAF.yml
@@ -1412,7 +1412,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/boto3py3:1.0.0.83962
+ dockerimage: demisto/boto3py3:1.0.0.87902
fromversion: 6.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/AWS_WAF/ReleaseNotes/1_0_5.md b/Packs/AWS_WAF/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..ffb77bbe894e
--- /dev/null
+++ b/Packs/AWS_WAF/ReleaseNotes/1_0_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AWS-WAF
+
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.87902*.
diff --git a/Packs/AWS_WAF/pack_metadata.json b/Packs/AWS_WAF/pack_metadata.json
index b1622a240d31..85f5f0eb4b83 100644
--- a/Packs/AWS_WAF/pack_metadata.json
+++ b/Packs/AWS_WAF/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS WAF",
"description": "Amazon Web Services Web Application Firewall (WAF)",
"support": "xsoar",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.yml b/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.yml
index 1f3d40dbd0e0..60f51f315d23 100644
--- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.yml
+++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.yml
@@ -703,7 +703,7 @@ script:
description: Get the latest threat intel feed.
name: abnormal-security-get-latest-threat-intel-feed
deprecated: true
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
script: ""
subtype: python3
type: python
diff --git a/Packs/AbnormalSecurity/ReleaseNotes/2_2_6.md b/Packs/AbnormalSecurity/ReleaseNotes/2_2_6.md
new file mode 100644
index 000000000000..98c5808014af
--- /dev/null
+++ b/Packs/AbnormalSecurity/ReleaseNotes/2_2_6.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Abnormal Security
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AbnormalSecurity/pack_metadata.json b/Packs/AbnormalSecurity/pack_metadata.json
index 964af1b5e56a..0aa35530b30d 100644
--- a/Packs/AbnormalSecurity/pack_metadata.json
+++ b/Packs/AbnormalSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Abnormal Security",
"description": "Abnormal Security detects and protects against the whole spectrum of email attacks",
"support": "partner",
- "currentVersion": "2.2.5",
+ "currentVersion": "2.2.6",
"author": "Abnormal Security",
"url": "",
"email": "support@abnormalsecurity.com",
diff --git a/Packs/AccentureCTI/Integrations/ACTIIndicatorQuery/ACTIIndicatorQuery.yml b/Packs/AccentureCTI/Integrations/ACTIIndicatorQuery/ACTIIndicatorQuery.yml
index db5c2c95c564..f000225977a9 100644
--- a/Packs/AccentureCTI/Integrations/ACTIIndicatorQuery/ACTIIndicatorQuery.yml
+++ b/Packs/AccentureCTI/Integrations/ACTIIndicatorQuery/ACTIIndicatorQuery.yml
@@ -400,7 +400,7 @@ script:
- contextPath: DBotScore.Score
description: The actual score.
type: String
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AccentureCTI/ReleaseNotes/2_2_34.md b/Packs/AccentureCTI/ReleaseNotes/2_2_34.md
new file mode 100644
index 000000000000..a6f32909174f
--- /dev/null
+++ b/Packs/AccentureCTI/ReleaseNotes/2_2_34.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### ACTI Indicator Query
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AccentureCTI/pack_metadata.json b/Packs/AccentureCTI/pack_metadata.json
index 435605d33bb1..1a6347b11f36 100644
--- a/Packs/AccentureCTI/pack_metadata.json
+++ b/Packs/AccentureCTI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Accenture CTI v2",
"description": "Accenture CTI provides intelligence regarding security threats and vulnerabilities.",
"support": "partner",
- "currentVersion": "2.2.33",
+ "currentVersion": "2.2.34",
"author": "Accenture",
"url": "https://www.accenture.com/us-en/services/security/cyber-defense",
"email": "CTI.AcctManagement@accenture.com",
diff --git a/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic.yml b/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic.yml
index f0c3d0b51557..3e9bc22c9d12 100644
--- a/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic.yml
+++ b/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic.yml
@@ -829,7 +829,8 @@ tasks:
simple: inputs.DstIP
iscontext: true
- operator: uniq
- InternalRange: {}
+ InternalRange:
+ simple: ${inputs.InternalRange}
ResolveIP:
simple: "True"
separatecontext: true
@@ -899,6 +900,30 @@ inputs:
required: false
description: Set to true to assign only the users that are currently on shift. Requires Cortex XSOAR v5.5 or later.
playbookInputQuery:
+- key: InternalRange
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
+ required: false
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
+ playbookInputQuery:
outputs:
- contextPath: Account.Email.Address
description: The email address object associated with the Account
diff --git a/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic_README.md b/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic_README.md
index 327ca723cdc5..be02ddede75b 100644
--- a/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic_README.md
+++ b/Packs/AccessInvestigation/Playbooks/Access_Investigation_-_Generic_README.md
@@ -3,25 +3,31 @@ This playbook investigates an access incident by gathering user and IP informati
The playbook then interacts with the user that triggered the incident to confirm whether or not they initiated the access action.
## Dependencies
+
This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+
* Active Directory - Get User Manager Details
-* Account Enrichment - Generic v2.1
* IP Enrichment - Generic v2
+* Account Enrichment - Generic v2.1
### Integrations
+
This playbook does not use any integrations.
### Scripts
-* AssignAnalystToIncident
+
* EmailAskUser
+* AssignAnalystToIncident
### Commands
-* closeInvestigation
+
* setIncident
+* closeInvestigation
## Playbook Inputs
+
---
| **Name** | **Description** | **Default Value** | **Required** |
@@ -31,8 +37,10 @@ This playbook does not use any integrations.
| Username | The username of the account that was used to access the DstIP. | incident.srcuser | Optional |
| Role | The default role to assign the incident to. | Administrator | Required |
| OnCall | Set to true to assign only the users that are currently on shift. Requires Cortex XSOAR v5.5 or later. | false | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
## Playbook Outputs
+
---
| **Path** | **Description** | **Type** |
@@ -60,5 +68,7 @@ This playbook does not use any integrations.
| Endpoint.Domain | Endpoint domain name | string |
## Playbook Image
+
---
-![Access](https://raw.githubusercontent.com/demisto/content/a5e419535b376b08764f65670c24ea364072e869/Packs/AccessInvestigation/doc_files/Access_Investigation_-_Generic_4_5.png)
+
+![Access Investigation - Generic](../doc_files/Access_Investigation_-_Generic_4_5.png)
diff --git a/Packs/AccessInvestigation/ReleaseNotes/1_2_7.md b/Packs/AccessInvestigation/ReleaseNotes/1_2_7.md
new file mode 100644
index 000000000000..8cd0025dc3ef
--- /dev/null
+++ b/Packs/AccessInvestigation/ReleaseNotes/1_2_7.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Access Investigation - Generic
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
diff --git a/Packs/AccessInvestigation/pack_metadata.json b/Packs/AccessInvestigation/pack_metadata.json
index d81e356c3cff..ac7dbf354949 100644
--- a/Packs/AccessInvestigation/pack_metadata.json
+++ b/Packs/AccessInvestigation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Access Investigation",
"description": "This Content Pack automates response to unauthorised access incidents and contains customer access incident views and layouts to aid investigation.",
"support": "xsoar",
- "currentVersion": "1.2.6",
+ "currentVersion": "1.2.7",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.py b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.py
index 5875672fb282..e6ccbf6188f4 100644
--- a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.py
+++ b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.py
@@ -399,7 +399,11 @@ def get_user_dn_by_email(default_base_dn, email):
def modify_user_ou(dn, new_ou):
assert connection is not None
- cn = dn.split(',', 1)[0]
+ cn = dn.split(',OU=', 1)[0]
+ cn = cn.split(',DC=', 1)[0]
+ # removing backslashes ('\') to fix a customer-reported bug
+ cn = cn.replace('\\', '')
+ dn = dn.replace('\\', '')
success = connection.modify_dn(dn, cn, new_superior=new_ou)
return success
diff --git a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.yml b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.yml
index 6ea4292169a2..4c32595a3396 100644
--- a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.yml
+++ b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query.yml
@@ -800,7 +800,7 @@ script:
outputs:
- contextPath: ActiveDirectory.ValidCredentials
description: List of usernames that successfully logged in.
- dockerimage: demisto/ldap:2.9.1.87300
+ dockerimage: demisto/ldap:2.9.1.89223
ismappable: true
isremotesyncout: true
runonce: false
diff --git a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query_test.py b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query_test.py
index a8f1fc8820c7..12f8ee82ef68 100644
--- a/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query_test.py
+++ b/Packs/Active_Directory_Query/Integrations/Active_Directory_Query/Active_Directory_Query_test.py
@@ -828,3 +828,31 @@ def mock_create_connection(server, server_ip, username, password, ntlm_connectio
patch("Active_Directory_Query.Connection.unbind", side_effect=MockConnection.unbind):
command_results = Active_Directory_Query.test_credentials_command(BASE_TEST_PARAMS['server_ip'], ntlm_connection='true')
assert command_results.readable_output == 'Credential test with username username_test_credentials succeeded.'
+
+
+@pytest.mark.parametrize('dn,expected', [
+ ('CN=name, lastname,OU=Test1,DC=dc1,DC=dc2', 'CN=name, lastname'),
+ ('CN=name\\ lastname,OU=Test1,DC=dc1,DC=dc2', 'CN=name lastname'),
+ ('CN=name,DC=dc1,DC=dc2', 'CN=name')])
+def test_modify_user_ou(mocker, dn, expected):
+ """
+ Given:
+ - user with CN contains a backslash
+ - user with CN contains comma
+ - user without ou
+ When:
+ Run the 'ad-modify-user-ou' command
+ Then:
+ Validate the cn extracted as expected
+ """
+ import Active_Directory_Query
+
+ class MockConnection:
+ def modify_dn(self, dn, cn, new_superior):
+ pass
+
+ Active_Directory_Query.connection = MockConnection()
+ new_ou = 'OU=Test2'
+ connection_mocker = mocker.patch.object(Active_Directory_Query.connection, 'modify_dn', return_value=True)
+ Active_Directory_Query.modify_user_ou(dn, new_ou)
+ assert connection_mocker.call_args[0][1] == expected
diff --git a/Packs/Active_Directory_Query/ReleaseNotes/1_6_29.md b/Packs/Active_Directory_Query/ReleaseNotes/1_6_29.md
new file mode 100644
index 000000000000..6083963f08a8
--- /dev/null
+++ b/Packs/Active_Directory_Query/ReleaseNotes/1_6_29.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Active Directory Query v2
+
+- Updated the Docker image to: *demisto/ldap:2.9.1.87744*.
diff --git a/Packs/Active_Directory_Query/ReleaseNotes/1_6_30.md b/Packs/Active_Directory_Query/ReleaseNotes/1_6_30.md
new file mode 100644
index 000000000000..261643f3215d
--- /dev/null
+++ b/Packs/Active_Directory_Query/ReleaseNotes/1_6_30.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Active Directory Query v2
+
+- Fixed an issue where the ***ad-modify-user-ou*** command changed the CN value.
\ No newline at end of file
diff --git a/Packs/Active_Directory_Query/ReleaseNotes/1_6_31.md b/Packs/Active_Directory_Query/ReleaseNotes/1_6_31.md
new file mode 100644
index 000000000000..eff76f525285
--- /dev/null
+++ b/Packs/Active_Directory_Query/ReleaseNotes/1_6_31.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Active Directory Query v2
+- Updated the Docker image to: *demisto/ldap:2.9.1.89223*.
+- Fixed an issue where the ***ad-modify-user-ou*** command removed the Last Name from the CN value.
\ No newline at end of file
diff --git a/Packs/Active_Directory_Query/ReleaseNotes/1_6_32.md b/Packs/Active_Directory_Query/ReleaseNotes/1_6_32.md
new file mode 100644
index 000000000000..0bc6ed6e5c5d
--- /dev/null
+++ b/Packs/Active_Directory_Query/ReleaseNotes/1_6_32.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Active Directory Query v2
+- Updated the Docker image to: *demisto/ldap:2.9.1.89223*.
diff --git a/Packs/Active_Directory_Query/pack_metadata.json b/Packs/Active_Directory_Query/pack_metadata.json
index 28985171074b..c349bfdb8a03 100644
--- a/Packs/Active_Directory_Query/pack_metadata.json
+++ b/Packs/Active_Directory_Query/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Active Directory Query",
"description": "Active Directory Query integration enables you to access and manage Active Directory objects (users, contacts, and computers).",
"support": "xsoar",
- "currentVersion": "1.6.28",
+ "currentVersion": "1.6.32",
"author": "Cortex XSOAR",
"url": "",
"email": "",
diff --git a/Packs/AlibabaActionTrail/Integrations/AlibabaActionTrailEventCollector/AlibabaActionTrailEventCollector.yml b/Packs/AlibabaActionTrail/Integrations/AlibabaActionTrailEventCollector/AlibabaActionTrailEventCollector.yml
index 913bce155274..f257ec20355c 100644
--- a/Packs/AlibabaActionTrail/Integrations/AlibabaActionTrailEventCollector/AlibabaActionTrailEventCollector.yml
+++ b/Packs/AlibabaActionTrail/Integrations/AlibabaActionTrailEventCollector/AlibabaActionTrailEventCollector.yml
@@ -81,7 +81,7 @@ script:
- "True"
- "False"
required: true
- dockerimage: demisto/fastapi:1.0.0.36992
+ dockerimage: demisto/fastapi:1.0.0.86524
isfetchevents: true
subtype: python3
marketplaces:
diff --git a/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user.yml b/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user.yml
index 57c3e6ae4e9d..0ee3d7c06b15 100644
--- a/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user.yml
+++ b/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user.yml
@@ -625,7 +625,26 @@ view: |-
}
inputs:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: List of internal IP ranges
playbookInputQuery:
diff --git a/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user_README.md b/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user_README.md
index 67dccd4378eb..3668455bd877 100644
--- a/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user_README.md
+++ b/Packs/AlibabaActionTrail/Playbooks/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user_README.md
@@ -5,37 +5,46 @@ Used Sub-playbooks:
* Block IP - Generic v3
To link this playbook to the relevant alerts automatically, we recommend using the following filters when configuring the playbook triggers:
-* Alert Source = Correlation
-* Alert Name = Alibaba ActionTrail - multiple unauthorized action attempts detected by a user
+Alert Source = Correlation
+Alert Name = Alibaba ActionTrail - multiple unauthorized action attempts detected by a user
## Dependencies
+
This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+
* Block IP - Generic v3
* Enrichment for Verdict
### Integrations
+
This playbook does not use any integrations.
### Scripts
+
This playbook does not use any scripts.
### Commands
+
* closeInvestigation
* setAlert
## Playbook Inputs
+
---
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
-| InternalRange | List of internal IP ranges | | Optional |
+| InternalRange | List of internal IP ranges | lists.PrivateIPs | Optional |
## Playbook Outputs
+
---
There are no outputs for this playbook.
## Playbook Image
+
---
+
![Alibaba ActionTrail - multiple unauthorized action attempts detected by a user](../doc_files/Alibaba_ActionTrail_-_multiple_unauthorized_action_attempts_detected_by_a_user.png)
diff --git a/Packs/AlibabaActionTrail/ReleaseNotes/1_0_24.md b/Packs/AlibabaActionTrail/ReleaseNotes/1_0_24.md
new file mode 100644
index 000000000000..39a21e38487f
--- /dev/null
+++ b/Packs/AlibabaActionTrail/ReleaseNotes/1_0_24.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Alibaba Action Trail Event Collector
+
+- Updated the Docker image to: *demisto/fastapi:1.0.0.86524*.
diff --git a/Packs/AlibabaActionTrail/ReleaseNotes/1_0_25.md b/Packs/AlibabaActionTrail/ReleaseNotes/1_0_25.md
new file mode 100644
index 000000000000..f79bccb4262d
--- /dev/null
+++ b/Packs/AlibabaActionTrail/ReleaseNotes/1_0_25.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Alibaba ActionTrail - multiple unauthorized action attempts detected by a user
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
\ No newline at end of file
diff --git a/Packs/AlibabaActionTrail/pack_metadata.json b/Packs/AlibabaActionTrail/pack_metadata.json
index 24f2b0f0e8c9..f604f75d22d2 100644
--- a/Packs/AlibabaActionTrail/pack_metadata.json
+++ b/Packs/AlibabaActionTrail/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Alibaba Action Trail",
"description": "An Integration Pack to fetch Alibaba action trail events.",
"support": "xsoar",
- "currentVersion": "1.0.23",
+ "currentVersion": "1.0.25",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py
index 0e810e939d44..38ac3d9a549a 100644
--- a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py
+++ b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py
@@ -8,7 +8,6 @@
import dateparser
import urllib3
from datetime import datetime
-from typing import Dict
# Disable insecure warnings
urllib3.disable_warnings()
@@ -77,7 +76,7 @@ def http_request(method, url_suffix, params=None, headers=None, data=None, **kwa
if res.status_code == 401:
raise Exception('UnauthorizedError: please validate your credentials.')
if res.status_code not in {200}:
- raise Exception('Error in API call to Example Integration [{}] - {}'.format(res.status_code, res.reason))
+ raise Exception(f'Error in API call to Example Integration [{res.status_code}] - {res.reason}')
return res.json()
@@ -133,7 +132,7 @@ def get_time_range(time_frame=None, start_time=None, end_time=None):
elif time_frame == 'Last 30 Days':
start_time = end_time - timedelta(days=30)
else:
- raise ValueError('Could not parse time frame: {}'.format(time_frame))
+ raise ValueError(f'Could not parse time frame: {time_frame}')
return date_to_timestamp(start_time), date_to_timestamp(end_time)
@@ -150,8 +149,12 @@ def convert_timestamp_to_iso86(timestamp: str, timezone_letter: str = 'Z') -> st
"""
if not timestamp:
return ''
- datetime_from_timestamp = dateparser.parse(timestamp, settings={"TO_TIMEZONE": timezone_letter,
- "RETURN_AS_TIMEZONE_AWARE": True})
+ try:
+ datetime_from_timestamp = dateparser.parse(str(timestamp), settings={"TO_TIMEZONE": timezone_letter,
+ "RETURN_AS_TIMEZONE_AWARE": True})
+ except Exception as e:
+ demisto.error(f"Encountered issue parsing {timestamp}. err: {str(e)}")
+ return ''
assert datetime_from_timestamp is not None, f'{timestamp} could not be parsed'
time_in_iso86 = datetime_from_timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f")
return time_in_iso86[:-3] + timezone_letter
@@ -261,7 +264,7 @@ def parse_events(events_data):
return events
-def dict_value_to_int(target_dict: Dict, key: str):
+def dict_value_to_int(target_dict: dict, key: str):
"""
:param target_dict: A dictionary which has the key param
:param key: The key that we need to convert it's value to integer
@@ -321,7 +324,7 @@ def get_alarm_command():
# Parse response into context & content entries
alarm_details = parse_alarms(response)
- return_outputs(tableToMarkdown('Alarm {}'.format(alarm_id), alarm_details),
+ return_outputs(tableToMarkdown(f'Alarm {alarm_id}', alarm_details),
{'AlienVault.Alarm(val.ID && val.ID == obj.ID)': alarm_details},
response)
@@ -363,7 +366,7 @@ def search_alarms(start_time=None, end_time=None, status=None, priority=None, sh
params = {
'page': 0,
'size': limit,
- 'sort': 'timestamp_occured,{}'.format(direction),
+ 'sort': f'timestamp_occured,{direction}',
'suppressed': show_suppressed
}
@@ -417,7 +420,7 @@ def search_events(start_time=None, end_time=None, account_name=None, event_name=
params = {
'page': 1,
'size': limit,
- 'sort': 'timestamp_occured,{}'.format(direction),
+ 'sort': f'timestamp_occured,{direction}',
}
if account_name:
@@ -447,7 +450,7 @@ def get_events_by_alarm_command():
events = parse_events(alarm['events'])
- return_outputs(tableToMarkdown('Events of Alarm {}:'.format(alarm_id), events),
+ return_outputs(tableToMarkdown(f'Events of Alarm {alarm_id}:', events),
{'AlienVault.Event(val.ID && val.ID == obj.ID)': events},
alarm)
@@ -503,7 +506,7 @@ def fetch_incidents():
def main():
global AUTH_TOKEN
cmd = demisto.command()
- LOG('Command being called is {}'.format(cmd))
+ LOG(f'Command being called is {cmd}')
try:
handle_proxy()
@@ -522,7 +525,7 @@ def main():
LOG.print_log()
raise
else:
- return_error('An error occurred: {}'.format(str(e)))
+ return_error(f'An error occurred: {str(e)}')
# python2 uses __builtin__ python3 uses builtins
diff --git a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml
index e1a0c913b37b..03e75e57ead5 100644
--- a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml
+++ b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml
@@ -340,7 +340,7 @@ script:
- contextPath: AlienVault.Event.Subcategory
description: The event subcategory.
type: String
- dockerimage: demisto/python3:3.10.13.72123
+ dockerimage: demisto/python3:3.10.13.87159
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py
index 1d995e66df72..b7db2dd511de 100644
--- a/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py
+++ b/Packs/AlienVault_USM_Anywhere/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py
@@ -1,4 +1,3 @@
-import io
import json
import pytest
import demistomock as demisto
@@ -10,7 +9,7 @@
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
@@ -93,24 +92,27 @@ def test_get_time_range():
dt = datetime.now()
start, end = get_time_range('Today', None, None)
- assert datetime.fromtimestamp(start / 1000).date() == dt.date() and approximate_compare(dt, end)
+ assert datetime.fromtimestamp(start / 1000).date() == dt.date()
+ assert approximate_compare(dt, end)
dt = datetime.now()
# should ignore the start/end time values
start, end = get_time_range('Today', 'asfd', 'asdf')
- assert datetime.fromtimestamp(start / 1000).date() == dt.date() and approximate_compare(dt, end)
+ assert datetime.fromtimestamp(start / 1000).date() == dt.date()
+ assert approximate_compare(dt, end)
dt = datetime.now()
start, end = get_time_range('Yesterday', None, None)
- assert datetime.fromtimestamp(start / 1000).date() == (dt.date() - timedelta(days=1)) and approximate_compare(dt, end)
+ assert datetime.fromtimestamp(start / 1000).date() == (dt.date() - timedelta(days=1))
+ assert approximate_compare(dt, end)
start, end = get_time_range('Custom', '2019-12-30T01:02:03Z', '2019-12-30T04:05:06Z')
assert ((start, end) == (date_to_timestamp(dateparser.parse('2019-12-30T01:02:03Z')),
date_to_timestamp(dateparser.parse('2019-12-30T04:05:06Z'))))
start, end = get_time_range('Custom', '2019-12-30T01:02:03Z', None)
- assert (start == date_to_timestamp(dateparser.parse('2019-12-30T01:02:03Z'))
- and approximate_compare(end, datetime.now()))
+ assert start == date_to_timestamp(dateparser.parse('2019-12-30T01:02:03Z'))
+ assert approximate_compare(end, datetime.now())
parsed_regular_alarm = {'ID': 'some_uuid',
diff --git a/Packs/AlienVault_USM_Anywhere/ReleaseNotes/1_0_22.md b/Packs/AlienVault_USM_Anywhere/ReleaseNotes/1_0_22.md
new file mode 100644
index 000000000000..3a48ebd82fc0
--- /dev/null
+++ b/Packs/AlienVault_USM_Anywhere/ReleaseNotes/1_0_22.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AlienVault USM Anywhere
+- Fixed an issue where *alienvault-get-alarm* command failed to convert timestamps to iso86 format.
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/AlienVault_USM_Anywhere/pack_metadata.json b/Packs/AlienVault_USM_Anywhere/pack_metadata.json
index 84414f7fa0ad..ea1f86012048 100644
--- a/Packs/AlienVault_USM_Anywhere/pack_metadata.json
+++ b/Packs/AlienVault_USM_Anywhere/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AlienVault USM Anywhere",
"description": "Searches for and monitors alarms and events from AlienVault USM Anywhere.",
"support": "xsoar",
- "currentVersion": "1.0.21",
+ "currentVersion": "1.0.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AnsibleAlibabaCloud/TestPlaybooks/Test-AlibabaCloud.yml b/Packs/AnsibleAlibabaCloud/TestPlaybooks/Test-AlibabaCloud.yml
index 86ae41f6d048..1e40df5b73f6 100644
--- a/Packs/AnsibleAlibabaCloud/TestPlaybooks/Test-AlibabaCloud.yml
+++ b/Packs/AnsibleAlibabaCloud/TestPlaybooks/Test-AlibabaCloud.yml
@@ -1,4 +1,4 @@
-id: a60ae34e-7a00-4a06-81ca-2ca6ea1d58ba
+id: Test-AlibabaCloud
version: -1
vcShouldKeepItemLegacyProdMachine: false
name: Test-AlibabaCloud
diff --git a/Packs/AnsibleHetznerCloud/TestPlaybooks/Test-AnsibleHCloud.yml b/Packs/AnsibleHetznerCloud/TestPlaybooks/Test-AnsibleHCloud.yml
index 591f7a52e5de..526d04762532 100644
--- a/Packs/AnsibleHetznerCloud/TestPlaybooks/Test-AnsibleHCloud.yml
+++ b/Packs/AnsibleHetznerCloud/TestPlaybooks/Test-AnsibleHCloud.yml
@@ -1,4 +1,4 @@
-id: 7d8ac1af-2d1e-4ed9-875c-d3257d2c6830
+id: Test-AnsibleHCloud
version: -1
vcShouldKeepItemLegacyProdMachine: false
name: Test-AnsibleHCloud
diff --git a/Packs/ApiModules/ReleaseNotes/2_2_22.md b/Packs/ApiModules/ReleaseNotes/2_2_22.md
new file mode 100644
index 000000000000..f17bc17fd71b
--- /dev/null
+++ b/Packs/ApiModules/ReleaseNotes/2_2_22.md
@@ -0,0 +1,4 @@
+
+#### Scripts
+##### TAXII2ApiModule
+- Improved implementation of determining the TAXII version to use.
diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
index 23ef2b54b464..c3139c0eb59a 100644
--- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
+++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
@@ -13,18 +13,18 @@
XSOAR_RESOLVED_STATUS_TO_XDR = {
'Other': 'resolved_other',
- 'Duplicate': 'resolved_duplicate',
+ 'Duplicate': 'resolved_duplicate_incident',
'False Positive': 'resolved_false_positive',
'Resolved': 'resolved_true_positive',
- 'Resolved - Security Testing': 'resolved_security_testing',
+ 'Security Testing': 'resolved_security_testing',
}
XDR_RESOLVED_STATUS_TO_XSOAR = {
'resolved_known_issue': 'Other',
- 'resolved_duplicate': 'Duplicate',
+ 'resolved_duplicate_incident': 'Duplicate',
'resolved_false_positive': 'False Positive',
'resolved_true_positive': 'Resolved',
- 'resolved_security_testing': 'Resolved - Security Testing',
+ 'resolved_security_testing': 'Security Testing',
'resolved_other': 'Other',
'resolved_auto': 'Resolved'
}
@@ -346,7 +346,7 @@ def get_endpoints(self,
endpoints = reply.get('reply').get('endpoints', [])
return endpoints
- def set_endpoints_alias(self, filters: list[dict[str, str]], new_alias_name: str | None) -> dict: # pragma: no cover
+ def set_endpoints_alias(self, filters: list[dict[str, str]], new_alias_name: str | None) -> dict: # pragma: no cover
"""
This func is used to set the alias name of an endpoint.
@@ -934,8 +934,7 @@ def get_endpoint_device_control_violations(self, endpoint_ids: list, type_of_vio
ip_list: list, vendor: list, vendor_id: list, product: list,
product_id: list,
serial: list,
- hostname: list, violation_ids: list, username: list) \
- -> Dict[str, Any]:
+ hostname: list, violation_ids: list, username: list) -> Dict[str, Any]:
arg_list = {'type': type_of_violation,
'endpoint_id_list': endpoint_ids,
'ip_list': ip_list,
@@ -1709,8 +1708,8 @@ def validate_args_scan_commands(args):
'and without any other filters. This may cause performance issues.\n' \
'To scan/abort scan some of the endpoints, please use the filter arguments.'
if all_:
- if endpoint_id_list or dist_name or gte_first_seen or gte_last_seen or lte_first_seen or lte_last_seen \
- or ip_list or group_name or platform or alias or hostname:
+ if (endpoint_id_list or dist_name or gte_first_seen or gte_last_seen or lte_first_seen or lte_last_seen
+ or ip_list or group_name or platform or alias or hostname):
raise Exception(err_msg)
elif not endpoint_id_list and not dist_name and not gte_first_seen and not gte_last_seen \
and not lte_first_seen and not lte_last_seen and not ip_list and not group_name and not platform \
@@ -2849,13 +2848,44 @@ def handle_outgoing_incident_owner_sync(update_args):
def handle_user_unassignment(update_args):
if ('assigned_user_mail' in update_args and update_args.get('assigned_user_mail') in ['None', 'null', '', None]) \
- or ('assigned_user_pretty_name' in update_args
- and update_args.get('assigned_user_pretty_name') in ['None', 'null', '', None]):
+ or ('assigned_user_pretty_name' in update_args
+ and update_args.get('assigned_user_pretty_name') in ['None', 'null', '', None]):
update_args['unassign_user'] = 'true'
update_args['assigned_user_mail'] = None
update_args['assigned_user_pretty_name'] = None
+def resolve_xdr_close_reason(xsoar_close_reason: str) -> str:
+ """
+ Resolving XDR close reason from possible custom XSOAR->XDR close-reason mapping or default mapping.
+ :param xsoar_close_reason: XSOAR raw status/close reason e.g. 'False Positive'.
+ :return: XDR close-reason in snake_case format e.g. 'resolved_false_positive'.
+ """
+ # Initially setting the close reason according to the default mapping.
+ xdr_close_reason = XSOAR_RESOLVED_STATUS_TO_XDR.get(xsoar_close_reason, 'Other')
+ # Reading custom XSOAR->XDR close-reason mapping.
+ custom_xsoar_to_xdr_close_reason_mapping = comma_separated_mapping_to_dict(
+ demisto.params().get("custom_xsoar_to_xdr_close_reason_mapping")
+ )
+
+ # Overriding default close-reason mapping if there exists a custom one.
+ if xsoar_close_reason in custom_xsoar_to_xdr_close_reason_mapping:
+ xdr_close_reason_candidate = custom_xsoar_to_xdr_close_reason_mapping[xsoar_close_reason]
+ # Transforming resolved close-reason into snake_case format with known prefix to match XDR status format.
+ demisto.debug(
+ f"resolve_xdr_close_reason XSOAR->XDR custom close-reason exists, using {xsoar_close_reason}={xdr_close_reason}")
+ xdr_close_reason_candidate = "resolved_" + "_".join(xdr_close_reason_candidate.lower().split(" "))
+
+ if xdr_close_reason_candidate not in XDR_RESOLVED_STATUS_TO_XSOAR:
+ demisto.debug("Warning: Provided XDR close-reason does not exist. Using default XDR close-reason mapping. ")
+ else:
+ xdr_close_reason = xdr_close_reason_candidate
+ else:
+ demisto.debug(f"resolve_xdr_close_reason using default mapping {xsoar_close_reason}={xdr_close_reason}")
+
+ return xdr_close_reason
+
+
def handle_outgoing_issue_closure(remote_args):
incident_id = remote_args.remote_incident_id
demisto.debug(f"handle_outgoing_issue_closure {incident_id=}")
@@ -2866,13 +2896,13 @@ def handle_outgoing_issue_closure(remote_args):
# force closing remote incident only if:
# The XSOAR incident is closed
# and the remote incident isn't already closed
- if remote_args.inc_status == 2 and \
- current_remote_status not in XDR_RESOLVED_STATUS_TO_XSOAR and close_reason:
-
+ if remote_args.inc_status == 2 and current_remote_status not in XDR_RESOLVED_STATUS_TO_XSOAR and close_reason:
if close_notes := update_args.get('closeNotes'):
demisto.debug(f"handle_outgoing_issue_closure {incident_id=} {close_notes=}")
update_args['resolve_comment'] = close_notes
- update_args['status'] = XSOAR_RESOLVED_STATUS_TO_XDR.get(close_reason, 'Other')
+
+ xdr_close_reason = resolve_xdr_close_reason(close_reason)
+ update_args['status'] = xdr_close_reason
demisto.debug(f"handle_outgoing_issue_closure Closing Remote incident {incident_id=} with status {update_args['status']}")
@@ -3148,7 +3178,6 @@ def get_script_code_command(client: CoreClient, args: Dict[str, str]) -> Tuple[s
requires_polling_arg=False # means it will always be default to poll, poll=true
)
def script_run_polling_command(args: dict, client: CoreClient) -> PollResult:
-
if action_id := args.get('action_id'):
response = client.get_script_execution_status(action_id)
general_status = response.get('reply', {}).get('general_status') or ''
@@ -3740,7 +3769,6 @@ def create_request_filters(
def args_to_request_filters(args):
-
if set(args.keys()) & { # check if any filter argument was provided
'endpoint_id_list', 'dist_name', 'ip_list', 'group_name', 'platform', 'alias_name',
'isolate', 'hostname', 'status', 'first_seen_gte', 'first_seen_lte', 'last_seen_gte', 'last_seen_lte'
@@ -3814,7 +3842,6 @@ def parse_risky_users_or_hosts(user_or_host_data: dict[str, Any],
score_header: str,
description_header: str
) -> dict[str, Any]:
-
reasons = user_or_host_data.get('reasons', [])
return {
id_header: user_or_host_data.get('id'),
@@ -4046,13 +4073,14 @@ def list_risky_users_or_host_command(client: CoreClient, command: str, args: dic
ValueError: If the API connection fails.
"""
+
def _warn_if_module_is_disabled(e: DemistoException) -> None:
if (
- e is not None
- and e.res is not None
- and e.res.status_code == 500
- and 'No identity threat' in str(e)
- and "An error occurred while processing XDR public API" in e.message
+ e is not None
+ and e.res is not None
+ and e.res.status_code == 500
+ and 'No identity threat' in str(e)
+ and "An error occurred while processing XDR public API" in e.message
):
return_warning(f'Please confirm the XDR Identity Threat Module is enabled.\nFull error message: {e}', exit=True)
diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
index 44600411bb20..032a6dfd2fd5 100644
--- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
+++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
@@ -9,7 +9,7 @@
import demistomock as demisto
from CommonServerPython import Common, tableToMarkdown, pascalToSpace, DemistoException
-from CoreIRApiModule import CoreClient, handle_outgoing_issue_closure
+from CoreIRApiModule import CoreClient, handle_outgoing_issue_closure, XSOAR_RESOLVED_STATUS_TO_XDR
from CoreIRApiModule import add_tag_to_endpoints_command, remove_tag_from_endpoints_command, quarantine_files_command, \
isolate_endpoint_command, list_user_groups_command, parse_user_groups, list_users_command, list_roles_command, \
change_user_role_command, list_risky_users_or_host_command, enrich_error_message_id_group_role, get_incidents_command
@@ -18,7 +18,6 @@
base_url='https://test_api.com/public_api/v1', headers={}
)
-
Core_URL = 'https://api.xdrurl.com'
''' HELPER FUNCTIONS '''
@@ -544,7 +543,7 @@ def test_allowlist_files_command_with_more_than_one_file(requests_mock):
test_data = load_test_data('test_data/blocklist_allowlist_files_success.json')
expected_command_result = {
'CoreApiModule.allowlist.added_hashes.fileHash(val.fileHash == obj.fileHash)':
- test_data['multi_command_args']['hash_list']
+ test_data['multi_command_args']['hash_list']
}
requests_mock.post(f'{Core_URL}/public_api/v1/hash_exceptions/allowlist/', json=test_data['api_response'])
@@ -859,13 +858,14 @@ def test_handle_outgoing_issue_closure_close_reason(mocker):
"""
from CoreIRApiModule import handle_outgoing_issue_closure
from CommonServerPython import UpdateRemoteSystemArgs
- remote_args = UpdateRemoteSystemArgs({'delta': {'assigned_user_mail': 'None', 'closeReason': 'Resolved - Security Testing'},
+ remote_args = UpdateRemoteSystemArgs({'delta': {'assigned_user_mail': 'None', 'closeReason': 'Security Testing'},
'status': 2, 'inc_status': 2, 'data': {'status': 'other'}})
request_data_log = mocker.patch.object(demisto, 'debug')
handle_outgoing_issue_closure(remote_args)
- assert "handle_outgoing_issue_closure Closing Remote incident incident_id=None with status resolved_security_testing" in request_data_log.call_args[ # noqa: E501
- 0][0]
+ assert "handle_outgoing_issue_closure Closing Remote incident incident_id=None with status resolved_security_testing" in \
+ request_data_log.call_args[ # noqa: E501
+ 0][0]
def test_get_update_args_close_incident():
@@ -3168,8 +3168,8 @@ def test_endpoint_alias_change_command__no_filters(mocker):
},
{
"err_msg": "An error occurred while processing XDR public API - No endpoint "
- "was found "
- "for creating the requested action",
+ "was found "
+ "for creating the requested action",
"status_code": 500,
},
False,
@@ -3444,7 +3444,7 @@ def test_parse_user_groups(data: dict[str, Any], expected_results: list[dict[str
[
({"group_names": "test"}, "Error: Group test was not found. Full error message: Group 'test' was not found"),
({"group_names": "test, test2"}, "Error: Group test was not found. Note: If you sent more than one group name, "
- "they may not exist either. Full error message: Group 'test' was not found")
+ "they may not exist either. Full error message: Group 'test' was not found")
]
)
def test_list_user_groups_command_raise_exception(mocker, test_data: dict[str, str], excepted_error: str):
@@ -3844,3 +3844,67 @@ def test_handle_outgoing_issue_closure(args, expected_delta):
remote_args = UpdateRemoteSystemArgs(args)
handle_outgoing_issue_closure(remote_args)
assert remote_args.delta == expected_delta
+
+
+@pytest.mark.parametrize('custom_mapping, expected_resolved_status',
+ [
+ ("Other=Other,Duplicate=Other,False Positive=False Positive,Resolved=True Positive",
+ ["resolved_other", "resolved_other", "resolved_false_positive", "resolved_true_positive",
+ "resolved_security_testing"]),
+
+ ("Other=True Positive,Duplicate=Other,False Positive=False Positive,Resolved=True Positive",
+ ["resolved_true_positive", "resolved_other", "resolved_false_positive",
+ "resolved_true_positive", "resolved_security_testing"]),
+
+ ("Duplicate=Other", ["resolved_other", "resolved_other", "resolved_false_positive",
+ "resolved_true_positive", "resolved_security_testing"]),
+
+ # Expecting default mapping to be used when no mapping provided.
+ ("", list(XSOAR_RESOLVED_STATUS_TO_XDR.values())),
+
+ # Expecting default mapping to be used when improper mapping is provided.
+ ("Duplicate=RANDOM1, Other=Random2", list(XSOAR_RESOLVED_STATUS_TO_XDR.values())),
+
+ ("Random1=Duplicate Incident", list(XSOAR_RESOLVED_STATUS_TO_XDR.values())),
+
+ # Expecting default mapping to be used when improper mapping *format* is provided.
+ ("Duplicate=Other False Positive=Other", list(XSOAR_RESOLVED_STATUS_TO_XDR.values())),
+
+ # Expecting default mapping to be used for when improper key-value pair *format* is provided.
+ ("Duplicate=Other, False Positive=Other True Positive=Other, Other=True Positive",
+ ["resolved_true_positive", "resolved_other", "resolved_false_positive",
+ "resolved_true_positive", "resolved_security_testing"]),
+
+ ],
+ ids=["case-1", "case-2", "case-3", "empty-case", "improper-input-case-1", "improper-input-case-2",
+ "improper-input-case-3", "improper-input-case-4"]
+ )
+def test_xsoar_to_xdr_flexible_close_reason_mapping(capfd, mocker, custom_mapping, expected_resolved_status):
+ """
+ Given:
+ - A custom XSOAR->XDR close-reason mapping
+ - Expected resolved XDR status according to the custom mapping.
+ When
+ - Handling outgoing issue closure (handle_outgoing_issue_closure(...) executed).
+ Then
+ - The resolved XDR statuses match the expected statuses for all possible XSOAR close-reasons.
+ """
+ from CoreIRApiModule import handle_outgoing_issue_closure
+ from CommonServerPython import UpdateRemoteSystemArgs
+
+ mocker.patch.object(demisto, 'params', return_value={"mirror_direction": "Both",
+ "custom_xsoar_to_xdr_close_reason_mapping": custom_mapping})
+
+ all_xsoar_close_reasons = XSOAR_RESOLVED_STATUS_TO_XDR.keys()
+ for i, close_reason in enumerate(all_xsoar_close_reasons):
+ remote_args = UpdateRemoteSystemArgs({'delta': {'closeReason': close_reason},
+ 'status': 2,
+ 'inc_status': 2,
+ 'data': {'status': 'other'}
+ })
+ # Overcoming expected non-empty stderr test failures (Errors are submitted to stderr when improper mapping is provided).
+ with capfd.disabled():
+ handle_outgoing_issue_closure(remote_args)
+
+ assert remote_args.delta.get('status')
+ assert remote_args.delta['status'] == expected_resolved_status[i]
diff --git a/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.py b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.py
new file mode 100644
index 000000000000..5698fa3baf60
--- /dev/null
+++ b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.py
@@ -0,0 +1,228 @@
+from CommonServerPython import *
+
+DEFAULT_LIMIT = 500
+DEFAULT_PAGE_SIZE = 100
+DEFAULT_TIME_FIELD = "created"
+
+
+def build_query_parameter(
+ custom_query: str | None,
+ incident_types: list[str],
+ time_field: str,
+ from_date: str | None,
+ to_date: str | None,
+ non_empty_fields: list[str],
+) -> str:
+ """Builds the query parameter string from given arguments.
+
+ Args:
+ custom_query (str | None): A custom query.
+ incident_types (list[str] | None): Incident types to retrieve.
+ time_field (str): The relevant time field to search by.
+ from_date (str | None): The start date for the incidents query.
+ to_date (str | None): The end date for the incidents query.
+ non_empty_fields (list[str]): Required non-empty incident fields.
+
+ Raises:
+ Exception: If no query parts were added.
+
+ Returns:
+ str: The query string built from the given arguments.
+ """
+ query_parts = []
+ if custom_query:
+ query_parts.append(custom_query)
+ if incident_types:
+ types = [x if "*" in x else f'"{x}"' for x in incident_types]
+ query_parts.append(f"type:({' '.join(types)})")
+ if from_date and time_field:
+ query_parts.append(f'{time_field}:>="{from_date}"')
+ if to_date and time_field:
+ query_parts.append(f'{time_field}:<"{to_date}"')
+ if non_empty_fields:
+ query_parts.append(" and ".join(f"{x}:*" for x in non_empty_fields))
+ if not query_parts:
+ raise DemistoException("Incidents query is empty - please fill at least one argument")
+ return " and ".join(f"({x})" for x in query_parts)
+
+
+def format_incident(inc: dict, fields_to_populate: list[str], include_context: bool) -> dict:
+ """Flattens custom fields with incident data and filters by fields_to_populate
+
+ Args:
+ inc (dict): An incident.
+ fields_to_populate (list[str]): List of fields to populate.
+ include_context (bool): Whether or not to enrich the incident with its context data.
+
+ Returns:
+ dict: The formatted incident.
+ """
+ custom_fields = inc.pop('CustomFields', {})
+ inc.update(custom_fields or {})
+ if fields_to_populate:
+ inc = {k: v for k, v in inc.items() if k.lower() in {val.lower() for val in fields_to_populate}}
+ if any(f.lower() == "customfields" for f in fields_to_populate):
+ inc["CustomFields"] = custom_fields
+ if include_context:
+ inc['context'] = execute_command("getContext", {"id": inc["id"]}, extract_contents=True)
+ return inc
+
+
+def get_incidents_with_pagination(
+ query: str,
+ from_date: str | None,
+ to_date: str | None,
+ fields_to_populate: list[str],
+ include_context: bool,
+ limit: int,
+ page_size: int,
+ sort: str,
+) -> list[dict]:
+ """Performs paginated getIncidents requests until limit is reached or no more results.
+ Each incident in the response is formatted before returned.
+
+ Args:
+ query (str): The incidents query string.
+ from_date (str | None): The fromdate argument for the incidents query.
+ to_date (str | None): The todate argument for the incidents query.
+ fields_to_populate (list[str]): The fields to populate for each incident.
+ include_context (bool): Whether or not to enrich the returned incidents with their the context data.
+ limit (int): Maximum number of incidents to return.
+ page_size (int): Number of incidents to retrieve per page.
+ sort (str): Sort order for incidents.
+
+ Returns:
+ list[dict]: The requested incidents.
+ """
+ incidents: list = []
+ page = -1
+ populate_fields = ",".join(f.split(".")[0] for f in fields_to_populate)
+ demisto.debug(f"Running getIncidents with {query=}")
+ while len(incidents) < limit:
+ page += 1
+ page_results = execute_command(
+ "getIncidents",
+ args={
+ "query": query,
+ "fromdate": from_date,
+ "todate": to_date,
+ "page": page,
+ "populateFields": populate_fields,
+ "size": page_size,
+ "sort": sort,
+ },
+ extract_contents=True,
+ fail_on_error=True,
+ ).get('data') or []
+ incidents += page_results
+ if len(page_results) < page_size:
+ break
+ return [
+ format_incident(inc, fields_to_populate, include_context)
+ for inc in incidents[:limit]
+ ]
+
+
+def prepare_fields_list(fields_list: list[str] | None) -> list[str]:
+ """Removes `incident.` prefix from the fields list and returns a list of unique values.
+
+ Args:
+ fields_list (list[str] | None): The current state of the fields list, as provided by the user.
+
+ Returns:
+ list[str]: The prepared fields list.
+ """
+ return list({
+ field.removeprefix("incident.") for field in fields_list if field
+ }) if fields_list else []
+
+
+def get_incidents(
+ custom_query: str | None = None,
+ incident_types: list[str] | None = None,
+ populate_fields: list[str] | None = None,
+ non_empty_fields: list[str] | None = None,
+ time_field: str = DEFAULT_TIME_FIELD,
+ from_date: datetime | None = None,
+ to_date: datetime | None = None,
+ include_context: bool = False,
+ limit: int = DEFAULT_LIMIT,
+ page_size: int = DEFAULT_PAGE_SIZE,
+) -> list[dict]:
+ """Performs a deeper formatting on the search arguments and runs paginated incidents search.
+
+ Args:
+ custom_query (str | None, optional): A custom query. Defaults to None.
+ incident_types (list[str] | None, optional): Incident types to retrieve. Defaults to None.
+ populate_fields (list[str] | None, optional): Incident fields to populate. Defaults to None.
+ non_empty_fields (list[str] | None, optional): Required non-empty incident fields. Defaults to None.
+ from_date (datetime | None, optional): The start date of the timeframe. Defaults to None.
+ to_date (datetime | None, optional): The end date of the timeframe. Defaults to None.
+ time_field (str, optional): The relevant time field to search by. Defaults to "created".
+ include_context (bool, optional): Whether or not to enrich the returned incidents with their the context data.
+ Defaults to False.
+ limit (int, optional): The search limit. Defaults to 500.
+ page_size (int, optional): Maximal page size. Defaults to 100.
+
+ Returns:
+ list[dict]: The requested incidents.
+ """
+ non_empty_fields = prepare_fields_list(non_empty_fields)
+
+ if populate_fields := prepare_fields_list(populate_fields):
+ populate_fields.extend(non_empty_fields + ["id"])
+ populate_fields = list(set(populate_fields))
+
+ query = build_query_parameter(
+ custom_query,
+ incident_types or [],
+ time_field,
+ from_date.isoformat() if from_date and time_field != "created" else None,
+ to_date.isoformat() if to_date and time_field != "created" else None,
+ non_empty_fields,
+ )
+
+ return get_incidents_with_pagination(
+ query,
+ from_date.astimezone().isoformat() if from_date and time_field == "created" else None,
+ to_date.astimezone().isoformat() if to_date and time_field == "created" else None,
+ populate_fields,
+ include_context,
+ limit,
+ page_size=min(limit, page_size),
+ sort=f"{time_field}.desc",
+ )
+
+
+def get_incidents_by_query(args: dict) -> list[dict]:
+ """Performs an initial parsing of args and calls the get_incidents method.
+
+ Args:
+ args (dict): the GetIncidentsByQuery arguments.
+
+ Returns:
+ list[dict]: The requested incidents.
+ """
+ query = args.get("query")
+ incident_types = argToList(args.get("incidentTypes"), transform=str.strip)
+ populate_fields = argToList(args.get("populateFields"), transform=str.strip)
+ non_empty_fields = argToList(args.get("NonEmptyFields"), transform=str.strip)
+ time_field = args.get("timeField") or DEFAULT_TIME_FIELD
+ from_date = arg_to_datetime(args.get("fromDate"))
+ to_date = arg_to_datetime(args.get("toDate"))
+ include_context = argToBoolean(args.get("includeContext") or False)
+ limit = arg_to_number(args.get("limit")) or DEFAULT_LIMIT
+ page_size = arg_to_number(args.get("pageSize")) or DEFAULT_PAGE_SIZE
+
+ return get_incidents(
+ query,
+ incident_types,
+ populate_fields,
+ non_empty_fields,
+ time_field,
+ from_date,
+ to_date,
+ include_context,
+ limit,
+ page_size,
+ )
diff --git a/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.yml b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.yml
new file mode 100644
index 000000000000..67f64d063482
--- /dev/null
+++ b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule.yml
@@ -0,0 +1,16 @@
+comment: Common code that is appended into scripts which require searching incidents.
+commonfields:
+ id: GetIncidentsApiModule
+ version: -1
+name: GetIncidentsApiModule
+script: '-'
+subtype: python3
+tags:
+- infra
+- server
+timeout: 0s
+type: python
+dockerimage: demisto/python3:3.10.12.66339
+tests:
+- No Tests
+fromversion: 5.0.0
diff --git a/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule_test.py b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule_test.py
new file mode 100644
index 000000000000..42f5849c6d43
--- /dev/null
+++ b/Packs/ApiModules/Scripts/GetIncidentsApiModule/GetIncidentsApiModule_test.py
@@ -0,0 +1,266 @@
+import pytest
+from GetIncidentsApiModule import *
+
+
+def mock_incident(
+ inc_id: int,
+ inc_type: str,
+ created: str,
+ modified: str,
+ **kwargs,
+) -> dict:
+ # helper method for creating mock incidents
+ return {
+ "id": inc_id,
+ "name": f"This is incident {inc_id}",
+ "type": inc_type,
+ "severity": 0,
+ "status": 1,
+ "created": created,
+ "modified": modified,
+ "CustomFields": {
+ "testField": "testValue"
+ },
+ "closed": "0001-01-01T00:00:00Z",
+ "labels": [{"type": "subject", "value": "This subject1"}, {"type": "unique", "value": "This subject1"}],
+ "attachment": [{"name": "Test word1 word2"}],
+ } | kwargs
+
+
+INCIDENTS_LIST = [
+ mock_incident(1, "Phishing", "2019-01-02T00:00:00Z", "2020-01-02T00:00:00Z"),
+ mock_incident(2, "Phishing", "2019-02-02T00:00:00Z", "2020-02-02T00:00:00Z"),
+ mock_incident(3, "Malware", "2020-02-02T00:00:00Z", "2020-02-02T00:00:00Z"),
+ mock_incident(4, "Malware", "2021-02-02T00:00:00Z", "2021-02-02T00:00:00Z"),
+ mock_incident(5, "Malware", "2021-02-02T00:00:00Z", "2021-02-02T00:00:00Z"),
+ mock_incident(6, "Unclassified", "2021-02-02T00:00:00Z", "2021-02-02T00:00:00Z"),
+ mock_incident(7, "Unclassified", "2021-02-02T00:00:00Z", "2021-02-02T00:00:00Z"),
+]
+
+
+def does_incident_match_query(
+ inc: dict,
+ time_field: str,
+ from_date_str: str,
+ to_date_str: str,
+ incident_types: list[str],
+) -> bool:
+ # a helper method for mock_execute_command() that determines
+ # whether an incident should be part of the response
+ if not incident_types or inc["type"] in incident_types:
+ inc_time_field = dateparser.parse(inc[time_field])
+ from_date = dateparser.parse(from_date_str or inc[time_field])
+ to_date = dateparser.parse(to_date_str or inc[time_field])
+ return from_date <= inc_time_field < to_date
+ return False
+
+
+def mock_execute_command(command: str, args: dict) -> list[dict]:
+ # Mock implementations for `getIncidents` and `getContext` builtin commands.
+ match command:
+ case "getIncidents":
+ page = args["page"]
+ size = args["size"]
+ query = args["query"] or ""
+ incident_types = []
+ time_field = "modified" if "modified" in query else "created"
+ from_date = args["fromdate"]
+ to_date = args["todate"]
+ # populate_fields = args["populateFields"] or []
+
+ if match := re.search(r"\(modified:>=\"([^\"]*)\"\)", query):
+ from_date = match.group(1)
+ if match := re.search(r"\(modified:<\"([^\"]*)\"\)", query):
+ to_date = match.group(1)
+ if match := re.search(r"\(type:\(([^)]*)\)\)", query):
+ incident_types = argToList(match.group(1), separator=" ", transform=lambda t: t.strip("\""))
+ res = [
+ i # {k: v for k, v in i.items() if not populate_fields or k in populate_fields}
+ for i in INCIDENTS_LIST
+ if does_incident_match_query(i, time_field, from_date, to_date, incident_types)
+ ][page * size:(page + 1) * size]
+ return [{"Contents": {"data": res}, "Type": "json"}]
+ case "getContext":
+ return [{"Contents": "context", "Type": "json"}]
+ case _:
+ raise Exception(f"Unmocked command: {command}")
+
+
+def test_prepare_fields_list():
+ """
+ Given: A list of incident fields
+ When: Running prepare_fields_list()
+ Then: Ensure a unique list of fields without the `incident.` prefix for each item is returned
+ """
+ assert prepare_fields_list(["incident.hello", "", "hello"]) == ["hello"]
+
+
+def test_build_query():
+ """
+ Given: Different query arguments
+ When: Running build_query_parameter()
+ Then: Ensure the result is a query string in the expected format
+ """
+ query = build_query_parameter(
+ custom_query="Extra part",
+ incident_types=["*phish*", "Malware"],
+ time_field="modified",
+ from_date="2019-01-10T00:00:00",
+ to_date="2019-01-12T00:00:00",
+ non_empty_fields=["status", "closeReason"],
+ )
+ assert query == (
+ "(Extra part) and (type:(*phish* \"Malware\")) and (modified:>=\"2019-01-10T00:00:00\") "
+ "and (modified:<\"2019-01-12T00:00:00\") and (status:* and closeReason:*)"
+ )
+
+
+def test_build_query_bad():
+ """
+ Given: No query arguments
+ When: Running build_query_parameter()
+    Then: Ensure a DemistoException is raised
+ """
+ with pytest.raises(DemistoException):
+ build_query_parameter(
+ custom_query=None,
+ incident_types=[],
+ time_field=None,
+ from_date=None,
+ to_date=None,
+ non_empty_fields=[],
+ )
+
+
+def test_get_incidents_by_query_sanity_test(mocker):
+ """
+ Given:
+ - A mock incidents database (INCIDENTS_LIST)
+ - Search incidents query arguments
+ When: Running get_incidents_by_query()
+ Then: Ensure the expected 4 incidents are returned
+ """
+ mocker.patch.object(demisto, "executeCommand", side_effect=mock_execute_command)
+ args = {
+ "incidentTypes": "Phishing,Malware",
+ "timeField": "created",
+ "fromDate": "2019-02-01T00:00:00",
+ "toDate": "3 days ago",
+ "limit": "10",
+ "includeContext": "false",
+ "pageSize": "10",
+ }
+ results = get_incidents_by_query(args)
+ assert len(results) == 4
+ assert all(
+ inc["type"] in args["incidentTypes"] for inc in results
+ )
+ assert all(
+ dateparser.parse(args["fromDate"]).astimezone() <= dateparser.parse(inc["created"])
+ for inc in results
+ )
+ assert all(
+ dateparser.parse(inc["created"]) < dateparser.parse(args["toDate"]).astimezone()
+ for inc in results
+ )
+
+
+def test_get_incidents_by_query_with_pagination(mocker):
+ """
+ Given:
+ - A mock incidents database (INCIDENTS_LIST)
+ - Search incidents query arguments that should return 4 incidents (same as the sanity test)
+ When:
+ - pageSize is 3
+ Then:
+ - Ensure the expected 4 incidents are returned
+ - Ensure executeCommand was called twice
+ """
+ execute_command = mocker.patch.object(demisto, "executeCommand", side_effect=mock_execute_command)
+ args = {
+ "incidentTypes": "Phishing,Malware",
+ "timeField": "created",
+ "fromDate": "2019-02-01T00:00:00",
+ "toDate": "3 days ago",
+ "limit": "10",
+ "includeContext": "false",
+ "pageSize": "3",
+ }
+ results = get_incidents_by_query(args)
+ assert len(results) == 4
+ assert execute_command.call_count == 2
+
+
+def test_get_incidents_by_query_with_populate_fields(mocker):
+ """
+ Given:
+ - A mock incidents database (INCIDENTS_LIST)
+ - Search incidents query arguments that should return 4 incidents (same as the sanity test)
+ When:
+ - populateFields is id,name,testField
+ Then:
+ - Ensure the expected 4 incidents are returned
+ - Ensure the returned incidents' keys are "id", "name", and "testField" only.
+ """
+ mocker.patch.object(demisto, "executeCommand", side_effect=mock_execute_command)
+ args = {
+ "incidentTypes": "Phishing,Malware",
+ "timeField": "created",
+ "fromDate": "2019-02-01T00:00:00",
+ "toDate": "3 days ago",
+ "limit": "10",
+ "includeContext": "false",
+ "pageSize": "10",
+ "populateFields": "id,name,testField"
+ }
+ results = get_incidents_by_query(args)
+ assert len(results) == 4
+ assert all(set(inc.keys()) == {"id", "name", "testField"} for inc in results)
+
+
+def test_get_incidents_by_query_with_context(mocker):
+ """
+ - A mock incidents database (INCIDENTS_LIST)
+ - Search incidents query arguments that should return 4 incidents (same as the sanity test)
+ When:
+ - includeContext is true
+ Then:
+ - Ensure the expected 4 incidents are returned
+    - Ensure each incident has a non-empty context key
+ """
+ mocker.patch.object(demisto, "executeCommand", side_effect=mock_execute_command)
+ args = {
+ "incidentTypes": "Phishing,Malware",
+ "timeField": "created",
+ "fromDate": "2019-02-01T00:00:00",
+ "toDate": "3 days ago",
+ "limit": "10",
+ "includeContext": "true",
+ "pageSize": "10",
+ }
+ results = get_incidents_by_query(args)
+ assert len(results) == 4
+ assert all(inc["context"] for inc in results)
+
+
+def test_get_incidents_by_query_timefield_is_modified(mocker):
+ """
+ - A mock incidents database (INCIDENTS_LIST)
+ - Search incidents query arguments
+ When:
+ - timeField is modified
+ Then:
+ - Ensure the expected 1 incident is returned
+ """
+ execute_command = mocker.patch.object(demisto, "executeCommand", side_effect=mock_execute_command)
+ args = {
+ "timeField": "modified",
+ "fromDate": "2020-01-02T00:00:00Z",
+ "toDate": "2020-01-03T00:00:00Z",
+ "limit": "10",
+ "includeContext": "false",
+ "pageSize": "3",
+ }
+ results = get_incidents_by_query(args)
+ assert len(results) == 1
+ assert execute_command.call_count == 1
diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
index fcabf902117d..6720c5ec2bd7 100644
--- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
+++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
@@ -1496,7 +1496,7 @@ def generate_login_url(client: MicrosoftClient,
login_url = urljoin(login_url, f'{client.tenant_id}/oauth2/v2.0/authorize?'
f'response_type=code&scope=offline_access%20{client.scope.replace(" ", "%20")}'
- f'&client_id={client.client_id}&redirect_uri={client.redirect_uri}')
+ f'&client_id={client.client_id}&redirect_uri={client.redirect_uri}&prompt=consent')
result_msg = f"""### Authorization instructions
1. Click on the [login URL]({login_url}) to sign in and grant Cortex XSOAR permissions for your Azure Service Management.
diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
index 88420692c887..40c135e09e0b 100644
--- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
+++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
@@ -724,7 +724,7 @@ def test_generate_login_url():
expected_url = f'[login URL](https://login.microsoftonline.com/{TENANT}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20https://graph.microsoft.com/.default' \
- f'&client_id={CLIENT_ID}&redirect_uri=https://localhost/myapp)'
+ f'&client_id={CLIENT_ID}&redirect_uri=https://localhost/myapp&prompt=consent)'
assert expected_url in result.readable_output, "Login URL is incorrect"
diff --git a/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule.py b/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule.py
index a6ee4040d4a1..d6e0eb735409 100644
--- a/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule.py
+++ b/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule.py
@@ -1,10 +1,11 @@
from CommonServerPython import *
-import jwt
import uuid
from datetime import datetime, timedelta
from enum import Enum
+import jwt
+
TOKEN_EXPIRATION_TIME = 60 # In minutes. This value must be a maximum of only an hour (according to Okta's documentation).
TOKEN_RENEWAL_TIME_LIMIT = 60 # In seconds. The minimum time before the token expires to renew it.
@@ -26,13 +27,13 @@ class AuthType(Enum):
class OktaClient(BaseClient):
- def __init__(self, api_token: str, auth_type: AuthType = AuthType.API_TOKEN,
+ def __init__(self, auth_type: AuthType = AuthType.API_TOKEN, api_token: str | None = None,
client_id: str | None = None, scopes: list[str] | None = None, private_key: str | None = None,
jwt_algorithm: JWTAlgorithm | None = None, *args, **kwargs):
"""
Args:
- api_token (str): API token for authentication.
auth_type (AuthType, optional): The type of authentication to use.
+ api_token (str | None, optional): API token for authentication (required if 'auth_type' is AuthType.API_TOKEN).
client_id (str | None, optional): Client ID for OAuth authentication (required if 'auth_type' is AuthType.OAUTH).
scopes (list[str] | None, optional): A list of scopes to request for the token
(required if 'auth_type' is AuthType.OAUTH).
@@ -41,36 +42,37 @@ def __init__(self, api_token: str, auth_type: AuthType = AuthType.API_TOKEN,
(required if 'auth_type' is AuthType.OAUTH).
"""
super().__init__(*args, **kwargs)
- self.api_token = api_token
self.auth_type = auth_type
+ self.api_token = api_token
+
+ self.client_id = client_id
+ self.scopes = scopes
+ self.jwt_algorithm = jwt_algorithm
+ self.private_key = private_key
+
missing_required_params = []
+ if self.auth_type == AuthType.API_TOKEN and not api_token:
+ raise ValueError('API token is missing')
+
if self.auth_type == AuthType.OAUTH:
- if not client_id:
+ if not self.client_id:
missing_required_params.append('Client ID')
- if not scopes:
+ if not self.scopes:
missing_required_params.append('Scopes')
- if not jwt_algorithm:
+ if not self.jwt_algorithm:
missing_required_params.append('JWT algorithm')
- if not private_key:
+ if not self.private_key:
missing_required_params.append('Private key')
if missing_required_params:
raise ValueError(f'Required OAuth parameters are missing: {", ".join(missing_required_params)}')
- # Set type of variables non-optional after we assured they're assigned for mypy
- self.client_id: str = client_id # type: ignore
- self.scopes: list[str] = scopes # type: ignore
- self.private_key: str = private_key # type: ignore
- self.jwt_algorithm: JWTAlgorithm = jwt_algorithm # type: ignore
-
- self.initial_setup()
-
- def assign_app_role(self, client_id: str, role: str, auth_type: AuthType = AuthType.API_TOKEN) -> dict:
+ def assign_app_role(self, client_id: str, role: str, auth_type: AuthType) -> dict:
"""
Assign a role to a client application.
@@ -113,8 +115,8 @@ def generate_jwt_token(self, url: str) -> str:
'sub': self.client_id,
'jti': str(uuid.uuid4()),
},
- key=self.private_key,
- algorithm=self.jwt_algorithm.value,
+ key=self.private_key, # type: ignore[arg-type]
+ algorithm=self.jwt_algorithm.value, # type: ignore[union-attr]
)
def generate_oauth_token(self, scopes: list[str]) -> dict:
@@ -148,7 +150,7 @@ def generate_oauth_token(self, scopes: list[str]) -> dict:
def get_token(self):
"""
- Get an API token for authentication.
+ Get an OAuth token for authentication.
If there isn't an existing one, or the existing one is expired, a new one will be generated.
"""
expiration_time_format = '%Y-%m-%dT%H:%M:%S'
@@ -170,7 +172,7 @@ def get_token(self):
else:
demisto.debug('No existing token was found. A new token will be generated.')
- token_generation_response = self.generate_oauth_token(scopes=self.scopes)
+ token_generation_response = self.generate_oauth_token(scopes=self.scopes) # type: ignore[arg-type]
token: str = token_generation_response['access_token']
expires_in: int = token_generation_response['expires_in']
token_expiration = datetime.utcnow() + timedelta(seconds=expires_in)
@@ -182,36 +184,6 @@ def get_token(self):
return token
- def initial_setup(self):
- """
- Initial setup for the first time the integration is used.
- """
- integration_context = get_integration_context()
-
- if integration_context.get('initialized', False): # If the initial setup was already done, do nothing
- return
-
- if self.auth_type == AuthType.OAUTH:
- # Add "SUPER_ADMIN" role to client application, which is required for OAuth authentication
- try:
- self.assign_app_role(client_id=self.client_id, role="SUPER_ADMIN", auth_type=AuthType.API_TOKEN)
- demisto.debug("'SUPER_ADMIN' role has been assigned to the client application.")
-
- except DemistoException as e:
- # If the client application already has the "SUPER_ADMIN" role, ignore the error.
- # E0000090 Error code official docs description: Duplicate role assignment exception.
- if e.res.headers.get('content-type') == 'application/json' and e.res.json().get('errorCode') == 'E0000090':
- demisto.debug('The client application already has the "SUPER_ADMIN" role assigned.')
-
- else:
- raise e
-
- self.get_token()
-
- integration_context = get_integration_context()
- integration_context['initialized'] = True
- set_integration_context(integration_context)
-
def http_request(self, auth_type: AuthType | None = None, **kwargs):
"""
Override BaseClient._http_request() to automatically add authentication headers.
diff --git a/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule_test.py b/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule_test.py
index 3b61a355fc19..9f93279f92df 100644
--- a/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule_test.py
+++ b/Packs/ApiModules/Scripts/OktaApiModule/OktaApiModule_test.py
@@ -46,7 +46,7 @@ def test_okta_client_no_required_params():
OktaClient(
base_url='https://test.url',
api_token='X',
- auth_type=AuthType.NO_AUTH
+ auth_type=AuthType.API_TOKEN,
)
@@ -56,7 +56,6 @@ def test_assign_app_role(mocker):
When: Assigning a role to a client application
Then: Assure the call is made properly, and that the 'auth_type' parameter overrides the client's auth type.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
@@ -116,7 +115,6 @@ def test_generate_jwt_token(mocker):
When: Generating a JWT token
Then: Assure the token is generated correctly.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
@@ -169,7 +167,6 @@ def test_generate_oauth_token(mocker):
When: Generating an OAuth token
Then: Assure the token generation API call is called correctly.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
@@ -209,7 +206,6 @@ def test_get_token_create_new_token(mocker):
Then: Assure a new token is generated, and that the integration context is updated with the new token.
"""
import OktaApiModule
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient( # 'initial_setup' is called within the constructor
base_url='https://test.url',
api_token='X',
@@ -240,7 +236,6 @@ def test_get_token_use_existing(mocker):
Then: Assure the existing token is returned.
"""
import OktaApiModule
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient( # 'initial_setup' is called within the constructor
base_url='https://test.url',
api_token='X',
@@ -264,7 +259,6 @@ def test_get_token_regenerate_existing(mocker):
Then: Assure a new token is generated
"""
import OktaApiModule
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient( # 'initial_setup' is called within the constructor
base_url='https://test.url',
api_token='X',
@@ -291,7 +285,6 @@ def test_http_request_no_auth(mocker):
When: Making an API call with no authentication
Then: Assure the call is made without any authentication headers.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
@@ -322,7 +315,6 @@ def test_http_request_api_token_auth(mocker):
When: Making an API call with API token authentication
Then: Assure the call is made with the API token properly used in the 'Authorization' header.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
@@ -354,7 +346,6 @@ def test_http_request_oauth_auth(mocker):
When: Making an API call with OAuth authentication
Then: Assure the call is made with the JWT token properly used in the 'Authorization' header.
"""
- mocker.patch.object(OktaClient, 'initial_setup')
client = OktaClient(
base_url='https://test.url',
api_token='X',
diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
index e8169418fab4..01ae984e4b60 100644
--- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
+++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
@@ -1,4 +1,3 @@
-import logging
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
@@ -7,8 +6,11 @@
from typing import Optional, Tuple
from requests.sessions import merge_setting, CaseInsensitiveDict
+from requests.exceptions import HTTPError
import re
import copy
+import logging
+import traceback
import types
import urllib3
from taxii2client import v20, v21
@@ -20,7 +22,7 @@
urllib3.disable_warnings()
-class SuppressWarningFilter(logging.Filter): # pragma: no cover
+class XsoarSuppressWarningFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# Suppress all logger records, but send the important ones to demisto logger
if record.levelno == logging.WARNING:
@@ -30,8 +32,14 @@ def filter(self, record):
return False
+# Make sure we have only one XsoarSuppressWarningFilter
v21_logger = logging.getLogger("taxii2client.v21")
-v21_logger.addFilter(SuppressWarningFilter())
+demisto.debug(f'Logging Filters before cleaning: {v21_logger.filters=}')
+for current_filter in list(v21_logger.filters): # pragma: no cover
+ if 'XsoarSuppressWarningFilter' in type(current_filter).__name__:
+ v21_logger.removeFilter(current_filter)
+v21_logger.addFilter(XsoarSuppressWarningFilter())
+demisto.debug(f'Logging Filters: {v21_logger.filters=}')
# CONSTANTS
TAXII_VER_2_0 = "2.0"
@@ -323,6 +331,12 @@ def init_roots(self):
# try TAXII 2.1
self.set_api_root()
# (TAXIIServiceException, HTTPError) should suffice, but sometimes it raises another type of HTTPError
+ except HTTPError as e:
+ if e.response.status_code != 406 and "version=2.0" not in str(e):
+ raise e
+ # switch to TAXII 2.0
+ self.init_server(version=TAXII_VER_2_0)
+ self.set_api_root()
except Exception as e:
if "406 Client Error" not in str(e) and "version=2.0" not in str(e):
raise e
@@ -382,7 +396,7 @@ def init_collection_to_fetch(self, collection_to_fetch=None):
break
if not collection_found:
raise DemistoException(
- "Could not find the provided Collection name in the available collections. "
+ f"Could not find the provided Collection name {collection_to_fetch} in the available collections. "
"Please make sure you entered the name correctly."
)
@@ -1181,7 +1195,7 @@ def build_iterator(self, limit: int = -1, **kwargs) -> list[dict[str, str]]:
envelopes = self.poll_collection(page_size, **kwargs) # got data from server
indicators = self.load_stix_objects_from_envelope(envelopes, limit)
except InvalidJSONError as e:
- demisto.debug(f'Excepted InvalidJSONError, continuing with empty result.\nError: {e}')
+            demisto.debug(f'Expected InvalidJSONError, continuing with empty result.\nError: {e}, {traceback.format_exc()}')
# raised when the response is empty, because {} is parsed into '筽'
indicators = []
@@ -1219,20 +1233,35 @@ def load_stix_objects_from_envelope(self, envelopes: types.GeneratorType, limit:
if relationships_lst:
indicators.extend(self.parse_relationships(relationships_lst))
demisto.debug(
- f"TAXII 2 Feed has extracted {len(indicators)} indicators"
+ f"TAXII 2 Feed has extracted {len(list(indicators))} indicators"
)
return indicators
+ def increase_count(self, counter: Dict[str, int], id: str):
+ if id in counter:
+ counter[id] = counter[id] + 1
+ else:
+ counter[id] = 1
+
def parse_generator_type_envelope(self, envelopes: types.GeneratorType, parse_objects_func, limit: int = -1):
indicators = []
relationships_lst = []
+ # Used mainly for logging
+ parsed_objects_counter: Dict[str, int] = {}
try:
for envelope in envelopes:
- stix_objects = envelope.get("objects")
- if not stix_objects:
- # no fetched objects
- break
+ self.increase_count(parsed_objects_counter, 'envelope')
+ try:
+ stix_objects = envelope.get("objects")
+ if not stix_objects:
+ # no fetched objects
+ self.increase_count(parsed_objects_counter, 'not-parsed-envelope-not-stix')
+ break
+ except Exception as e:
+ demisto.info(f"Exception trying to get envelope objects: {e}, {traceback.format_exc()}")
+ self.increase_count(parsed_objects_counter, 'exception-envelope-get-objects')
+ continue
# we should build the id_to_object dict before iteration as some object reference each other
self.id_to_object.update(
@@ -1243,33 +1272,51 @@ def parse_generator_type_envelope(self, envelopes: types.GeneratorType, parse_ob
)
# now we have a list of objects, go over each obj, save id with obj, parse the obj
for obj in stix_objects:
- obj_type = obj.get('type')
+ try:
+ obj_type = obj.get('type')
+ except Exception as e:
+ demisto.info(f"Exception trying to get stix_object-type: {e}, {traceback.format_exc()}")
+ self.increase_count(parsed_objects_counter, 'exception-stix-object-type')
+ continue
# we currently don't support extension object
if obj_type == 'extension-definition':
demisto.debug(f'There is no parsing function for object type "extension-definition", for object {obj}.')
+ self.increase_count(parsed_objects_counter, 'not-parsed-extension-definition')
continue
elif obj_type == 'relationship':
relationships_lst.append(obj)
+ self.increase_count(parsed_objects_counter, 'not-parsed-relationship')
continue
if not parse_objects_func.get(obj_type):
demisto.debug(f'There is no parsing function for object type {obj_type}, for object {obj}.')
-
+ self.increase_count(parsed_objects_counter, f'not-parsed-{obj_type}')
+ continue
+ try:
+ if result := parse_objects_func[obj_type](obj):
+ indicators.extend(result)
+ self.update_last_modified_indicator_date(obj.get("modified"))
+ except Exception as e:
+ demisto.info(f"Exception parsing stix_object-type {obj_type}: {e}, {traceback.format_exc()}")
+ self.increase_count(parsed_objects_counter, f'exception-parsing-{obj_type}')
continue
- if result := parse_objects_func[obj_type](obj):
- indicators.extend(result)
- self.update_last_modified_indicator_date(obj.get("modified"))
+ self.increase_count(parsed_objects_counter, f'parsed-{obj_type}')
if reached_limit(limit, len(indicators)):
- demisto.debug("Reached limit of indicators to fetch")
+ demisto.debug(f"Reached the limit ({limit}) of indicators to fetch. Indicators len: {len(indicators)}."
+ f' Got {len(indicators)} indicators and {len(list(relationships_lst))} relationships.'
+ f' Objects counters: {parsed_objects_counter}')
+
return indicators, relationships_lst
except Exception as e:
+ demisto.info(f"Exception trying to parse envelope: {e}, {traceback.format_exc()}")
if len(indicators) == 0:
demisto.debug("No Indicator were parsed")
raise e
demisto.debug(f"Failed while parsing envelopes, succeeded to retrieve {len(indicators)} indicators.")
- demisto.debug("Finished parsing all objects")
+ demisto.debug(f'Finished parsing all objects. Got {len(list(indicators))} indicators '
+ f'and {len(list(relationships_lst))} relationships. Objects counters: {parsed_objects_counter}')
return indicators, relationships_lst
def poll_collection(
@@ -1286,7 +1333,9 @@ def poll_collection(
self.objects_to_fetch.append('relationship')
kwargs['type'] = self.objects_to_fetch
if isinstance(self.collection_to_fetch, v20.Collection):
+            demisto.debug(f'Collection is a v20 type collection, {self.collection_to_fetch}')
return v20.as_pages(get_objects, per_request=page_size, **kwargs)
+        demisto.debug(f'Collection is a v21 type collection, {self.collection_to_fetch}')
return v21.as_pages(get_objects, per_request=page_size, **kwargs)
def get_page_size(self, max_limit: int, cur_limit: int) -> int:
@@ -1314,6 +1363,8 @@ def extract_indicators_from_stix_objects(
extracted_objs = [
item for item in stix_objs if item.get("type") in required_objects
] # retrieve only required type
+ demisto.debug(f'Extracted {len(list(extracted_objs))} out of {len(list(stix_objs))} Stix objects with the types: '
+ f'{required_objects}')
return extracted_objs
diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
index ed6ebfaab380..dc8aced42370 100644
--- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
+++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
@@ -1446,3 +1446,25 @@ def test_reached_limit(limit, element_count, return_value):
"""
from TAXII2ApiModule import reached_limit
assert reached_limit(limit, element_count) == return_value
+
+
+def test_increase_count():
+ """
+ Given:
+ - A counters dict.
+ When:
+ - Increasing various counters.
+ Then:
+ - Assert that the counters reflect the expected values.
+ """
+ mock_client = Taxii2FeedClient(url='', collection_to_fetch='', proxies=[], verify=False, objects_to_fetch=[])
+ objects_counter: Dict[str, int] = {}
+
+ mock_client.increase_count(objects_counter, 'counter_a')
+ assert objects_counter == {'counter_a': 1}
+
+ mock_client.increase_count(objects_counter, 'counter_a')
+ assert objects_counter == {'counter_a': 2}
+
+ mock_client.increase_count(objects_counter, 'counter_b')
+ assert objects_counter == {'counter_a': 2, 'counter_b': 1}
diff --git a/Packs/ApiModules/pack_metadata.json b/Packs/ApiModules/pack_metadata.json
index 940629929a27..d269e1a6c430 100644
--- a/Packs/ApiModules/pack_metadata.json
+++ b/Packs/ApiModules/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ApiModules",
"description": "API Modules",
"support": "xsoar",
- "currentVersion": "2.2.21",
+ "currentVersion": "2.2.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AppNovi/Integrations/appNovi/appNovi.yml b/Packs/AppNovi/Integrations/appNovi/appNovi.yml
index ceef1bfa702b..964baaf08060 100644
--- a/Packs/AppNovi/Integrations/appNovi/appNovi.yml
+++ b/Packs/AppNovi/Integrations/appNovi/appNovi.yml
@@ -401,7 +401,7 @@ script:
type: textArea
description: Server IP to search.
description: Search for servers using IP address.
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
tests:
- No tests (auto formatted)
fromversion: 6.5.0
diff --git a/Packs/AppNovi/ReleaseNotes/1_0_24.md b/Packs/AppNovi/ReleaseNotes/1_0_24.md
new file mode 100644
index 000000000000..526c0cf8b34b
--- /dev/null
+++ b/Packs/AppNovi/ReleaseNotes/1_0_24.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### appNovi
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AppNovi/pack_metadata.json b/Packs/AppNovi/pack_metadata.json
index a72e33fa8a6c..eada72b7ac58 100644
--- a/Packs/AppNovi/pack_metadata.json
+++ b/Packs/AppNovi/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AppNovi",
"description": "Search your combined security data in appNovi via simplified search or search via the appNovi security graph.",
"support": "partner",
- "currentVersion": "1.0.23",
+ "currentVersion": "1.0.24",
"author": "appNovi",
"url": "https://appnovi.com/support",
"email": "",
diff --git a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py
index 315e16b1bcbf..d003493aa5b1 100644
--- a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py
+++ b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py
@@ -172,7 +172,7 @@ def login():
return_error('Failed to login, check integration parameters.')
try:
- res_json = res.json()
+ res_json = parse_json_response(res)
if 'log.loginResponse' in res_json and 'log.return' in res_json.get('log.loginResponse'):
auth_token = res_json.get('log.loginResponse').get('log.return')
if demisto.command() not in ['test-module', 'fetch-incidents']:
@@ -265,7 +265,7 @@ def get_query_viewer_results(query_viewer_id):
return_error('Failed to get query viewer results.')
return_object = None
- res_json = res.json()
+ res_json = parse_json_response(res)
if "qvs.getMatrixDataResponse" in res_json and "qvs.return" in res_json["qvs.getMatrixDataResponse"]:
# ArcSight ESM version 6.7 & 6.9 rest API supports qvs.getMatrixDataResponse
@@ -443,7 +443,7 @@ def get_case(resource_id, fetch_base_events=False):
else:
return_error(f'Failed to get case. StatusCode: {res.status_code}')
- res_json = res.json()
+ res_json = parse_json_response(res)
if 'cas.getResourceByIdResponse' in res_json and 'cas.return' in res_json.get('cas.getResourceByIdResponse'):
case = res_json.get('cas.getResourceByIdResponse').get('cas.return')
@@ -500,7 +500,8 @@ def get_all_cases_command():
demisto.debug(res.text)
return_error(f'Failed to get case list. StatusCode: {res.status_code}')
- contents = res.json().get('cas.findAllIdsResponse').get('cas.return')
+ res_json = parse_json_response(res)
+ contents = res_json.get('cas.findAllIdsResponse').get('cas.return')
human_readable = tableToMarkdown(name='All cases', headers='caseID', t=contents, removeNull=True)
outputs = {'ArcSightESM.AllCaseIDs': contents}
return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
@@ -562,7 +563,7 @@ def get_security_events(event_ids, last_date_range=None, ignore_empty=False):
'Failed to get security events with ids {}.\nFull URL: {}\nStatus Code: {}\nResponse Body: {}'.format(
event_ids, BASE_URL + query_path, res.status_code, res.text))
- res_json = res.json()
+ res_json = parse_json_response(res)
if res_json.get('sev.getSecurityEventsResponse') and res_json.get('sev.getSecurityEventsResponse').get(
'sev.return'):
events = res_json.get('sev.getSecurityEventsResponse').get('sev.return')
@@ -623,7 +624,7 @@ def update_case(case_id, stage, severity):
' or case is unlocked. \nStatus Code: {}\nResponse Body: {}'.format(case_id, res.status_code,
res.text))
- res_json = res.json()
+ res_json = parse_json_response(res)
if 'cas.updateResponse' in res_json and 'cas.return' in res_json.get('cas.updateResponse'):
return case
@@ -663,7 +664,7 @@ def get_case_event_ids_command():
demisto.debug(res.text)
return_error(f"Failed to get Event IDs with:\nStatus Code: {res.status_code}\nResponse: {res.text}")
- res_json = res.json()
+ res_json = parse_json_response(res)
if 'cas.getCaseEventIDsResponse' in res_json and 'cas.return' in res_json.get('cas.getCaseEventIDsResponse'):
event_ids = res_json.get('cas.getCaseEventIDsResponse').get('cas.return')
if not isinstance(event_ids, list):
@@ -738,7 +739,7 @@ def get_entries_command(use_rest, args):
resource_id, res.status_code, body, res.text))
if use_rest:
- res_json = res.json()
+ res_json = parse_json_response(res)
raw_entries = res_json.get('act.getEntriesResponse', {}).get('act.return', {})
else:
res_json = json.loads(xml2json((res.text).encode('utf-8')))
@@ -908,7 +909,7 @@ def get_all_query_viewers_command():
demisto.debug(res.text)
return_error(f"Failed to get query viewers:\nStatus Code: {res.status_code}\nResponse: {res.text}")
- res_json = res.json()
+ res_json = parse_json_response(res)
if 'qvs.findAllIdsResponse' in res_json and 'qvs.return' in res_json.get('qvs.findAllIdsResponse'):
query_viewers = res_json.get('qvs.findAllIdsResponse').get('qvs.return')
@@ -921,6 +922,41 @@ def get_all_query_viewers_command():
demisto.results('No Query Viewers were found')
+def parse_json_response(response: requests.Response):
+ """
+ Parse the response to JSON.
+ If the parsing fails due to an invalid escape sequence, the function will attempt to fix the response data.
+
+ Args:
+ response: The response to parse.
+
+ Raises:
+ JSONDecodeError: If the response data could not be parsed to JSON.
+ """
+ try:
+ return response.json()
+
+ except requests.exceptions.JSONDecodeError as e:
+ demisto.debug(f'Failed to parse response to JSON.\n'
+ f'HTTP status code: {response.status_code}\n'
+ f'Headers: {response.headers}\n'
+ f'Response:\n{response.text}\n\n'
+ 'Attempting to fix invalid escape sequences and parse the response again.')
+
+ # Replace triple backslashes (where the last one doesn't escape anything) with two backslashes.
+ fixed_response_text = re.sub(r'[^\\]\\\\\\(?P<char>[^\"\\])', r'\\\\\g<char>', response.text)
+
+ try:
+ fixed_response_json = json.loads(fixed_response_text)
+
+ except json.JSONDecodeError:
+ demisto.debug('Failed to parse modified response as JSON. Raising original exception.')
+ raise e # Raise the original exception
+
+ demisto.debug('Response successfully parsed after fixing invalid escape sequences.')
+ return fixed_response_json
+
+
AUTH_TOKEN: str
MAX_UNIQUE: int
FETCH_CHUNK_SIZE: int
diff --git a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.yml b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.yml
index 3016f99f0944..faec977b3cf7 100644
--- a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.yml
+++ b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.yml
@@ -380,7 +380,7 @@ script:
runonce: false
script: '-'
subtype: python3
- dockerimage: demisto/python3:3.10.13.78960
+ dockerimage: demisto/python3:3.10.13.89009
type: python
tests:
- ArcSight ESM v2 Test
diff --git a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2_test.py b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2_test.py
index 21993e456792..9c601474aa11 100644
--- a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2_test.py
+++ b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2_test.py
@@ -1,9 +1,11 @@
+from pathlib import Path
+
import demistomock as demisto
import pytest
import requests_mock
PARAMS = {
- 'server': 'https://server',
+ 'server': 'https://server.local',
'credentials': {},
'proxy': True}
@@ -543,3 +545,39 @@ def test_get_all_query_viewers_command(mocker, requests_mock):
output = results['Contents']
assert len(output) == 3
assert output[2] == "56789py4BABCN9NYml6MSoA=="
+
+
+def test_invalid_json_response(mocker, requests_mock):
+ """
+ Given:
+ - The servers responds with a response that is not a valid json
+
+ When:
+ - Running as-get-security-events command
+
+ Then:
+ - Ensure the response data is fixed and parsed.
+ """
+ mocker.patch.object(demisto, 'getIntegrationContext', return_value={'auth_token': 'token'})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(demisto, 'command', return_value='as-get-security-events')
+ mocker.patch.object(demisto, 'params', return_value=PARAMS)
+ mocker.patch.object(demisto, 'args', return_value={"ids": "X"})
+ mock_data_path = Path.cwd() / 'test_data' / 'QueryViewerService_getMatrixData_invalid_api_response.txt'
+
+ debug_logs_mock = mocker.patch.object(demisto, 'debug')
+
+ import ArcSightESMv2
+ from requests.models import Response
+ mock_response = Response()
+ mock_response._content = mock_data_path.read_bytes()
+ mock_response.status_code = 200
+ mocker.patch.object(ArcSightESMv2, 'send_request', return_value=mock_response)
+ debug_logs_mock = mocker.patch.object(ArcSightESMv2.demisto, 'debug')
+
+ ArcSightESMv2.main()
+ assert debug_logs_mock.call_args_list[0].startswith('Failed to parse response to JSON.\n')
+ assert debug_logs_mock.call_args_list[1].startswith('Response successfully parsed after fixing invalid escape sequences')
+
+ results = demisto.results.call_args[0][0]
+ assert results['Contents'] # assert that the response was parsed successfully
diff --git a/Packs/ArcSightESM/Integrations/ArcSightESMv2/test_data/QueryViewerService_getMatrixData_invalid_api_response.txt b/Packs/ArcSightESM/Integrations/ArcSightESMv2/test_data/QueryViewerService_getMatrixData_invalid_api_response.txt
new file mode 100644
index 000000000000..98c0a492678c
--- /dev/null
+++ b/Packs/ArcSightESM/Integrations/ArcSightESMv2/test_data/QueryViewerService_getMatrixData_invalid_api_response.txt
@@ -0,0 +1,276 @@
+{
+ "sev.getSecurityEventsResponse": {
+ "sev.return": {
+ "agent": {
+ "mutable": true,
+ "address": 170524911,
+ "addressAsBytes": "CioA7w==",
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": 345050876412,
+ "translatedAddress": -9223372036854775808,
+ "zone": {
+ "id": "dGVzdDE=",
+ "isModifiable": false,
+ "managerID": "dGVzdDI=",
+ "referenceID": 2084,
+ "referenceName": "Zone",
+ "referenceString": "",
+ "referenceType": 29,
+ "uri": "/All Zones/ArcSight System/Private Address Space Zones/RFC1918: 10.0.0.0-10.255.255.255"
+ },
+ "timeZone": "Asia/Jerusalem",
+ "version": "8.4.2.9113.0",
+ "id": "dGVzdDM=",
+ "name": "Test1",
+ "type": "winc"
+ },
+ "agentReceiptTime": 1708550500510,
+ "agentSeverity": 1,
+ "aggregatedEventCount": 1,
+ "assetCriticality": 0,
+ "baseEventCount": 1,
+ "bytesIn": -2147483648,
+ "bytesOut": -2147483648,
+ "category": {
+ "mutable": true,
+ "behavior": "/Execute/Start",
+ "deviceGroup": "/Operating System",
+ "deviceType": "Applications",
+ "object": "/Host/Resource/Process",
+ "outcome": "/Success",
+ "significance": "/Informational"
+ },
+ "concentratorAgents": {
+ "mutable": true,
+ "address": 170524911,
+ "addressAsBytes": "CioA7w==",
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": 345050876412,
+ "translatedAddress": -9223372036854775808,
+ "zone": {
+ "id": "dGVzdDE=",
+ "isModifiable": false,
+ "managerID": "dGVzdDI=",
+ "referenceID": 2084,
+ "referenceName": "Zone",
+ "referenceString": "",
+ "referenceType": 29,
+ "uri": "/All Zones/ArcSight System/Private Address Space Zones/RFC1918: 10.0.0.0-10.255.255.255"
+ },
+ "timeZone": "Asia/Jerusalem",
+ "version": "8.4.2.9113.0",
+ "id": "dGVzdDM=",
+ "name": "Test1",
+ "type": "winc"
+ },
+ "concentratorDevices": {
+ "mutable": true,
+ "address": -9223372036854775808,
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": -9223372036854775808,
+ "translatedAddress": -9223372036854775808,
+ "timeZone": "Asia/Jerusalem",
+ "version": "Unknown",
+ "product": "Sysmon",
+ "vendor": "Microsoft"
+ },
+ "correlatedEventCount": 0,
+ "destination": {
+ "mutable": true,
+ "address": -9223372036854775808,
+ "assetLocalId": -9223372036854775808,
+ "macAddress": -9223372036854775808,
+ "translatedAddress": -9223372036854775808,
+ "port": -2147483648,
+ "processId": 19196,
+ "processName": "E:\\Test\\Z\\Bin\\Test.exe",
+ "serviceName":"\"E:\\Test\\Z\\Bin\\Test.exe\" _silentMode=1 _execute=CHECKUPDATE _registerInformation=\"{\"message\":{\"command\":\"ProductStart\",\"parameter\":{\"cookiename\":\"iPlanetDirectoryPro\",\"cookievalue\":\"XXX\",\"deploymentmode\":\"CUSTOMER_MANAGED\",\"downloadinfotimeout\":\"30000\",\"downloadthrottle\":\"2\",\"enabledownloadresume\":\"false\",\"enablehttps\":\"false\",\"enableinterneturl\":\"true\",\"enablesha256\":\"false\",\"opsfilelocation\":\"\\\"E:\\\Test\\\User\\\Cache\\\Test\\\OPS\\\"\",\"proxypassword\":\"\",\"proxyserver\":\"\",\"proxyusername\":\"\",\"updaterserverurl\":\"http://test.local/Apps/DesktopDeployment\",\"uuid\":\"TEST1\"}}}\n\" _pipename=UpdateScheduler.24624",
+ "translatedPort": -2147483648,
+ "userName": "SYSTEM"
+ },
+ "device": {
+ "mutable": true,
+ "address": -9223372036854775808,
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": -9223372036854775808,
+ "translatedAddress": -9223372036854775808,
+ "timeZone": "Asia/Jerusalem",
+ "version": "Unknown",
+ "product": "Sysmon",
+ "vendor": "Microsoft"
+ },
+ "deviceAction": "Process Create",
+ "deviceCustom": {
+ "mutable": true,
+ "string1Label": "Integrity Level",
+ "string2Label": "EventlogCategory",
+ "string3Label": "EventSource",
+ "string4Label": "Process Command Line",
+ "string6Label": "LogonGuid"
+ },
+ "deviceCustomDate1": -9223372036854775808,
+ "deviceCustomDate2": -9223372036854775808,
+ "deviceCustomFloatingPoint1": "4.9E-324",
+ "deviceCustomFloatingPoint2": "4.9E-324",
+ "deviceCustomFloatingPoint3": "4.9E-324",
+ "deviceCustomFloatingPoint4": "4.9E-324",
+ "deviceCustomNumber1": -9223372036854775808,
+ "deviceCustomNumber2": -9223372036854775808,
+ "deviceCustomNumber3": -9223372036854775808,
+ "deviceCustomString1": "Medium",
+ "deviceCustomString2": 1,
+ "deviceCustomString3": "Microsoft-Windows-Sysmon",
+ "deviceCustomString4":"\"E:\\Test\\Z\\Bin\\Test.exe\" _silentMode=1 _execute=CHECKUPDATE _registerInformation=\"{\"message\":{\"command\":\"ProductStart\",\"parameter\":{\"cookiename\":\"iPlanetDirectoryPro\",\"cookievalue\":\"XXX\",\"deploymentmode\":\"CUSTOMER_MANAGED\",\"downloadinfotimeout\":\"30000\",\"downloadthrottle\":\"2\",\"enabledownloadresume\":\"false\",\"enablehttps\":\"false\",\"enableinterneturl\":\"true\",\"enablesha256\":\"false\",\"opsfilelocation\":\"\\\"E:\\\Test\\\User\\\Cache\\\Test\\\OPS\\\"\",\"proxypassword\":\"\",\"proxyserver\":\"\",\"proxyusername\":\"\",\"updaterserverurl\":\"http://test.local/Apps/DesktopDeployment\",\"uuid\":\"TEST2\"}}}\n\" _pipename=UpdateScheduler.24624",
+ "deviceCustomString6": "{cd7afa44-efbc-65d2-2c74-4a0000000000}",
+ "deviceDirection": -2147483648,
+ "deviceEventCategory": "Microsoft-Windows-Sysmon/Operational",
+ "deviceEventClassId": "Microsoft-Windows-Sysmon:1",
+ "deviceProcessId": -2147483648,
+ "deviceReceiptTime": 1708550478431,
+ "deviceSeverity": "Information",
+ "domainDate1": -9223372036854775808,
+ "domainDate2": -9223372036854775808,
+ "domainDate3": -9223372036854775808,
+ "domainDate4": -9223372036854775808,
+ "domainDate5": -9223372036854775808,
+ "domainDate6": -9223372036854775808,
+ "domainFp1": "4.9E-324",
+ "domainFp2": "4.9E-324",
+ "domainFp3": "4.9E-324",
+ "domainFp4": "4.9E-324",
+ "domainFp5": "4.9E-324",
+ "domainFp6": "4.9E-324",
+ "domainFp7": "4.9E-324",
+ "domainFp8": "4.9E-324",
+ "domainIpv4addr1": -9223372036854775808,
+ "domainIpv4addr2": -9223372036854775808,
+ "domainIpv4addr3": -9223372036854775808,
+ "domainIpv4addr4": -9223372036854775808,
+ "domainNumber1": -9223372036854775808,
+ "domainNumber10": -9223372036854775808,
+ "domainNumber11": -9223372036854775808,
+ "domainNumber12": -9223372036854775808,
+ "domainNumber13": -9223372036854775808,
+ "domainNumber2": -9223372036854775808,
+ "domainNumber3": -9223372036854775808,
+ "domainNumber4": -9223372036854775808,
+ "domainNumber5": -9223372036854775808,
+ "domainNumber6": -9223372036854775808,
+ "domainNumber7": -9223372036854775808,
+ "domainNumber8": -9223372036854775808,
+ "domainNumber9": -9223372036854775808,
+ "endTime": 1708550478431,
+ "eventAnnotation": {
+ "auditTrail": "1,1708598829379,root,Queued,,,,\n",
+ "flags": 0,
+ "modificationTime": 1708550506447,
+ "stage": {
+ "id": "R9MHiNfoAABCASsxbPIxG0g==",
+ "isModifiable": false,
+ "managerID": "dGVzdDI=",
+ "referenceID": 2209,
+ "referenceName": "Stage",
+ "referenceString": "",
+ "referenceType": 34,
+ "uri": "/All Stages/Queued"
+ },
+ "stageUpdateTime": 1708550506447,
+ "version": 1,
+ "endTime": 1708550478431,
+ "eventId": 14457190769,
+ "managerReceiptTime": 1708550506447
+ },
+ "eventId": 14457190769,
+ "externalId": 1,
+ "file": {
+ "createTime": -9223372036854775808,
+ "hash": "MD5=3EA3F345952BE712D49B3AAF29B54347,SHA256=835851546727FCF7F7CDD736F26E7DECED76DBDFE7C81ADB1FE3999531CD1BD6,IMPHASH=F34D5F2D4577ED6D9CEEC516C1F5A744",
+ "id": "Process Guid: {cd7afa44-694e-65d6-0a64-00000000e000}",
+ "modificationTime": -9223372036854775808,
+ "size": -9223372036854775808
+ },
+ "finalDevice": {
+ "mutable": true,
+ "address": -9223372036854775808,
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": -9223372036854775808,
+ "translatedAddress": -9223372036854775808,
+ "timeZone": "Asia/Jerusalem",
+ "version": "Unknown",
+ "product": "Sysmon",
+ "vendor": "Microsoft"
+ },
+ "flexDate1": -9223372036854775808,
+ "flexNumber1": -9223372036854775808,
+ "flexNumber2": -9223372036854775808,
+ "locality": 0,
+ "managerId": -128,
+ "managerReceiptTime": 1708550506447,
+ "message": "Refinitiv Deployment Manager",
+ "modelConfidence": 0,
+ "name": "Process Created",
+ "oldFile": {
+ "createTime": -9223372036854775808,
+ "hash": " ",
+ "id": "Parent Process Guid: {cd7afa44-f198-65d2-fc02-00000000e000}",
+ "modificationTime": -9223372036854775808,
+ "name": "Test.exe",
+ "path": "E:\\Test\\Z\\Bin\\",
+ "size": -9223372036854775808
+ },
+ "originalAgent": {
+ "mutable": true,
+ "address": 170524911,
+ "addressAsBytes": "CioA7w==",
+ "assetLocalId": -9223372036854775808,
+ "hostName": "test.local",
+ "macAddress": 345050876412,
+ "translatedAddress": -9223372036854775808,
+ "zone": {
+ "id": "dGVzdDE=",
+ "isModifiable": false,
+ "managerID": "dGVzdDI=",
+ "referenceID": 2084,
+ "referenceName": "Zone",
+ "referenceString": "",
+ "referenceType": 29,
+ "uri": "/All Zones/ArcSight System/Private Address Space Zones/RFC1918: 10.0.0.0-10.255.255.255"
+ },
+ "timeZone": "Asia/Jerusalem",
+ "version": "8.4.2.9113.0",
+ "id": "dGVzdDM=",
+ "name": "Test1",
+ "type": "winc"
+ },
+ "originator": "SOURCE",
+ "persistence": -2147483648,
+ "priority": 3,
+ "relevance": 10,
+ "sessionId": -9223372036854775808,
+ "severity": 0,
+ "source": {
+ "mutable": true,
+ "address": -9223372036854775808,
+ "assetLocalId": -9223372036854775808,
+ "macAddress": -9223372036854775808,
+ "ntDomain": "TEST",
+ "translatedAddress": -9223372036854775808,
+ "port": -2147483648,
+ "processId": 24624,
+ "processName": "E:\\Test\\Z\\Bin\\Test.exe",
+ "serviceName": "\"E:\\Test\\Z\\Bin\\Test.exe\" ",
+ "translatedPort": -2147483648,
+ "userId": "0x4a742c",
+ "userName": "TEST"
+ },
+ "startTime": 1708550478431,
+ "ttl": 10,
+ "type": "BASE"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/ArcSightESM/ReleaseNotes/1_2_1.md b/Packs/ArcSightESM/ReleaseNotes/1_2_1.md
new file mode 100644
index 000000000000..020c9dfc582f
--- /dev/null
+++ b/Packs/ArcSightESM/ReleaseNotes/1_2_1.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### ArcSight ESM v2
+
+Improved API response handling, adding support for responses with known invalid JSON formats.
diff --git a/Packs/ArcSightESM/pack_metadata.json b/Packs/ArcSightESM/pack_metadata.json
index bc9b4d9554c7..2bfaa3e73530 100644
--- a/Packs/ArcSightESM/pack_metadata.json
+++ b/Packs/ArcSightESM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ArcSight ESM",
"description": "ArcSight ESM SIEM by Micro Focus (Formerly HPE Software).",
"support": "xsoar",
- "currentVersion": "1.2.0",
+ "currentVersion": "1.2.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ArcherRSA/ReleaseNotes/1_2_16.md b/Packs/ArcherRSA/ReleaseNotes/1_2_16.md
new file mode 100644
index 000000000000..8a5060008b0d
--- /dev/null
+++ b/Packs/ArcherRSA/ReleaseNotes/1_2_16.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ArcherCreateIncidentExample
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/ArcherRSA/Scripts/ArcherCreateIncidentExample/ArcherCreateIncidentExample.yml b/Packs/ArcherRSA/Scripts/ArcherCreateIncidentExample/ArcherCreateIncidentExample.yml
index d8908f23e4cd..116a97870887 100644
--- a/Packs/ArcherRSA/Scripts/ArcherCreateIncidentExample/ArcherCreateIncidentExample.yml
+++ b/Packs/ArcherRSA/Scripts/ArcherCreateIncidentExample/ArcherCreateIncidentExample.yml
@@ -33,7 +33,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 5.0.0
skipprepare:
diff --git a/Packs/ArcherRSA/pack_metadata.json b/Packs/ArcherRSA/pack_metadata.json
index d60048714851..d98608e5ff24 100644
--- a/Packs/ArcherRSA/pack_metadata.json
+++ b/Packs/ArcherRSA/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "RSA Archer",
"description": "The RSA Archer GRC Platform provides a common foundation for managing policies, controls, risks, assessments and deficiencies across lines of business.",
"support": "xsoar",
- "currentVersion": "1.2.15",
+ "currentVersion": "1.2.16",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Armorblox/Integrations/Armorblox/Armorblox.yml b/Packs/Armorblox/Integrations/Armorblox/Armorblox.yml
index df4282607ace..99e9c39ece5c 100644
--- a/Packs/Armorblox/Integrations/Armorblox/Armorblox.yml
+++ b/Packs/Armorblox/Integrations/Armorblox/Armorblox.yml
@@ -88,7 +88,7 @@ script:
- contextPath: Armorblox.Threat.remediation_actions
description: Should be the remediation action name for the incident under inspection.
type: string
- dockerimage: demisto/armorblox:1.0.0.87345
+ dockerimage: demisto/armorblox:1.0.0.88851
isfetch: true
script: ''
subtype: python3
diff --git a/Packs/Armorblox/ReleaseNotes/1_0_34.md b/Packs/Armorblox/ReleaseNotes/1_0_34.md
new file mode 100644
index 000000000000..a4b0bcf0626f
--- /dev/null
+++ b/Packs/Armorblox/ReleaseNotes/1_0_34.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Armorblox
+- Updated the Docker image to: *demisto/armorblox:1.0.0.88851*.
diff --git a/Packs/Armorblox/pack_metadata.json b/Packs/Armorblox/pack_metadata.json
index 1fdbc47c4142..76630daab47d 100644
--- a/Packs/Armorblox/pack_metadata.json
+++ b/Packs/Armorblox/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Armorblox",
"description": "Armorblox is an API-based platform that stops targeted email attacks, protects sensitive data, and automates incident response.",
"support": "partner",
- "currentVersion": "1.0.33",
+ "currentVersion": "1.0.34",
"author": "Armorblox",
"url": "https://www.armorblox.com/",
"email": "support@armorblox.com",
diff --git a/Packs/Ataya/Integrations/Ataya/Ataya.yml b/Packs/Ataya/Integrations/Ataya/Ataya.yml
index 0a6c58bb8f64..361869c922e9 100644
--- a/Packs/Ataya/Integrations/Ataya/Ataya.yml
+++ b/Packs/Ataya/Integrations/Ataya/Ataya.yml
@@ -39,7 +39,7 @@ script:
description: the cilient imsi which need to be assigned.
description: approve user to access external network.
name: ataya-assign-user
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: ''
subtype: python3
diff --git a/Packs/Ataya/ReleaseNotes/1_0_5.md b/Packs/Ataya/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..5456419ab5c5
--- /dev/null
+++ b/Packs/Ataya/ReleaseNotes/1_0_5.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Ataya Harmony
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/Ataya/pack_metadata.json b/Packs/Ataya/pack_metadata.json
index 5d11df5c75e2..fc6bbe44d138 100644
--- a/Packs/Ataya/pack_metadata.json
+++ b/Packs/Ataya/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Ataya",
"description": "Integrate with Ataya Harmony for manage the 5G UE session",
"support": "partner",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Ataya Inc.",
"url": "https://ataya.io",
"email": "",
diff --git a/Packs/AtlassianConfluenceCloud/Integrations/AtlassianConfluenceCloud/AtlassianConfluenceCloud.yml b/Packs/AtlassianConfluenceCloud/Integrations/AtlassianConfluenceCloud/AtlassianConfluenceCloud.yml
index 758df2f41e7e..0faaf6cabda5 100644
--- a/Packs/AtlassianConfluenceCloud/Integrations/AtlassianConfluenceCloud/AtlassianConfluenceCloud.yml
+++ b/Packs/AtlassianConfluenceCloud/Integrations/AtlassianConfluenceCloud/AtlassianConfluenceCloud.yml
@@ -1915,7 +1915,7 @@ script:
- contextPath: ConfluenceCloud.Group._links.self
description: Link to the group.
type: String
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.13.87159
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AtlassianConfluenceCloud/ReleaseNotes/1_0_24.md b/Packs/AtlassianConfluenceCloud/ReleaseNotes/1_0_24.md
new file mode 100644
index 000000000000..b52379b8a23d
--- /dev/null
+++ b/Packs/AtlassianConfluenceCloud/ReleaseNotes/1_0_24.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Atlassian Confluence Cloud
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/AtlassianConfluenceCloud/pack_metadata.json b/Packs/AtlassianConfluenceCloud/pack_metadata.json
index d00cb055a8e2..862ecd626a25 100644
--- a/Packs/AtlassianConfluenceCloud/pack_metadata.json
+++ b/Packs/AtlassianConfluenceCloud/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Atlassian Confluence Cloud",
"description": "Atlassian Confluence Cloud allows users to interact with confluence entities like content, space, users and groups. Users can also manage the space permissions.",
"support": "xsoar",
- "currentVersion": "1.0.23",
+ "currentVersion": "1.0.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Attlasian/Integrations/Attlasian_IAM/Attlasian_IAM.yml b/Packs/Attlasian/Integrations/Attlasian_IAM/Attlasian_IAM.yml
index 259c1984516a..f2ab47579153 100644
--- a/Packs/Attlasian/Integrations/Attlasian_IAM/Attlasian_IAM.yml
+++ b/Packs/Attlasian/Integrations/Attlasian_IAM/Attlasian_IAM.yml
@@ -238,7 +238,7 @@ script:
type: String
- description: Retrieves a User Profile schema, which holds all of the user fields within the application. Used for outgoing-mapping through the Get Schema option.
name: get-mapping-fields
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
ismappable: true
isremotesyncout: true
script: '-'
diff --git a/Packs/Attlasian/ReleaseNotes/1_1_15.md b/Packs/Attlasian/ReleaseNotes/1_1_15.md
new file mode 100644
index 000000000000..c83a3cb90369
--- /dev/null
+++ b/Packs/Attlasian/ReleaseNotes/1_1_15.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Atlassian IAM
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Attlasian/pack_metadata.json b/Packs/Attlasian/pack_metadata.json
index e1abca28981d..5a3a2d901a08 100644
--- a/Packs/Attlasian/pack_metadata.json
+++ b/Packs/Attlasian/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Atlassian IAM",
"description": "Atlassian IAM Integration allows the customers to do the generic ILM management operations such as create, update, delete, etc.",
"support": "xsoar",
- "currentVersion": "1.1.14",
+ "currentVersion": "1.1.15",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager.yml b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager.yml
index f7c573f6909f..005bef1a5320 100644
--- a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager.yml
+++ b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager.yml
@@ -286,7 +286,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/boto3py3:1.0.0.87537
+ dockerimage: demisto/boto3py3:1.0.0.88855
fromversion: 6.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/Aws-SecretsManager/ReleaseNotes/1_0_38.md b/Packs/Aws-SecretsManager/ReleaseNotes/1_0_38.md
new file mode 100644
index 000000000000..afea10607838
--- /dev/null
+++ b/Packs/Aws-SecretsManager/ReleaseNotes/1_0_38.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Aws Secrets Manager
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.88855*.
diff --git a/Packs/Aws-SecretsManager/pack_metadata.json b/Packs/Aws-SecretsManager/pack_metadata.json
index 2e1c72b6e10c..d79b7638400f 100644
--- a/Packs/Aws-SecretsManager/pack_metadata.json
+++ b/Packs/Aws-SecretsManager/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS Secrets Manager",
"description": "AWS Secrets Manager helps you to securely encrypt, store, and retrieve credentials for your databases and other services.",
"support": "xsoar",
- "currentVersion": "1.0.37",
+ "currentVersion": "1.0.38",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Azure-Enrichment-Remediation/.pack-ignore b/Packs/Azure-Enrichment-Remediation/.pack-ignore
index e69de29bb2d1..1360511b24c3 100644
--- a/Packs/Azure-Enrichment-Remediation/.pack-ignore
+++ b/Packs/Azure-Enrichment-Remediation/.pack-ignore
@@ -0,0 +1,2 @@
+[file:Azure_-_Network_Security_Group_Remediation.yml]
+ignore=PB106
\ No newline at end of file
diff --git a/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation.yml b/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation.yml
index fab936c56edf..f65536d152c2 100644
--- a/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation.yml
+++ b/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation.yml
@@ -2,7 +2,7 @@ id: Azure - Network Security Group Remediation
version: -1
name: Azure - Network Security Group Remediation
description: |-
- This playbook adds new Azure Network Security Groups (NSG) rules to NSGs attached to a NIC. The new rules will give access only to a private IP address range and block traffic that's exposed to the public internet ([using the private IP of the VM as stated in Azure documentation](https://learn.microsoft.com/en-us/azure/virtual-network/network-security-groups-overview)). For example, if RDP is exposed to the public internet, this playbook adds new firewall rules that only allows traffic from private IP address and blocks the rest of the RDP traffic.
+ This playbook adds new Azure Network Security Groups (NSG) rules to NSGs attached to a NIC. The new rules will give access only to a private IP address range and block traffic that's exposed to the public internet ([using the private IP of the VM as stated in Azure documentation](https://learn.microsoft.com/en-us/azure/virtual-network/network-security-groups-overview)). For example, if RDP is exposed to the public internet, this playbook adds new firewall rules that only allow traffic from private IP addresses and blocks the rest of the RDP traffic.
Conditions and limitations:
- Limited to one resource group.
@@ -43,10 +43,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: e594c0b5-83ff-487e-8a93-e26bff748ea3
+ taskid: 1adc8ea1-823e-440b-82da-b83a8d7451d2
type: regular
task:
- id: e594c0b5-83ff-487e-8a93-e26bff748ea3
+ id: 1adc8ea1-823e-440b-82da-b83a8d7451d2
version: -1
name: Retrieve Rules from NSG Associated to Public IP
description: List all rules of the specified security groups.
@@ -80,6 +80,8 @@ tasks:
applyIfEmpty: {}
defaultValue: {}
operator: SetIfEmpty
+ using:
+ simple: ${inputs.InstanceName}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -187,10 +189,10 @@ tasks:
isautoswitchedtoquietmode: false
"22":
id: "22"
- taskid: 8cc8c11f-23d8-4d25-83ad-c9d0d8142833
+ taskid: 8b08e2be-7090-4530-8d81-840e906cbbff
type: condition
task:
- id: 8cc8c11f-23d8-4d25-83ad-c9d0d8142833
+ id: 8b08e2be-7090-4530-8d81-840e906cbbff
version: -1
name: Does offending rule exist?
description: Checks whether the last command returned rules or not.
@@ -237,6 +239,14 @@ tasks:
value:
simple: inputs.RemotePort
iscontext: true
+ - left:
+ iscontext: true
+ value:
+ simple: AzureNSG.Rule.destinationPortRange
+ operator: isEqualString
+ right:
+ value:
+ simple: '*'
- - operator: isEqualString
left:
value:
@@ -425,10 +435,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 56f3b649-2961-479a-8afb-ac0e5919c77b
+ taskid: b5146806-4b94-4d33-8277-5ea7d3e51bdf
type: regular
task:
- id: 56f3b649-2961-479a-8afb-ac0e5919c77b
+ id: b5146806-4b94-4d33-8277-5ea7d3e51bdf
version: -1
name: Update existing remediation allow rule
description: |-
@@ -484,6 +494,8 @@ tasks:
applyIfEmpty: {}
defaultValue: {}
operator: SetIfEmpty
+ using:
+ simple: ${inputs.InstanceName}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -769,10 +781,10 @@ tasks:
isautoswitchedtoquietmode: false
"37":
id: "37"
- taskid: cc549549-1a9d-4ae3-8d20-6cf8324b7a00
+ taskid: 1a7d4cac-6979-4cf3-8705-ec356925dda6
type: regular
task:
- id: cc549549-1a9d-4ae3-8d20-6cf8324b7a00
+ id: 1a7d4cac-6979-4cf3-8705-ec356925dda6
version: -1
name: Update existing remediation deny rule
description: |-
@@ -828,6 +840,8 @@ tasks:
applyIfEmpty: {}
defaultValue: {}
operator: SetIfEmpty
+ using:
+ simple: ${inputs.InstanceName}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -1116,10 +1130,10 @@ tasks:
isautoswitchedtoquietmode: false
"42":
id: "42"
- taskid: a3d6d6e8-b01d-418b-8af2-033300d717c7
+ taskid: f871b58d-6155-4b03-880a-1889551b6b00
type: regular
task:
- id: a3d6d6e8-b01d-418b-8af2-033300d717c7
+ id: f871b58d-6155-4b03-880a-1889551b6b00
version: -1
name: Add allow rule for port ${inputs.RemotePort} and ${inputs.RemoteProtocol}
description: |-
@@ -1180,7 +1194,7 @@ tasks:
simple: ${inputs.RemoteProtocol}
iscontext: true
source:
- simple: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16
+ simple: ${inputs.RemediationAllowRanges}
resource_group_name:
complex:
root: inputs.ResourceGroup
@@ -1197,6 +1211,8 @@ tasks:
applyIfEmpty: {}
defaultValue: {}
operator: SetIfEmpty
+ using:
+ simple: ${inputs.InstanceName}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -1215,10 +1231,10 @@ tasks:
isautoswitchedtoquietmode: false
"43":
id: "43"
- taskid: e5f451a1-edd6-4b06-8b32-c9ad5038de45
+ taskid: c98dc204-241c-4c23-8de5-f9e778ac7395
type: regular
task:
- id: e5f451a1-edd6-4b06-8b32-c9ad5038de45
+ id: c98dc204-241c-4c23-8de5-f9e778ac7395
version: -1
name: Set variable for offending rule priority
description: Sets variable for the offending rule priority in the list of rules returned.
@@ -1253,6 +1269,14 @@ tasks:
value:
simple: inputs.RemotePort
iscontext: true
+ - left:
+ iscontext: true
+ value:
+ simple: AzureNSG.Rule.destinationPortRange
+ operator: isEqualString
+ right:
+ value:
+ simple: '*'
- - operator: isEqualString
left:
value:
@@ -1326,10 +1350,10 @@ tasks:
isautoswitchedtoquietmode: false
"44":
id: "44"
- taskid: 44a359f8-455d-4de4-8beb-a193599922ca
+ taskid: 76be7dd2-448b-47b5-8ad1-8e5197e74bc8
type: regular
task:
- id: 44a359f8-455d-4de4-8beb-a193599922ca
+ id: 76be7dd2-448b-47b5-8ad1-8e5197e74bc8
version: -1
name: Add block rule for port ${inputs.RemotePort}
description: |-
@@ -1407,6 +1431,8 @@ tasks:
applyIfEmpty: {}
defaultValue: {}
operator: SetIfEmpty
+ using:
+ simple: ${inputs.InstanceName}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -1663,6 +1689,17 @@ inputs:
playbookInputQuery:
required: false
value: {}
+- description: Azure Network Security Groups integration instance to use if you have multiple instances configured (optional).
+ key: InstanceName
+ playbookInputQuery:
+ required: false
+ value: {}
+- description: Comma-separated list of IPv4 network ranges to be used as source addresses for the `remediation-allow-port-<port#>-<tcp|udp>` rule to be created. Typically this will be private IP ranges (to allow access within the vnet and bastion hosts) but other networks can be added as needed.
+ key: RemediationAllowRanges
+ playbookInputQuery:
+ required: false
+ value:
+ simple: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16
outputs:
- contextPath: remediatedFlag
description: Output key to determine if remediation was successfully done.
@@ -1682,6 +1719,8 @@ inputSections:
- RemotePort
- SubscriptionID
- ResourceGroup
+ - InstanceName
+ - RemediationAllowRanges
name: General (Inputs group)
outputSections:
- description: Generic group for outputs
@@ -1689,3 +1728,5 @@ outputSections:
outputs:
- remediatedFlag
- remediatedReason
+contentitemexportablefields:
+ contentitemfields: {}
diff --git a/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation_README.md b/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation_README.md
index 75fb61f3e50b..8855dd28fd00 100644
--- a/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation_README.md
+++ b/Packs/Azure-Enrichment-Remediation/Playbooks/Azure_-_Network_Security_Group_Remediation_README.md
@@ -1,4 +1,4 @@
-This playbook adds new Azure Network Security Groups (NSG) rules to NSGs attached to a NIC. The new rules will give access only to a private ip address range and block traffic that's exposed to the public internet ([using the private IP of the VM as stated in Azure documentation](https://learn.microsoft.com/en-us/azure/virtual-network/network-security-groups-overview)). For example, if RDP is exposed to the public internet, this playbook adds new firewall rules that only allow traffic from a private IP address and blocks the rest of the RDP traffic.
+This playbook adds new Azure Network Security Groups (NSG) rules to NSGs attached to a NIC. The new rules will give access only to a private IP address range and block traffic that's exposed to the public internet ([using the private IP of the VM as stated in Azure documentation](https://learn.microsoft.com/en-us/azure/virtual-network/network-security-groups-overview)). For example, if RDP is exposed to the public internet, this playbook adds new firewall rules that only allow traffic from private IP addresses and blocks the rest of the RDP traffic.
Conditions and limitations:
- Limited to one resource group.
@@ -20,14 +20,14 @@ This playbook does not use any sub-playbooks.
### Scripts
-* AzureFindAvailableNSGPriorities
* Set
+* AzureFindAvailableNSGPriorities
### Commands
+* azure-nsg-security-rule-update
* azure-nsg-security-rule-create
* azure-nsg-security-rules-list
-* azure-nsg-security-rule-update
## Playbook Inputs
@@ -41,6 +41,8 @@ This playbook does not use any sub-playbooks.
| RemotePort | The remote port that is publicly exposed. | | Required |
| SubscriptionID | The Azure subscription ID \(optional\). | | Optional |
| ResourceGroup | The Azure resource group \(optional\). | | Optional |
+| InstanceName | Azure Network Security Groups integration instance to use if you have multiple instances configured \(optional\). | | Optional |
+| RemediationAllowRanges | Comma-separated list of IPv4 network ranges to be used as source addresses for the \`remediation-allow-port-<port\#>-<tcp\|udp>\` rule to be created. Typically this will be private IP ranges \(to allow access within the vnet and bastion hosts\) but other networks can be added as needed. | 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 | Optional |
## Playbook Outputs
diff --git a/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_14.md b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_14.md
new file mode 100644
index 000000000000..b42b93ff65db
--- /dev/null
+++ b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_14.md
@@ -0,0 +1,8 @@
+
+#### Playbooks
+
+##### Azure - Network Security Group Remediation
+
+- Added the *InstanceName* optional playbook input to allow users to specify an Azure Network Security Groups integration instance to use.
+- Added the *RemediationAllowRanges* optional playbook input to allow users to specify IPv4 network ranges to be used as source addresses for the `remediation-allow-port-<port#>-<tcp|udp>` Azure NSG rule to be created.
+- Fixed an issue with not being able to detect all offending rules.
diff --git a/Packs/Azure-Enrichment-Remediation/pack_metadata.json b/Packs/Azure-Enrichment-Remediation/pack_metadata.json
index aa82b6bf2389..88e4b641f5c2 100644
--- a/Packs/Azure-Enrichment-Remediation/pack_metadata.json
+++ b/Packs/Azure-Enrichment-Remediation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Enrichment and Remediation",
"description": "Playbooks using multiple Azure content packs for enrichment and remediation purposes",
"support": "xsoar",
- "currentVersion": "1.1.13",
+ "currentVersion": "1.1.14",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureActiveDirectory/ReleaseNotes/1_3_20.md b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_20.md
new file mode 100644
index 000000000000..6a8e6f113435
--- /dev/null
+++ b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Identity Protection (Deprecated)
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
diff --git a/Packs/AzureActiveDirectory/pack_metadata.json b/Packs/AzureActiveDirectory/pack_metadata.json
index 10d83b1f29ee..108e2f286f6d 100644
--- a/Packs/AzureActiveDirectory/pack_metadata.json
+++ b/Packs/AzureActiveDirectory/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Deprecated. Use Microsoft Graph Identity and Access instead.",
"support": "xsoar",
"hidden": true,
- "currentVersion": "1.3.19",
+ "currentVersion": "1.3.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_21.md b/Packs/AzureCompute/ReleaseNotes/1_2_21.md
new file mode 100644
index 000000000000..79697f8a76ee
--- /dev/null
+++ b/Packs/AzureCompute/ReleaseNotes/1_2_21.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Compute v2
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureCompute/pack_metadata.json b/Packs/AzureCompute/pack_metadata.json
index 8ae62c4c94f9..094ac8ccf962 100644
--- a/Packs/AzureCompute/pack_metadata.json
+++ b/Packs/AzureCompute/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Compute",
"description": "Create and Manage Azure Virtual Machines",
"support": "xsoar",
- "currentVersion": "1.2.20",
+ "currentVersion": "1.2.21",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
index f59019f3c77a..fd903e48f7f8 100644
--- a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
+++ b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
@@ -321,6 +321,6 @@ def test_generate_login_url(mocker):
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code' \
'&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureDataExplorer.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureDataExplorer/ReleaseNotes/1_2_38.md b/Packs/AzureDataExplorer/ReleaseNotes/1_2_38.md
new file mode 100644
index 000000000000..fa723040d26c
--- /dev/null
+++ b/Packs/AzureDataExplorer/ReleaseNotes/1_2_38.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Data Explorer
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-data-explorer-generate-login-url*** command.
\ No newline at end of file
diff --git a/Packs/AzureDataExplorer/pack_metadata.json b/Packs/AzureDataExplorer/pack_metadata.json
index aad0db744413..3ef2399d7743 100644
--- a/Packs/AzureDataExplorer/pack_metadata.json
+++ b/Packs/AzureDataExplorer/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Data Explorer",
"description": "Use Azure Data Explorer integration to collect and analyze data inside clusters of Azure Data Explorer and manage search queries.",
"support": "xsoar",
- "currentVersion": "1.2.37",
+ "currentVersion": "1.2.38",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
index be5dc03b6ccb..7b4259f89c07 100644
--- a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
+++ b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
@@ -822,7 +822,7 @@ def test_generate_login_url(mocker):
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code' \
'&scope=offline_access%20499b84ac-1321-427f-aa17-267ca6975798/user_impersonation%20offline_access' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureDevOps.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureDevOps/ReleaseNotes/1_3_16.md b/Packs/AzureDevOps/ReleaseNotes/1_3_16.md
new file mode 100644
index 000000000000..e2a6c2c6149f
--- /dev/null
+++ b/Packs/AzureDevOps/ReleaseNotes/1_3_16.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AzureDevOps
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-devops-generate-login-url*** command.
\ No newline at end of file
diff --git a/Packs/AzureDevOps/pack_metadata.json b/Packs/AzureDevOps/pack_metadata.json
index daa7d537ea93..842f387656cf 100644
--- a/Packs/AzureDevOps/pack_metadata.json
+++ b/Packs/AzureDevOps/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AzureDevOps",
"description": "Create and manage Git repositories in Azure DevOps Services.",
"support": "xsoar",
- "currentVersion": "1.3.15",
+ "currentVersion": "1.3.16",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureFirewall/ReleaseNotes/1_1_37.md b/Packs/AzureFirewall/ReleaseNotes/1_1_37.md
new file mode 100644
index 000000000000..61f666dcf71b
--- /dev/null
+++ b/Packs/AzureFirewall/ReleaseNotes/1_1_37.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Firewall
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureFirewall/pack_metadata.json b/Packs/AzureFirewall/pack_metadata.json
index 680885b187a4..e18ebe16838a 100644
--- a/Packs/AzureFirewall/pack_metadata.json
+++ b/Packs/AzureFirewall/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Firewall",
"description": "Azure Firewall is a cloud-native and intelligent network firewall security service that provides breed threat protection for cloud workloads running in Azure.It's a fully stateful, firewall as a service with built-in high availability and unrestricted cloud scalability.",
"support": "xsoar",
- "currentVersion": "1.1.36",
+ "currentVersion": "1.1.37",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureKeyVault/ReleaseNotes/1_1_41.md b/Packs/AzureKeyVault/ReleaseNotes/1_1_41.md
new file mode 100644
index 000000000000..5ba68f3a3b6b
--- /dev/null
+++ b/Packs/AzureKeyVault/ReleaseNotes/1_1_41.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Key Vault
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureKeyVault/pack_metadata.json b/Packs/AzureKeyVault/pack_metadata.json
index 1407524961ff..e790d9250444 100644
--- a/Packs/AzureKeyVault/pack_metadata.json
+++ b/Packs/AzureKeyVault/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Key Vault",
"description": "Use Key Vault to safeguard and manage cryptographic keys and secrets used by cloud applications and services.",
"support": "xsoar",
- "currentVersion": "1.1.40",
+ "currentVersion": "1.1.41",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
index 1345bf4077d3..d21bb6845694 100644
--- a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
+++ b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
@@ -193,6 +193,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureKubernetesServices.return_results.call_args[0][0].readable_output
assert expected_url in res, "Login URL is incorrect"
diff --git a/Packs/AzureKubernetesServices/ReleaseNotes/1_1_22.md b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_22.md
new file mode 100644
index 000000000000..cf8c5ededee2
--- /dev/null
+++ b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_22.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Kubernetes Services
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-ks-generate-login-url*** command.
diff --git a/Packs/AzureKubernetesServices/pack_metadata.json b/Packs/AzureKubernetesServices/pack_metadata.json
index ac502176aef8..4e0ba724ab8e 100644
--- a/Packs/AzureKubernetesServices/pack_metadata.json
+++ b/Packs/AzureKubernetesServices/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Kubernetes Services",
"description": "Deploy and manage containerized applications with a fully managed Kubernetes service.",
"support": "xsoar",
- "currentVersion": "1.1.21",
+ "currentVersion": "1.1.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_description.md b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_description.md
index a9d71488725c..eeff642a6a24 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_description.md
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_description.md
@@ -2,18 +2,19 @@ Log Analytics is a service that helps you collect and analyze data generated by
Full documentation for this integration is available in the [reference docs](https://xsoar.pan.dev/docs/reference/integrations/azure-log-analytics).
+There are two authentication methods available:
-## Authorize Cortex XSOAR for Azure Log Analytics
+ * [Cortex XSOAR Application](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#cortex-xsoar-application)
+ * [Self-Deployed Application](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#self-deployed-application)
-You need to grant Cortex XSOAR authorization to access Azure Log Analytics.
+Depending on the authentication method that you use, the integration parameters might change.
-1. Access the [authorization flow](https://oproxy.demisto.ninja/ms-azure-log-analytics).
-2. Click the **Start Authorization Process** button.
- You will be prompted to grant Cortex XSOAR permissions for your Azure Service Management.
-3. Click the **Accept** button.
- You will receive your ID, token, and key. You need to enter this information, when you configure the Azure Log Analytics integration instance in Cortex XSOAR.
+#### Cortex XSOAR Azure App
-## Authorize Cortex XSOAR for Azure Log Analytics - Self-Deployed Configuration
+To use the **Cortex XSOAR application** and allow Cortex XSOAR access to Azure Log Analytics, an administrator has to approve our app using an admin consent flow by clicking this **[link](https://oproxy.demisto.ninja/ms-azure-log-analytics)**.
+After authorizing the Cortex XSOAR app, you will get an ID, Token, and Key which should be inserted in the integration instance settings fields.
+
+#### Self-Deployed Azure App
To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal. To add the registration, go to the [Microsoft article](https://docs.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app).
### Required permissions
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
index 3ba70e36f975..4749e216e742 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
@@ -327,7 +327,7 @@ def test_generate_login_url(mocker: MockerFixture) -> None:
f"[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?"
"response_type=code&scope=offline_access%20https://api.loganalytics.io/Data.Read"
"%20https://management.azure.com/user_impersonation"
- f"&client_id={client_id}&redirect_uri={redirect_uri})"
+ f"&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)"
)
res = AzureLogAnalytics.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/README.md b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/README.md
index 087fde07d942..f72675b9d7da 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/README.md
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/README.md
@@ -1,9 +1,10 @@
Log Analytics is a service that helps you collect and analyze data generated by resources in your cloud and on-premises environments.
This integration was integrated and tested with version 2022-10-01 of Azure Log Analytics.
-## Authorize Cortex XSOAR for Azure Log Analytics
+# Authorization
+In order to connect to Azure Log Analytics, use either the Cortex XSOAR Azure App or the Self-Deployed Azure App.
-You need to grant Cortex XSOAR authorization to access Azure Log Analytics.
+Depending on the authentication method that you use, the integration parameters might change.
**Note**: The Azure account must have permission to manage applications in Azure Active Directory (Azure AD). Any of the following Azure AD roles include the required permissions:
@@ -11,19 +12,22 @@ You need to grant Cortex XSOAR authorization to access Azure Log Analytics.
- Application developer
- Cloud application administrator
-In addition, the user needs to be assigned the **Log Analytics Reader** role.
-To add the role, see the [Microsoft article](https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-add-app-roles-in-azure-ad-apps#assign-users-and-groups-to-roles).
+In addition, the user that granted the authorization needs to be assigned the [**Log Analytics Reader** role](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/manage-access?tabs=portal#log-analytics-reader).
+For the search job commands the user needs to be assigned the [**Log Analytics Contributor** role](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/manage-access?tabs=portal#log-analytics-contributor).
+
+ To add these roles:
+ 1. In the Azure portal, go to `Log Analytics workspace` and select the workspace you are using -> Access control (IAM).
+ 2. From Access control (IAM) select: Add role assignment
+ 3. Select the user that granted the authorization and assign the Roles.
-For the search job commands the user needs to be assigned the [**Log Analytics Contributor** role](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/manage-access?tabs=portal#log-analytics-contributor).
-To add the role, see the [Microsoft article](https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-add-app-roles-in-azure-ad-apps#assign-users-and-groups-to-roles)
+For more information, refer to the following [Microsoft article](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/api/access-api#set-up-authentication).
-1. Access the [authorization flow](https://oproxy.demisto.ninja/ms-azure-log-analytics).
-2. Click **Start Authorization Process**.
- You will be prompted to grant Cortex XSOAR permissions for your Azure Service Management.
-3. Click **Accept**.
-You will receive your ID, token, and key. You need to enter these when you configure the Azure Log Analytics integration instance in Cortex XSOAR.
+## Cortex XSOAR Azure Application
+
+You need to grant Cortex XSOAR authorization to access Azure Log Analytics.
+For more information, refer to the following [article](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#cortex-xsoar-application).
-## Authorize Cortex XSOAR for Azure Log Analytics (Self-Deployed Configuration)
+## Self Deployed Application
To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal. To add the registration, see the [Microsoft article](https://docs.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app).
@@ -42,7 +46,7 @@ In the self-deployed mode you can authenticate, by using one of the following fl
---
1. In the instance configuration, select the **Use a self-deployed Azure application - Authorization Code flow** checkbox.
-2. Enter your client ID in the **ID \ Client ID** parameter (credentials username).
+2. Enter your client ID in the **ID / Client ID** parameter (credentials username).
3. Enter your client secret in the **Key / Client Secret** parameter (credentials password).
4. Enter your tenant ID in the **Token** parameter.
5. Enter your redirect URI in the **Redirect URI** parameter.
@@ -134,16 +138,16 @@ Executes an Analytics query for data.
#### Human Readable Output
-## Query Results
+>## Query Results
-### PrimaryResult
+>### PrimaryResult
-|Tenant Id|Computer|Time Generated|Source System|Start Time|End Time|Resource Uri|Data Type|Solution|Batches Within Sla|Batches Outside Sla|Batches Capped|Total Batches|Avg Latency In Seconds|Quantity|Quantity Unit|Is Billable|Meter Id|Linked Meter Id|Type|
-|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
-| TENANT_ID | Deprecated field: see | 2020-07-30T04:00:00Z | OMS | 2020-07-30T03:00:00Z | 2020-07-30T04:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | Operation | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.00714 | MBytes | false | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
-| TENANT_ID | Deprecated field: see | 2020-07-30T04:00:00Z | OMS | 2020-07-30T03:00:00Z | 2020-07-30T04:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | SigninLogs | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.012602 | MBytes | true | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
-| TENANT_ID | Deprecated field: see | 2020-07-30T05:00:00Z | OMS | 2020-07-30T04:00:00Z | 2020-07-30T05:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | OfficeActivity | Office365/SecurityInsights | 0 | 0 | 0 | 0 | 0 | 0.00201499908978072 | MBytes | false | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
-| TENANT_ID | Deprecated field: see | 2020-07-30T05:00:00Z | OMS | 2020-07-30T04:00:00Z | 2020-07-30T05:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | SigninLogs | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.009107 | MBytes | true | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
+>|Tenant Id|Computer|Time Generated|Source System|Start Time|End Time|Resource Uri|Data Type|Solution|Batches Within Sla|Batches Outside Sla|Batches Capped|Total Batches|Avg Latency In Seconds|Quantity|Quantity Unit|Is Billable|Meter Id|Linked Meter Id|Type|
+>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| TENANT_ID | Deprecated field: see | 2020-07-30T04:00:00Z | OMS | 2020-07-30T03:00:00Z | 2020-07-30T04:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | Operation | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.00714 | MBytes | false | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
+>| TENANT_ID | Deprecated field: see | 2020-07-30T04:00:00Z | OMS | 2020-07-30T03:00:00Z | 2020-07-30T04:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | SigninLogs | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.012602 | MBytes | true | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
+>| TENANT_ID | Deprecated field: see | 2020-07-30T05:00:00Z | OMS | 2020-07-30T04:00:00Z | 2020-07-30T05:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | OfficeActivity | Office365/SecurityInsights | 0 | 0 | 0 | 0 | 0 | 0.00201499908978072 | MBytes | false | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
+>| TENANT_ID | Deprecated field: see | 2020-07-30T05:00:00Z | OMS | 2020-07-30T04:00:00Z | 2020-07-30T05:00:00Z | /subscriptions/SUBSCRIPTION_ID/resourcegroups/RESOURCE_GROUP/providers/microsoft.operationalinsights/workspaces/WORKSPACE_NAME | SigninLogs | LogManagement | 0 | 0 | 0 | 0 | 0 | 0.009107 | MBytes | true | METER_ID | 00000000-0000-0000-0000-000000000000 | Usage |
### azure-log-analytics-test
@@ -169,7 +173,7 @@ There is no context output for this command.
#### Human Readable Output
-```✅ Success!```
+>```✅ Success!```
### azure-log-analytics-list-saved-searches
@@ -214,13 +218,13 @@ Gets the saved searches of the Log Analytics workspace.
#### Human Readable Output
-### Saved searches
+>### Saved searches
-|Etag|Id|Category|Display Name|Function Alias|Function Parameters|Query|Tags|Version|Type|
-|---|---|---|---|---|---|---|---|---|---|
-| W/"datetime'2020-07-05T13%3A38%3A41.053438Z'" | test2 | category1 | test2 | heartbeat_func | a:int=1 | Heartbeat \| summarize Count() by Computer \| take a | {'name': 'Group', 'value': 'Computer'} | 2 | Microsoft.OperationalInsights/savedSearches |
-| W/"datetime'2020-07-28T18%3A43%3A56.8625448Z'" | test123 | Saved Search Test Category | test123 | heartbeat_func | a:int=1 | Heartbeat \| summarize Count() by Computer \| take a | {'name': 'Group', 'value': 'Computer'} | 2 | Microsoft.OperationalInsights/savedSearches |
-| W/"datetime'2020-07-30T11%3A41%3A35.1459664Z'" | test1234 | test | test | | | SecurityAlert \| summarize arg_max(TimeGenerated, *) by SystemAlertId \| where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | | 2 | Microsoft.OperationalInsights/savedSearches |
+>|Etag|Id|Category|Display Name|Function Alias|Function Parameters|Query|Tags|Version|Type|
+>|---|---|---|---|---|---|---|---|---|---|
+>| W/"datetime'2020-07-05T13%3A38%3A41.053438Z'" | test2 | category1 | test2 | heartbeat_func | a:int=1 | Heartbeat \| summarize Count() by Computer \| take a | {'name': 'Group', 'value': 'Computer'} | 2 | Microsoft.OperationalInsights/savedSearches |
+>| W/"datetime'2020-07-28T18%3A43%3A56.8625448Z'" | test123 | Saved Search Test Category | test123 | heartbeat_func | a:int=1 | Heartbeat \| summarize Count() by Computer \| take a | {'name': 'Group', 'value': 'Computer'} | 2 | Microsoft.OperationalInsights/savedSearches |
+>| W/"datetime'2020-07-30T11%3A41%3A35.1459664Z'" | test1234 | test | test | | | SecurityAlert \| summarize arg_max(TimeGenerated, *) by SystemAlertId \| where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | | 2 | Microsoft.OperationalInsights/savedSearches |
### azure-log-analytics-get-saved-search-by-id
@@ -264,11 +268,11 @@ Gets a specified saved search from the Log Analytics workspace.
#### Human Readable Output
-### Saved search `test1234` properties
+>### Saved search `test1234` properties
-|Etag|Id|Category|Display Name|Query|Version|
-|---|---|---|---|---|---|
-| W/"datetime'2020-07-30T12%3A21%3A05.3197505Z'" | test1234 | test | test | SecurityAlert | summarize arg_max(TimeGenerated, *) by SystemAlertId | where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | 2 |
+>|Etag|Id|Category|Display Name|Query|Version|
+>|---|---|---|---|---|---|
+>| W/"datetime'2020-07-30T12%3A21%3A05.3197505Z'" | test1234 | test | test | SecurityAlert | summarize arg_max(TimeGenerated, *) by SystemAlertId | where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | 2 |
### azure-log-analytics-create-or-update-saved-search
@@ -322,11 +326,11 @@ Creates or updates a saved search from the Log Analytics workspace.
#### Human Readable Output
-### Saved search `test1234` properties
+>### Saved search `test1234` properties
-|Etag|Id|Category|Display Name|Query|Version|
-|---|---|---|---|---|---|
-| W/"datetime'2020-07-30T12%3A21%3A05.3197505Z'" | test1234 | test | new display name test | SecurityAlert | summarize arg_max(TimeGenerated, *) by SystemAlertId | where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | 2 |
+>|Etag|Id|Category|Display Name|Query|Version|
+>|---|---|---|---|---|---|
+>| W/"datetime'2020-07-30T12%3A21%3A05.3197505Z'" | test1234 | test | new display name test | SecurityAlert | summarize arg_max(TimeGenerated, *) by SystemAlertId | where SystemAlertId in("TEST_SYSTEM_ALERT_ID") | 2 |
### azure-log-analytics-delete-saved-search
@@ -357,7 +361,7 @@ There is no context output for this command.
#### Human Readable Output
-Successfully deleted the saved search test1234.
+>Successfully deleted the saved search test1234.
### azure-log-analytics-generate-login-url
diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_27.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_27.md
new file mode 100644
index 000000000000..f29316d3f34d
--- /dev/null
+++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_27.md
@@ -0,0 +1,5 @@
+
+#### Integrations
+
+##### Azure Log Analytics
+Documentation and metadata improvements.
\ No newline at end of file
diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_28.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_28.md
new file mode 100644
index 000000000000..f3c13897bcbc
--- /dev/null
+++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_28.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Log Analytics
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-log-analytics-generate-login-url*** command.
\ No newline at end of file
diff --git a/Packs/AzureLogAnalytics/pack_metadata.json b/Packs/AzureLogAnalytics/pack_metadata.json
index 94ecfecdc837..9867b484abb3 100644
--- a/Packs/AzureLogAnalytics/pack_metadata.json
+++ b/Packs/AzureLogAnalytics/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Log Analytics",
"description": "Log Analytics is a service that helps you collect and analyze data generated by resources in your cloud and on-premises environments.",
"support": "xsoar",
- "currentVersion": "1.1.26",
+ "currentVersion": "1.1.28",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
index 99e3565eb76a..62aa4ee785d9 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
@@ -194,6 +194,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureNetworkSecurityGroups.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_24.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_24.md
new file mode 100644
index 000000000000..4de929ef844b
--- /dev/null
+++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_24.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Network Security Groups
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-nsg-generate-login-url*** command.
\ No newline at end of file
diff --git a/Packs/AzureNetworkSecurityGroups/pack_metadata.json b/Packs/AzureNetworkSecurityGroups/pack_metadata.json
index 00807e845399..d5fe672dc533 100644
--- a/Packs/AzureNetworkSecurityGroups/pack_metadata.json
+++ b/Packs/AzureNetworkSecurityGroups/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Network Security Groups",
"description": "Azure Network Security Groups are used to filter network traffic to and from Azure resources in an Azure virtual network",
"support": "xsoar",
- "currentVersion": "1.2.23",
+ "currentVersion": "1.2.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureRiskyUsers/ReleaseNotes/1_1_32.md b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_32.md
new file mode 100644
index 000000000000..f86c5225b969
--- /dev/null
+++ b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_32.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Risky Users
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureRiskyUsers/pack_metadata.json b/Packs/AzureRiskyUsers/pack_metadata.json
index 649e4f9ed79e..95a6a8d95ed0 100644
--- a/Packs/AzureRiskyUsers/pack_metadata.json
+++ b/Packs/AzureRiskyUsers/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Risky Users",
"description": "Azure Risky Users provides access to all at-risk users and risk detections in Azure AD environment.",
"support": "xsoar",
- "currentVersion": "1.1.31",
+ "currentVersion": "1.1.32",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
index 68d199d43b3e..b6d1c9f9ac50 100644
--- a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
+++ b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
@@ -303,7 +303,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureSQLManagement.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureSQLManagement/ReleaseNotes/1_1_42.md b/Packs/AzureSQLManagement/ReleaseNotes/1_1_42.md
new file mode 100644
index 000000000000..6669a258185e
--- /dev/null
+++ b/Packs/AzureSQLManagement/ReleaseNotes/1_1_42.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure SQL Management
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-sql-generate-login-url*** command.
\ No newline at end of file
diff --git a/Packs/AzureSQLManagement/pack_metadata.json b/Packs/AzureSQLManagement/pack_metadata.json
index a958282cbd01..7a8749af1c9c 100644
--- a/Packs/AzureSQLManagement/pack_metadata.json
+++ b/Packs/AzureSQLManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure SQL Management",
"description": "Microsoft Azure SQL Database is a managed cloud database provided as part of Microsoft Azure",
"support": "xsoar",
- "currentVersion": "1.1.41",
+ "currentVersion": "1.1.42",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_22.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_22.md
new file mode 100644
index 000000000000..cdf2cb17ad10
--- /dev/null
+++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_22.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud Event Collector
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Defender for Cloud
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
diff --git a/Packs/AzureSecurityCenter/pack_metadata.json b/Packs/AzureSecurityCenter/pack_metadata.json
index e273913052d0..33b3b91b42e0 100644
--- a/Packs/AzureSecurityCenter/pack_metadata.json
+++ b/Packs/AzureSecurityCenter/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Defender for Cloud",
"description": "Unified security management and advanced threat protection across hybrid cloud workloads.",
"support": "xsoar",
- "currentVersion": "2.0.21",
+ "currentVersion": "2.0.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_38.md b/Packs/AzureSentinel/ReleaseNotes/1_5_38.md
new file mode 100644
index 000000000000..178b6d77789f
--- /dev/null
+++ b/Packs/AzureSentinel/ReleaseNotes/1_5_38.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Sentinel
+
+Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureSentinel/pack_metadata.json b/Packs/AzureSentinel/pack_metadata.json
index 67990718e181..4e6629d38b87 100644
--- a/Packs/AzureSentinel/pack_metadata.json
+++ b/Packs/AzureSentinel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Sentinel",
"description": "Microsoft Sentinel is a cloud-native security information and event manager (SIEM) platform that uses built-in AI to help analyze large volumes of data across an enterprise.",
"support": "xsoar",
- "currentVersion": "1.5.37",
+ "currentVersion": "1.5.38",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -17,4 +17,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
index e7f209e75f84..1e26a1ce6305 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
+++ b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
@@ -361,6 +361,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureStorage.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureStorage/ReleaseNotes/1_2_22.md b/Packs/AzureStorage/ReleaseNotes/1_2_22.md
new file mode 100644
index 000000000000..e5b63160967f
--- /dev/null
+++ b/Packs/AzureStorage/ReleaseNotes/1_2_22.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Storage Management
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-storage-generate-login-url*** command.
diff --git a/Packs/AzureStorage/pack_metadata.json b/Packs/AzureStorage/pack_metadata.json
index f66afb41b02b..ac798986cdca 100644
--- a/Packs/AzureStorage/pack_metadata.json
+++ b/Packs/AzureStorage/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Storage Management",
"description": "Deploy and manage storage accounts and blob service properties.",
"support": "xsoar",
- "currentVersion": "1.2.21",
+ "currentVersion": "1.2.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureStorageFileShare/Integrations/AzureStorageFileShare/AzureStorageFileShare.yml b/Packs/AzureStorageFileShare/Integrations/AzureStorageFileShare/AzureStorageFileShare.yml
index 16928fe16255..9be6be389af6 100644
--- a/Packs/AzureStorageFileShare/Integrations/AzureStorageFileShare/AzureStorageFileShare.yml
+++ b/Packs/AzureStorageFileShare/Integrations/AzureStorageFileShare/AzureStorageFileShare.yml
@@ -210,7 +210,7 @@ script:
description: Delete file from Share.
execution: true
name: azure-storage-fileshare-file-delete
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureStorageFileShare/ReleaseNotes/1_0_26.md b/Packs/AzureStorageFileShare/ReleaseNotes/1_0_26.md
new file mode 100644
index 000000000000..f18a08b32ee7
--- /dev/null
+++ b/Packs/AzureStorageFileShare/ReleaseNotes/1_0_26.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Azure Storage FileShare
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/AzureStorageFileShare/pack_metadata.json b/Packs/AzureStorageFileShare/pack_metadata.json
index e61c2c9b8ab2..2633768ef6b8 100644
--- a/Packs/AzureStorageFileShare/pack_metadata.json
+++ b/Packs/AzureStorageFileShare/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Storage FileShare",
"description": "Create and Manage Azure FileShare Files and Directories.",
"support": "xsoar",
- "currentVersion": "1.0.25",
+ "currentVersion": "1.0.26",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
index ed1e04c25a77..8312ca8c0871 100644
--- a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
+++ b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
@@ -379,7 +379,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20{Scopes.management_azure}' \
- f'&client_id={client_id}&redirect_uri={redirect_uri})'
+ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
res = AzureWAF.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureWAF/ReleaseNotes/1_1_20.md b/Packs/AzureWAF/ReleaseNotes/1_1_20.md
new file mode 100644
index 000000000000..acc261e43bca
--- /dev/null
+++ b/Packs/AzureWAF/ReleaseNotes/1_1_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Web Application Firewall
+
+Added support for the OAuth consent dialog after the user signs in using the ***azure-waf-generate-login-url*** command.
diff --git a/Packs/AzureWAF/pack_metadata.json b/Packs/AzureWAF/pack_metadata.json
index 5034fdc75310..708cf11bf36b 100644
--- a/Packs/AzureWAF/pack_metadata.json
+++ b/Packs/AzureWAF/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure WAF",
"description": "Azure Web Application Firewall is used to detect web related attacks targeting your web servers hosted in azure and allow quick respond to threats",
"support": "xsoar",
- "currentVersion": "1.1.19",
+ "currentVersion": "1.1.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Base/ReleaseNotes/1_33_31.md b/Packs/Base/ReleaseNotes/1_33_31.md
new file mode 100644
index 000000000000..b7a6e3971e62
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_31.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### DeleteIndicatorRelationships
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### DBotShowClusteringModelInfo
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Base/ReleaseNotes/1_33_32.md b/Packs/Base/ReleaseNotes/1_33_32.md
new file mode 100644
index 000000000000..30a4e223365c
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_32.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Added support for long running integrations to handle proxies.
diff --git a/Packs/Base/ReleaseNotes/1_33_33.md b/Packs/Base/ReleaseNotes/1_33_33.md
new file mode 100644
index 000000000000..f9cfc0c1bc55
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_33.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Added support for execution metrics in the *BaseClient* class.
diff --git a/Packs/Base/ReleaseNotes/1_33_34.md b/Packs/Base/ReleaseNotes/1_33_34.md
new file mode 100644
index 000000000000..a2279792e8d2
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_34.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### DrawRelatedIncidentsCanvas
+
+- Updated the Docker image to: *demisto/sklearn:1.0.0.86554*.
diff --git a/Packs/Base/ReleaseNotes/1_33_35.md b/Packs/Base/ReleaseNotes/1_33_35.md
new file mode 100644
index 000000000000..699fb5330fc2
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_35.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Fixed an issue where, in some cases, an AttributeError was raised during the destruction of a BaseClient object.
diff --git a/Packs/Base/ReleaseNotes/1_33_36.md b/Packs/Base/ReleaseNotes/1_33_36.md
new file mode 100644
index 000000000000..4b18832581ad
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_36.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Added ignore to sleep method to satisfy linters.
diff --git a/Packs/Base/ReleaseNotes/1_33_37.md b/Packs/Base/ReleaseNotes/1_33_37.md
new file mode 100644
index 000000000000..cdbd75c00ad1
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_37.md
@@ -0,0 +1,18 @@
+
+#### Scripts
+
+##### GetIncidentsByQuery
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
+- Moved the implementation to *GetIncidentsApiModule*.
+- The *includeContext* argument is now deprecated due to performance considerations. Rather than using this argument, it is recommended to retrieve the context of the incidents separately, preferably for a limited number of incidents.
+
+
+
+##### DBotFindSimilarIncidentsByIndicators
+- Updated the Docker image to: *demisto/ml:1.0.0.88591*.
+- Internal code enhancements for improved performance.
+
+##### DBotFindSimilarIncidents
+- Updated the Docker image to: *demisto/ml:1.0.0.88591*.
+- Internal code enhancements for improved performance.
+
diff --git a/Packs/Base/ReleaseNotes/1_33_38.md b/Packs/Base/ReleaseNotes/1_33_38.md
new file mode 100644
index 000000000000..85672c1be7fd
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_38.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+Added a utility function `comma_separated_mapping_to_dict` that gets a comma-separated mapping `key1=value1,key2=value2,...` and transforms it into a dictionary object.
diff --git a/Packs/Base/ReleaseNotes/1_33_39.md b/Packs/Base/ReleaseNotes/1_33_39.md
new file mode 100644
index 000000000000..8f44f477aee6
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_39.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+Improved implementation to prevent sensitive information from being logged.
diff --git a/Packs/Base/ReleaseNotes/1_33_40.md b/Packs/Base/ReleaseNotes/1_33_40.md
new file mode 100644
index 000000000000..74a03cd50c46
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_40.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetMLModelEvaluation
+
+- Updated the Docker image to: *demisto/ml:1.0.0.88591*.
diff --git a/Packs/Base/ReleaseNotes/1_33_41.md b/Packs/Base/ReleaseNotes/1_33_41.md
new file mode 100644
index 000000000000..5dd64608ec74
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_41.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+Improved implementation of *ConnectionError* handling.
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_33_42.md b/Packs/Base/ReleaseNotes/1_33_42.md
new file mode 100644
index 000000000000..676fbda8b2e0
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_33_42.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### SanePdfReports
+
+- Updated the Docker image to: *demisto/sane-pdf-reports:1.0.0.88753*.
diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
index e3642a550c23..5e7469f4a57c 100644
--- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
+++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
@@ -49,8 +49,8 @@ def __line__():
ASSETS = "assets"
EVENTS = "events"
DATA_TYPES = [EVENTS, ASSETS]
-
-SECRET_REPLACEMENT_STRING = ''
+MASK = ''
+SEND_PREFIX = "send: b'"
def register_module_line(module_name, start_end, line, wrapper=0):
@@ -199,7 +199,7 @@ def __del__(self):
import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry
- from typing import Optional, Dict, List, Any, Union, Set
+ from typing import Optional, Dict, List, Any, Union, Set, cast
from urllib3 import disable_warnings
disable_warnings()
@@ -512,6 +512,7 @@ class ErrorTypes(object):
PROXY_ERROR = 'ProxyError'
SSL_ERROR = 'SSLError'
TIMEOUT_ERROR = 'TimeoutError'
+ RETRY_ERROR = "RetryError"
class FeedIndicatorType(object):
@@ -826,6 +827,42 @@ def add_http_prefix_if_missing(address=''):
return 'http://' + address
+def handle_proxy_for_long_running(proxy_param_name='proxy', checkbox_default_value=False, handle_insecure=True,
+ insecure_param_name=None):
+ """
+ Handle logic for long running integration routing traffic through the system proxy.
+ Should usually be called at the beginning of the integration, depending on proxy checkbox state.
+    Long running integrations on hosted tenants XSOAR8 and XSIAM have a dedicated env. var.: CRTX_HTTP_PROXY.
+    Falls back to calling handle_proxy in cases where the long running integration runs on an engine or on XSOAR6.
+
+ :type proxy_param_name: ``string``
+ :param proxy_param_name: name of the "use system proxy" integration parameter
+
+ :type checkbox_default_value: ``bool``
+ :param checkbox_default_value: Default value of the proxy param checkbox
+
+ :type handle_insecure: ``bool``
+ :param handle_insecure: Whether to check the insecure param and unset env variables
+
+ :type insecure_param_name: ``string``
+ :param insecure_param_name: Name of insecure param. If None will search insecure and unsecure
+
+ :return: proxies dict for the 'proxies' parameter of 'requests' functions and use_ssl boolean
+ :rtype: ``Tuple[dict, boolean]``
+ """
+ proxies = {}
+ crtx_http_proxy = os.environ.get('CRTX_HTTP_PROXY', None)
+ if crtx_http_proxy:
+ demisto.error('Setting proxies according to CRTX_HTTP_PROXY: {}'.format(crtx_http_proxy))
+ proxies = {
+ 'http': crtx_http_proxy,
+ 'https': crtx_http_proxy
+ }
+ handle_insecure = True
+ return proxies, handle_insecure
+ return handle_proxy(proxy_param_name, checkbox_default_value, handle_insecure, insecure_param_name), handle_insecure
+
+
def handle_proxy(proxy_param_name='proxy', checkbox_default_value=False, handle_insecure=True,
insecure_param_name=None):
"""
@@ -1595,7 +1632,7 @@ def encode(self, message):
else:
res = "Failed encoding message with error: {}".format(exception)
for s in self.replace_strs:
- res = res.replace(s, SECRET_REPLACEMENT_STRING)
+ res = res.replace(s, MASK)
return res
def __call__(self, message):
@@ -1664,7 +1701,7 @@ def build_curl(self, text):
:rtype: ``None``
"""
http_methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH']
- data = text.split("send: b'")[1]
+ data = text.split(SEND_PREFIX)[1]
if data and data[0] in {'{', '<'}:
# it is the request url query params/post body - will always come after we already have the url and headers
# `<` is for xml body
@@ -1674,7 +1711,6 @@ def build_curl(self, text):
url = ''
headers = []
headers_to_skip = ['Content-Length', 'User-Agent', 'Accept-Encoding', 'Connection']
- headers_to_sanitize = ['Authorization', 'Cookie']
request_parts = repr(data).split('\\\\r\\\\n') # splitting lines on repr since data is a bytes-string
for line, part in enumerate(request_parts):
if line == 0:
@@ -1686,9 +1722,6 @@ def build_curl(self, text):
else:
if any(header_to_skip in part for header_to_skip in headers_to_skip):
continue
- if any(header_to_sanitize in part for header_to_sanitize in headers_to_sanitize):
- headers.append(part.split(' ')[0] + " " + SECRET_REPLACEMENT_STRING)
- continue
headers.append(part)
curl_headers = ''
for header in headers:
@@ -1720,12 +1753,18 @@ def write(self, msg):
if self.buffering:
self.messages.append(text)
else:
+ if is_debug_mode():
+ if text.startswith(('send:', 'header:')):
+ try:
+ text = censor_request_logs(text)
+ except Exception as e: # should fail silently
+ demisto.debug('Failed censoring request logs - {}'.format(str(e)))
+ if text.startswith('send:'):
+ try:
+ self.build_curl(text)
+ except Exception as e: # should fail silently
+ demisto.debug('Failed generating curl - {}'.format(str(e)))
demisto.info(text)
- if is_debug_mode() and text.startswith('send:'):
- try:
- self.build_curl(text)
- except Exception as e: # should fail silently
- demisto.debug('Failed generating curl - {}'.format(str(e)))
self.write_buf = []
def print_override(self, *args, **kwargs):
@@ -7286,7 +7325,7 @@ class ExecutionMetrics(object):
"""
def __init__(self, success=0, quota_error=0, general_error=0, auth_error=0, service_error=0, connection_error=0,
- proxy_error=0, ssl_error=0, timeout_error=0):
+ proxy_error=0, ssl_error=0, timeout_error=0, retry_error=0):
self._metrics = []
self.metrics = None
self.success = success
@@ -7298,6 +7337,7 @@ def __init__(self, success=0, quota_error=0, general_error=0, auth_error=0, serv
self.proxy_error = proxy_error
self.ssl_error = ssl_error
self.timeout_error = timeout_error
+ self.retry_error = retry_error
"""
Initializes an ExecutionMetrics object. Once initialized, you may increment each metric type according to the
metric you'd like to report. Afterwards, pass the `metrics` value to CommandResults.
@@ -7329,6 +7369,9 @@ def __init__(self, success=0, quota_error=0, general_error=0, auth_error=0, serv
:type timeout_error: ``int``
:param timeout_error: Quantity of Timeout Error metrics
+ :type retry_error: ``int``
+ :param retry_error: Quantity of Retry Error metrics
+
:type metrics: ``CommandResults``
:param metrics: Append this value to your CommandResults list to report the metrics to your server.
"""
@@ -7420,6 +7463,15 @@ def timeout_error(self, value):
self._timeout_error = value
self.update_metrics(ErrorTypes.TIMEOUT_ERROR, self._timeout_error)
+ @property
+ def retry_error(self):
+ return self._retry_error
+
+ @retry_error.setter
+ def retry_error(self, value):
+ self._retry_error = value
+ self.update_metrics(ErrorTypes.RETRY_ERROR, self._retry_error)
+
def get_metric_list(self):
return self._metrics
@@ -8337,6 +8389,41 @@ def emit(self, record):
pass
+def censor_request_logs(request_log):
+ """
+ Censors the request logs generated from the urllib library directly by replacing sensitive information such as tokens and cookies with a mask.
+ In most cases, the sensitive value is the first word after the keyword, but in some cases, it is the second one.
+ :param request_log: The request log to censor
+ :type request_log: ``str``
+
+ :return: The censored request log
+ :rtype: ``str``
+ """
+ keywords_to_censor = ['Authorization:', 'Cookie', "Token"]
+ lower_keywords_to_censor = [word.lower() for word in keywords_to_censor]
+
+ trimed_request_log = request_log.lstrip(SEND_PREFIX)
+ request_log_with_spaces = trimed_request_log.replace("\\r\\n", " \\r\\n")
+ request_log_lst = request_log_with_spaces.split()
+
+ for i, word in enumerate(request_log_lst):
+        # Check if the word is a keyword or contains a keyword (e.g., "Cookies" contains "Cookie")
+ if any(keyword in word.lower() for keyword in lower_keywords_to_censor):
+ next_word = request_log_lst[i + 1] if i + 1 < len(request_log_lst) else None
+ if next_word:
+            # If the next word is "Bearer" or "Basic", replace the word after it, since that's the token
+ if next_word.lower() in ["bearer", "basic"] and i + 2 < len(request_log_lst):
+ request_log_lst[i + 2] = MASK
+ else:
+ request_log_lst[i + 1] = MASK
+
+ # Rebuild the request log so that the only change is the masked information.
+ censored_string = SEND_PREFIX + \
+ ' '.join(request_log_lst) if request_log.startswith(SEND_PREFIX) else ' '.join(request_log_lst)
+ censored_string = censored_string.replace(" \\r\\n", "\\r\\n")
+ return censored_string
+
+
class DebugLogger(object):
"""
Wrapper to initiate logging at logging.DEBUG level.
@@ -8736,7 +8823,10 @@ def __init__(
system_timeout = os.getenv('REQUESTS_TIMEOUT', '')
self.timeout = float(entity_timeout or system_timeout or timeout)
+ self.execution_metrics = ExecutionMetrics()
+
def __del__(self):
+ self._return_execution_metrics_results()
try:
self._session.close()
except AttributeError:
@@ -8826,7 +8916,8 @@ def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth
params=None, data=None, files=None, timeout=None, resp_type='json', ok_codes=None,
return_empty_response=False, retries=0, status_list_to_retry=None,
backoff_factor=5, raise_on_redirect=False, raise_on_status=False,
- error_handler=None, empty_valid_codes=None, params_parser=None, **kwargs):
+ error_handler=None, empty_valid_codes=None, params_parser=None, with_metrics=False,
+ **kwargs):
"""A wrapper for requests lib to send our requests and handle requests and responses better.
:type method: ``str``
@@ -8924,6 +9015,9 @@ def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth
see here for more info: https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
Note! supported only in python3.
+        :type with_metrics: ``bool``
+ :param with_metrics: Whether or not to calculate execution metrics from the response
+
:return: Depends on the resp_type parameter
:rtype: ``dict`` or ``str`` or ``bytes`` or ``xml.etree.ElementTree.Element`` or ``requests.Response``
"""
@@ -8953,40 +9047,20 @@ def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth
timeout=timeout,
**kwargs
)
- # Handle error responses gracefully
if not self._is_status_code_valid(res, ok_codes):
- if error_handler:
- error_handler(res)
- else:
- self.client_error_handler(res)
+ self._handle_error(error_handler, res, with_metrics)
- if not empty_valid_codes:
- empty_valid_codes = [204]
- is_response_empty_and_successful = (res.status_code in empty_valid_codes)
- if is_response_empty_and_successful and return_empty_response:
- return res
+ return self._handle_success(res, resp_type, empty_valid_codes, return_empty_response, with_metrics)
- resp_type = resp_type.lower()
- try:
- if resp_type == 'json':
- return res.json()
- if resp_type == 'text':
- return res.text
- if resp_type == 'content':
- return res.content
- if resp_type == 'xml':
- ET.fromstring(res.text)
- if resp_type == 'response':
- return res
- return res
- except ValueError as exception:
- raise DemistoException('Failed to parse {} object from response: {}' # type: ignore[str-bytes-safe]
- .format(resp_type, res.content), exception, res)
except requests.exceptions.ConnectTimeout as exception:
+ if with_metrics:
+ self.execution_metrics.timeout_error += 1
err_msg = 'Connection Timeout Error - potential reasons might be that the Server URL parameter' \
' is incorrect or that the Server is not accessible from your host.'
raise DemistoException(err_msg, exception)
except requests.exceptions.SSLError as exception:
+ if with_metrics:
+ self.execution_metrics.ssl_error += 1
# in case the "Trust any certificate" is already checked
if not self._verify:
raise
@@ -8994,19 +9068,30 @@ def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth
' the integration configuration.'
raise DemistoException(err_msg, exception)
except requests.exceptions.ProxyError as exception:
+ if with_metrics:
+ self.execution_metrics.proxy_error += 1
err_msg = 'Proxy Error - if the \'Use system proxy\' checkbox in the integration configuration is' \
' selected, try clearing the checkbox.'
raise DemistoException(err_msg, exception)
except requests.exceptions.ConnectionError as exception:
+ if with_metrics:
+ self.execution_metrics.connection_error += 1
# Get originating Exception in Exception chain
error_class = str(exception.__class__)
err_type = '<' + error_class[error_class.find('\'') + 1: error_class.rfind('\'')] + '>'
+
err_msg = 'Verify that the server URL parameter' \
- ' is correct and that you have access to the server from your host.' \
- '\nError Type: {}\nError Number: [{}]\nMessage: {}\n' \
- .format(err_type, exception.errno, exception.strerror)
+ ' is correct and that you have access to the server from your host.' \
+ '\nError Type: {}'.format(err_type)
+ if exception.errno and exception.strerror:
+ err_msg += '\nError Number: [{}]\nMessage: {}\n'.format(exception.errno, exception.strerror)
+ else:
+ err_msg += '\n{}'.format(str(exception))
raise DemistoException(err_msg, exception)
+
except requests.exceptions.RetryError as exception:
+ if with_metrics:
+ self.execution_metrics.retry_error += 1
try:
reason = 'Reason: {}'.format(exception.args[0].reason.args[0])
except Exception: # noqa: disable=broad-except
@@ -9014,6 +9099,136 @@ def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth
err_msg = 'Max Retries Error- Request attempts with {} retries failed. \n{}'.format(retries, reason)
raise DemistoException(err_msg, exception)
+ def _handle_error(self, error_handler, res, should_update_metrics):
+ """ Handles error response by calling error handler or default handler.
+ If an exception is raised, update metrics with failure. Otherwise, proceeds.
+
+ :type res: ``requests.Response``
+ :param res: Response from API after the request for which to check error type
+
+ :type error_handler: ``callable``
+ :param error_handler: Given an error entry, the error handler outputs the
+ new formatted error message.
+
+ :type should_update_metrics: ``bool``
+ :param should_update_metrics: Whether or not to update execution metrics according to response
+ """
+ try:
+ if error_handler:
+ error_handler(res)
+ else:
+ self.client_error_handler(res)
+ except Exception:
+ if should_update_metrics:
+ self._update_metrics(res, success=False)
+ raise
+
+ def _handle_success(self, res, resp_type, empty_valid_codes, return_empty_response, should_update_metrics):
+ """ Handles successful response
+
+ :type res: ``requests.Response``
+ :param res: Response from API after the request for which to check error type
+
+ :type resp_type: ``str``
+ :param resp_type:
+ Determines which data format to return from the HTTP request. The default
+ is 'json'. Other options are 'text', 'content', 'xml' or 'response'. Use 'response'
+ to return the full response object.
+
+ :type empty_valid_codes: ``list``
+ :param empty_valid_codes: A list of all valid status codes of empty responses (usually only 204, but
+ can vary)
+
+ :type return_empty_response: ``bool``
+ :param return_empty_response: Whether to return an empty response body if the response code is in empty_valid_codes
+
+ :type should_update_metrics: ``bool``
+ :param should_update_metrics: Whether or not to update execution metrics according to response
+ """
+ if should_update_metrics:
+ self._update_metrics(res, success=True)
+
+ if not empty_valid_codes:
+ empty_valid_codes = [204]
+ is_response_empty_and_successful = (res.status_code in empty_valid_codes)
+ if is_response_empty_and_successful and return_empty_response:
+ return res
+
+ return self.cast_response(res, resp_type)
+
+ def cast_response(self, res, resp_type, raise_on_error=True):
+ resp_type = resp_type.lower()
+ try:
+ if resp_type == 'json':
+ return res.json()
+ if resp_type == 'text':
+ return res.text
+ if resp_type == 'content':
+ return res.content
+ if resp_type == 'xml':
+ ET.fromstring(res.text)
+ if resp_type == 'response':
+ return res
+ return res
+ except ValueError as exception:
+ if raise_on_error:
+ raise DemistoException('Failed to parse {} object from response: {}' # type: ignore[str-bytes-safe]
+ .format(resp_type, res.content), exception, res)
+
+ def _update_metrics(self, res, success):
+ """ Updates execution metrics based on response and success flag.
+
+ :type res: ``requests.Response``
+ :param res: Response from API after the request for which to check error type
+
+ :type success: ``bool``
+ :param success: Whether the request succeeded or failed
+ """
+ if success:
+ if not self.is_polling_in_progress(res):
+ self.execution_metrics.success += 1
+ else:
+ error_type = self.determine_error_type(res)
+ if error_type == ErrorTypes.QUOTA_ERROR:
+ self.execution_metrics.quota_error += 1
+ elif error_type == ErrorTypes.AUTH_ERROR:
+ self.execution_metrics.auth_error += 1
+ elif error_type == ErrorTypes.SERVICE_ERROR:
+ self.execution_metrics.service_error += 1
+ elif error_type == ErrorTypes.GENERAL_ERROR:
+ self.execution_metrics.general_error += 1
+
+ def determine_error_type(self, response):
+ """ Determines the type of error based on response status code and content.
+ Note: this method can be overridden by subclass when implementing execution metrics.
+
+ :type response: ``requests.Response``
+ :param response: Response from API after the request for which to check error type
+
+ :return: The error type if found, otherwise None
+ :rtype: ``ErrorTypes``
+ """
+ if response.status_code == 401:
+ return ErrorTypes.AUTH_ERROR
+ elif response.status_code == 429:
+ return ErrorTypes.QUOTA_ERROR
+ elif response.status_code == 500:
+ return ErrorTypes.SERVICE_ERROR
+ return ErrorTypes.GENERAL_ERROR
+
+ def is_polling_in_progress(self, response):
+ """If thie response indicates polling operation in progress, return True.
+ Note: this method should be overriden by subclass when implementing polling reputation commands
+ with execution metrics.
+
+ :type response: ``requests.Response``
+ :param response: Response from API after the request for which to check the polling status
+
+ :return: Whether the response indicates polling in progress
+ :rtype: ``bool``
+ """
+ return False
+
def _is_status_code_valid(self, response, ok_codes=None):
"""If the status code is OK, return 'True'.
@@ -9051,6 +9266,16 @@ def client_error_handler(self, res):
err_msg += '\n{}'.format(res.text)
raise DemistoException(err_msg, res=res)
+ def _return_execution_metrics_results(self):
+ """ Returns execution metrics results.
+ Might raise an AttributeError exception if execution_metrics is not initialized.
+ """
+ try:
+ if self.execution_metrics.metrics:
+ return_results(cast(CommandResults, self.execution_metrics.metrics))
+ except AttributeError:
+ pass
+
def batch(iterable, batch_size=1):
"""Gets an iterable and yields slices of it.
@@ -9265,7 +9490,7 @@ def set_to_integration_context_with_retries(context, object_keys=None, sync=True
''.format(version, str(ve), CONTEXT_UPDATE_RETRY_TIMES - attempt))
# Sleep for a random time
time_to_sleep = randint(1, 100) / 1000
- time.sleep(time_to_sleep)
+ time.sleep(time_to_sleep) # pylint: disable=E9003
def get_integration_context_with_version(sync=True):
@@ -11568,6 +11793,47 @@ def data_error_handler(res):
demisto.updateModuleHealth({'{data_type}Pulled'.format(data_type=data_type): data_size})
+def comma_separated_mapping_to_dict(raw_text):
+ """
+ Transforming a textual comma-separated mapping into a dictionary object.
+
+ :type raw_text: ``str``
+ :param raw_text: Comma-separated mapping e.g ('key1=value1', 'key2=value2', ...)
+
+ :rtype: ``dict``
+ :return: Validated dictionary of the raw mapping e.g {'key1': 'value1', 'key2': 'value2', ...}
+ """
+ demisto.debug("comma_separated_mapping_to_dict "
+ ">> Resolving comma-separated input mapping: {raw_text}".format(raw_text=raw_text))
+
+ mapping_dict = {} # type: Dict[str, str]
+ # If a proper mapping was not provided, return an empty dict.
+ if not raw_text:
+ return mapping_dict
+
+ key_value_pairs = raw_text.split(',')
+
+ for pair in key_value_pairs:
+ # Trimming trailing whitespace
+ pair = pair.strip()
+
+ try:
+ key, value = pair.split('=')
+ except ValueError:
+ demisto.error("Error: Invalid mapping was provided. "
+ "Expected comma-separated mapping of format `key1=value1, key2=value2, ...`")
+ key = value = ''
+
+ if key in mapping_dict:
+ demisto.debug(
+ "comma_separated_mapping_to_dict "
+ "Warning: duplicate key provided for {key}: using latter value: {value}".format(key=key, value=value)
+ )
+ mapping_dict[key] = value
+ demisto.debug("comma_separated_mapping_to_dict << Resolved mapping: {mapping_dict}".format(mapping_dict=mapping_dict))
+ return mapping_dict
+
+
###########################################
# DO NOT ADD LINES AFTER THIS ONE #
###########################################
diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
index 74de2379560d..bdbd7fc50be0 100644
--- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
+++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
@@ -28,7 +28,7 @@
url_to_clickable_markdown, WarningsHandler, DemistoException, SmartGetDict, JsonTransformer, \
remove_duplicates_from_list_arg, DBotScoreType, DBotScoreReliability, Common, send_events_to_xsiam, ExecutionMetrics, \
response_to_context, is_integration_command_execution, is_xsiam_or_xsoar_saas, is_xsoar, is_xsoar_on_prem, \
- is_xsoar_hosted, is_xsoar_saas, is_xsiam, send_data_to_xsiam
+ is_xsoar_hosted, is_xsoar_saas, is_xsiam, send_data_to_xsiam, censor_request_logs, censor_request_logs
EVENTS_LOG_ERROR = \
"""Error sending new events into XSIAM.
@@ -1452,7 +1452,7 @@ def test_build_curl_post_noproxy():
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
- 'curl -X POST https://demisto.com/api -H "Authorization: " -H "Content-Type: application/json" '
+ 'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"data": "value"}\''
]
@@ -1479,7 +1479,7 @@ def test_build_curl_post_xml():
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b''")
assert ilog.curl == [
- 'curl -X POST https://demisto.com/api -H "Authorization: " -H "Content-Type: application/json" '
+ 'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'\''
]
@@ -1511,7 +1511,7 @@ def test_build_curl_get_withproxy(mocker):
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
- 'curl -X GET https://demisto.com/api -H "Authorization: " -H "Content-Type: application/json" '
+ 'curl -X GET https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--proxy http://proxy -k -d \'{"data": "value"}\''
]
@@ -1548,9 +1548,9 @@ def test_build_curl_multiple_queries():
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"getdata\": \"value\"}'")
assert ilog.curl == [
- 'curl -X POST https://demisto.com/api/post -H "Authorization: " -H "Content-Type: application/json" '
+ 'curl -X POST https://demisto.com/api/post -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"postdata": "value"}\'',
- 'curl -X GET https://demisto.com/api/get -H "Authorization: " -H "Content-Type: application/json" '
+ 'curl -X GET https://demisto.com/api/get -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"getdata": "value"}\''
]
@@ -2974,10 +2974,19 @@ def test_http_request_proxy_error(self, requests_mock):
with raises(DemistoException, match="Proxy Error"):
self.client._http_request('get', 'event', resp_type='response')
- def test_http_request_connection_error(self, requests_mock):
+ def test_http_request_connection_error_with_errno(self, requests_mock):
from CommonServerPython import DemistoException
- requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectionError)
- with raises(DemistoException, match="Verify that the server URL parameter"):
+ err = requests.exceptions.ConnectionError()
+ err.errno = 104
+ err.strerror = "Connection reset by peer test"
+ requests_mock.get('http://example.com/api/v2/event', exc=err)
+ with raises(DemistoException, match="Error Number: \[104\]\\nMessage: Connection reset by peer test"):
+ self.client._http_request('get', 'event', resp_type='response')
+
+ def test_http_request_connection_error_without_errno(self, requests_mock):
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectionError("Generic error"))
+ with raises(DemistoException, match="Generic error"):
self.client._http_request('get', 'event', resp_type='response')
def test_text_exception_parsing(self, requests_mock):
@@ -3134,6 +3143,242 @@ def test_http_request_params_parser_none(self, requests_mock):
assert mock_request.last_request.query == 'key=value+with+spaces'
+ def test_http_request_execution_metrics_success(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A successful response.
+ Then: Verify the successful execution metrics is incremented.
+ """
+ requests_mock.get('http://example.com/api/v2/event', text="success")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.success == 1
+
+ def test_http_request_execution_metrics_success_but_polling_in_progress(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A successful response.
+ - Response is determined as polling in progress.
+ Then: Verify the successful execution metrics is not incremented.
+ """
+ requests_mock.get('http://example.com/api/v2/event', text="success")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ client.is_polling_in_progress = lambda _: True
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.success == 0
+
+ def test_http_request_execution_metrics_timeout(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A timeout error is returned.
+ Then: Verify the timeout error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectTimeout)
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException):
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.timeout_error == 1
+
+ def test_http_request_execution_metrics_ssl_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - An SSL error is returned.
+ Then: Verify the ssl error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.SSLError)
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201))
+ with raises(DemistoException):
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.ssl_error == 1
+
+ def test_http_request_execution_metrics_proxy_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A proxy error is returned.
+ Then: Verify the proxy error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ProxyError)
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException):
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.proxy_error == 1
+
+ def test_http_request_execution_metrics_connection_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A connection error is returned.
+ Then: Verify the connection error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectionError)
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException):
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.connection_error == 1
+
+ def test_http_request_execution_metrics_retry_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A retry error is returned.
+ Then: Verify the retry error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.RetryError)
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException):
+ client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert client.execution_metrics.retry_error == 1
+
+ def test_http_request_execution_metrics_auth_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - An auth error (401 status code) is returned.
+ Then: Verify the auth error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', status_code=401, text="err")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event', with_metrics=True)
+ assert client.execution_metrics.auth_error == 1
+
+ def test_http_request_execution_metrics_quota_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A quota error (429 status code) is returned.
+ Then: Verify the quota error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', status_code=429, text="err")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event', with_metrics=True)
+ assert client.execution_metrics.quota_error == 1
+
+ def test_http_request_execution_metrics_service_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A service error (500 status code) is returned.
+ Then: Verify the service error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', status_code=500, text="err")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event', with_metrics=True)
+ assert client.execution_metrics.service_error == 1
+
+ def test_http_request_execution_metrics_general_error(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A general error (400 status code) is returned.
+ Then: Verify the general error execution metrics is incremented.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', status_code=400, text="err")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event', with_metrics=True)
+ assert client.execution_metrics.general_error == 1
+
+ def test_http_request_execution_metrics_not_found_error_but_ok(cls, requests_mock):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A not found error (404 status code) is returned.
+ - 404 is considered ok
+ Then: Verify the success execution metrics is incremented, and not the general error metrics.
+ """
+ requests_mock.get('http://example.com/api/v2/event', status_code=404, text="err")
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201, 404), verify=False)
+ res = client._http_request('get', 'event', resp_type='response', with_metrics=True)
+ assert res.status_code == 404
+ assert client.execution_metrics.success == 1
+ assert client.execution_metrics.general_error == 0
+
+ def test_http_request_execution_metrics_results(cls, requests_mock, mocker):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function with metrics
+ - A general error is returned
+ - The client object is then deleted
+ Then: Verify an execution metrics entry is sent to demisto.results() accordingly.
+ """
+ from CommonServerPython import DemistoException, EntryType, ErrorTypes
+ requests_mock.get('http://example.com/api/v2/event', status_code=400, text="err")
+ demisto_results_mock = mocker.patch.object(demisto, 'results')
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event', with_metrics=True)
+ del client
+ demisto_results_mock.assert_called_once
+ entry = demisto_results_mock.call_args[0][0]
+ assert entry["Type"] == EntryType.EXECUTION_METRICS
+ assert entry["APIExecutionMetrics"] == [{
+ "Type": ErrorTypes.GENERAL_ERROR,
+ "APICallsCount": 1,
+ }]
+
+ def test_http_request_no_execution_metrics_results(cls, requests_mock, mocker):
+ """
+ Given: A BaseClient object
+ When:
+ - Calling _http_request function without metrics
+ - A general error is returned
+ - The client object is then deleted
+ Then: Verify demisto.results() is not called.
+ """
+ from CommonServerPython import DemistoException
+ requests_mock.get('http://example.com/api/v2/event', status_code=400, text="err")
+ demisto_results_mock = mocker.patch.object(demisto, 'results')
+ client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
+ with raises(DemistoException, match="Error in API call"):
+ client._http_request('get', 'event')
+ del client
+ demisto_results_mock.assert_not_called
+
+ def test_base_client_subclass_without_execution_metrics_initialized(self):
+ """
+ Given: A BaseClient object and a subclass of it that does not initialize execution_metrics
+ When: deleting the client object
+ Then: Ensure the deletion does not raise any exception
+ """
+ from CommonServerPython import BaseClient
+
+ class Client(BaseClient):
+ def __init__(self):
+ pass
+
+ client = Client()
+ del client
+
@pytest.mark.skipif(not IS_PY3, reason='test not supported in py2')
def test_http_request_params_parser_quote(self, requests_mock):
"""
@@ -9318,3 +9563,68 @@ def test_create_clickable_test_wrong_text_value():
assert e.type == AssertionError
assert 'The URL list and the text list must be the same length.' in e.value.args
+
+
+@pytest.mark.parametrize("request_log, expected_output", [
+ (
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nmy_authorization: Bearer token123\\r\\n'",
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nmy_authorization: Bearer \\r\\n'"
+ ),
+ (
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nSet_Cookie: session_id=123\\r\\n'",
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nSet_Cookie: \\r\\n'"
+ ),
+ (
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nAuthorization: token123\\r\\n'",
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nAuthorization: \\r\\n'"
+ ),
+ (
+ "GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nAuthorization: Bearer token123\\r\\n",
+ "GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\nAuthorization: Bearer \\r\\n"
+ ),
+ (
+ "send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\n'",
+ str("send: b'GET /api/v1/users HTTP/1.1\\r\\nHost: example.com\\r\\n'")
+ ),
+])
+def test_censor_request_logs(request_log, expected_output):
+ """
+ Given:
+ A request log.
+ case 1: A request log with sensitive data under the 'Authorization' header, but the 'Authorization' is not capitalized and within a string.
+ case 2: A request log with sensitive data under the 'Cookie' header, but with a 'Set_Cookie' prefix.
+ case 3: A request log with sensitive data under the 'Authorization' header, but with no 'Bearer' prefix.
+ case 4: A request log with sensitive data under the 'Authorization' header, but with no 'send b' prefix at the beginning.
+ case 5: A request log with no sensitive data.
+ When:
+ Running censor_request_logs function.
+ Then:
+ Assert the function returns exactly the same log with the sensitive data masked.
+ """
+ assert censor_request_logs(request_log) == expected_output
+
+
+@pytest.mark.parametrize("request_log", [
+ ('send: hello\n'),
+ ('header: Authorization\n')
+])
+def test_logger_write__censor_request_logs_has_been_called(mocker, request_log):
+ """
+ Given:
+ A request log that starts with 'send' or 'header' that may contains sensitive data.
+ When:
+ Running logger.write function when using debug-mode.
+ Then:
+ Assert the censor_request_logs function has been called.
+ """
+ mocker.patch.object(demisto, 'params', return_value={
+ 'credentials': {'password': 'my_password'},
+ })
+ mocker.patch.object(demisto, 'info')
+ mocker.patch('CommonServerPython.is_debug_mode', return_value=True)
+ mock_censor = mocker.patch('CommonServerPython.censor_request_logs')
+ mocker.patch('CommonServerPython.IntegrationLogger.build_curl')
+ ilog = IntegrationLogger()
+ ilog.set_buffering(False)
+ ilog.write(request_log)
+ assert mock_censor.call_count == 1
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py
index c198b2208fb9..c7eded235f00 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py
@@ -4,6 +4,7 @@
import warnings
import numpy as np
import re
+from copy import deepcopy
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.base import BaseEstimator, TransformerMixin
import json
@@ -11,6 +12,8 @@
from scipy.spatial.distance import cdist
from typing import Any
+from GetIncidentsApiModule import * # noqa: E402
+
warnings.simplefilter("ignore")
warnings.filterwarnings('ignore', category=UserWarning)
@@ -29,7 +32,7 @@
MESSAGE_NO_CURRENT_INCIDENT = f"- {INCIDENT_ALIAS.capitalize()} %s does not exist within the given time range. " \
f"Please check incidentId value or that you are running the command within an {INCIDENT_ALIAS}."
MESSAGE_NO_FIELD = f"- %s field(s) does not exist in the current {INCIDENT_ALIAS}."
-MESSAGE_INCORRECT_FIELD = "- %s field(s) don't/doesn't exist within the fetched {INCIDENT_ALIAS}s."
+MESSAGE_INCORRECT_FIELD = f"- %s field(s) don't/doesn't exist within the fetched {INCIDENT_ALIAS}s."
SIMILARITY_COLUNM_NAME = f'similarity {INCIDENT_ALIAS}'
SIMILARITY_COLUNM_NAME_INDICATOR = 'similarity indicators'
@@ -72,15 +75,39 @@ def keep_high_level_field(incidents_field: list[str]) -> list[str]:
return [x.split('.')[0] if '.' in x else x for x in incidents_field]
-def wrapped_list(obj: list) -> list:
- """
- Wrapped object into a list if not list
- :param obj:
- :return:
- """
- if not isinstance(obj, list):
- return [obj]
- return obj
+def extract_values(data: dict | list, path: str, values_to_exclude: list) -> list:
+ """Recursively extracts values from nested object by path (dot notation).
+
+ For example: extract_values(
+ data={"A": [
+ {"B": 1, "C": 0},
+ {"B": 2},
+ {"B": None},
+ {"B": "N/A"},
+ ]},
+ path="A.B",
+ values_to_exclude=[None, "N/A"],
+ ) == [1, 2]
+
+ Args:
+ data (dict | list): The object to extract values from.
+ path (str): The path (dot notation) to the values to extract.
+ values_to_exclude (list): A list of values to exclude from result.
+
+ Returns:
+ list: The extracted values.
+ """
+ def recurse(obj: Any, keys: list[str]):
+ if not keys:
+ result = obj if isinstance(obj, list) else [obj]
+ return [val for val in result if val not in values_to_exclude]
+ if isinstance(obj, dict):
+ if keys[0] in obj:
+ return recurse(obj[keys[0]], keys[1:])
+ elif isinstance(obj, list):
+ return [result for item in obj for result in recurse(item, keys)]
+ return []
+ return recurse(data, path.split("."))
def preprocess_incidents_field(incidents_field: str, prefix_to_remove: list[str]) -> str:
@@ -190,20 +217,13 @@ def normalize_command_line(command: str) -> str:
return ''
-def fill_nested_fields(incidents_df: pd.DataFrame, incidents: pd.DataFrame, *list_of_field_list: list[str]) -> \
+def fill_nested_fields(incidents_df: pd.DataFrame, incidents: dict | list, *list_of_field_list: list[str]) -> \
pd.DataFrame:
for field_type in list_of_field_list:
for field in field_type:
if '.' in field:
- if isinstance(incidents, list):
- value_list = [wrapped_list(demisto.dt(incident, field)) for incident in incidents]
- value_list = [' '.join(set(filter(lambda x: x not in ['None', None, 'N/A'], x))) for x in
- value_list]
- else:
- value_list = wrapped_list(demisto.dt(incidents, field))
- value_list = ' '.join( # type: ignore
- set(filter(lambda x: x not in ['None', None, 'N/A'], value_list))) # type: ignore
- incidents_df[field] = value_list
+ value_list = extract_values(incidents, field, values_to_exclude=['None', None, 'N/A'])
+ incidents_df[field] = ' '.join(value_list)
return incidents_df
@@ -537,20 +557,14 @@ def get_incident_by_id(incident_id: str, populate_fields: list[str], from_date:
"""
populate_fields_value = ' , '.join(populate_fields)
message_of_values = build_message_of_values([incident_id, populate_fields_value, from_date, to_date])
- demisto.debug(f'Executing GetIncidentsByQuery, {message_of_values}')
- res = demisto.executeCommand('GetIncidentsByQuery', {
- 'query': "id:(%s)" % incident_id,
+ demisto.debug(f'Calling get_incidents_by_query, {message_of_values}')
+ incidents = get_incidents_by_query({
+ 'query': f"id:({incident_id})",
'populateFields': populate_fields_value,
'fromDate': from_date,
'toDate': to_date,
})
- if is_error(res):
- return_error(res)
- if not json.loads(res[0]['Contents']):
- return None
- else:
- incident = json.loads(res[0]['Contents'])
- return incident[0]
+ return incidents[0] if incidents else None
def get_all_incidents_for_time_window_and_exact_match(exact_match_fields: list[str], populate_fields: list[str],
@@ -580,17 +594,15 @@ def get_all_incidents_for_time_window_and_exact_match(exact_match_fields: list[s
query += " %s" % query_sup
populate_fields_value = ' , '.join(populate_fields)
- demisto.debug(f'Executing GetIncidentsByQuery, {build_message_of_values([populate_fields_value, from_date, to_date, limit])}')
- res = demisto.executeCommand('GetIncidentsByQuery', {
+ msg_of_values = build_message_of_values([populate_fields_value, from_date, to_date, limit])
+ demisto.debug(f'Calling get_incidents_by_query, {msg_of_values}')
+ incidents = get_incidents_by_query({
'query': query,
'populateFields': populate_fields_value,
'fromDate': from_date,
'toDate': to_date,
'limit': limit
})
- if is_error(res):
- return_error(res)
- incidents = json.loads(res[0]['Contents'])
if len(incidents) == 0:
msg += "%s \n" % MESSAGE_NO_INCIDENT_FETCHED
return None, msg
@@ -954,7 +966,7 @@ def main():
incorrect_fields=incorrect_fields)
# Dumps all dict in the current incident
- incident_df = dumps_json_field_in_incident(incident)
+ incident_df = dumps_json_field_in_incident(deepcopy(incident))
incident_df = fill_nested_fields(incident_df, incident, similar_text_field, similar_categorical_field)
# Model prediction
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml
index 64fb0bb89c84..2a114966535d 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml
@@ -84,7 +84,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/ml:1.0.0.84027
+dockerimage: demisto/ml:1.0.0.88591
runas: DBotWeakRole
runonce: true
tests:
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py
index d4f5c314af98..28cbde4f277b 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py
@@ -1,8 +1,8 @@
import demistomock as demisto
-import json
import numpy as np
import pandas as pd
import pytest
+from copy import deepcopy
CURRENT_INCIDENT_NOT_EMPTY = [
{'id': '123', 'commandline': 'powershell IP=1.1.1.1', 'CustomFields': {"nested_field": 'value_nested_field'},
@@ -45,11 +45,11 @@ def executeCommand(command, args):
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
if command == 'DBotFindSimilarIncidentsByIndicators':
return [[], {'Contents': SIMILAR_INDICATORS, 'Type': 'note', 'Tags': [TAG_SCRIPT_INDICATORS]}]
- if command == 'GetIncidentsByQuery':
- if 'limit' in args:
- return [{'Contents': json.dumps(FETCHED_INCIDENT), 'Type': 'note'}]
- else:
- return [{'Contents': json.dumps(CURRENT_INCIDENT), 'Type': 'note'}]
+ if command == 'getIncidents':
+ if '-id:' in args.get("query"): # query for similar incidents
+ return [{'Contents': {"data": FETCHED_INCIDENT}, 'Type': 'note'}]
+ else: # query for current incident
+ return [{'Contents': {"data": CURRENT_INCIDENT}, 'Type': 'note'}]
return None
@@ -102,9 +102,9 @@ def test_euclidian_similarity_capped():
def test_main_regular(mocker):
from DBotFindSimilarIncidents import SIMILARITY_COLUNM_NAME_INDICATOR, SIMILARITY_COLUNM_NAME, main, COLUMN_ID, COLUMN_TIME
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_NOT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_NOT_EMPTY
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_NOT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_NOT_EMPTY)
mocker.patch.object(demisto, 'args',
return_value={
'incidentId': 12345,
@@ -122,7 +122,6 @@ def test_main_regular(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=None)
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
res, _ = main()
assert ('empty_current_incident_field' not in res.columns)
@@ -142,9 +141,9 @@ def test_main_no_indicators_found(mocker):
"""
from DBotFindSimilarIncidents import SIMILARITY_COLUNM_NAME_INDICATOR, SIMILARITY_COLUNM_NAME, main, COLUMN_ID, COLUMN_TIME
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_NOT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_EMPTY
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_NOT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_EMPTY)
mocker.patch.object(demisto, 'args',
return_value={
'incidentId': 12345,
@@ -162,7 +161,6 @@ def test_main_no_indicators_found(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=None)
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
res, _ = main()
assert ('empty_current_incident_field' not in res.columns)
@@ -180,9 +178,9 @@ def test_main_no_fetched_incidents_found(mocker):
"""
from DBotFindSimilarIncidents import MESSAGE_NO_INCIDENT_FETCHED, main
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_NOT_EMPTY
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_NOT_EMPTY)
mocker.patch.object(demisto, 'args',
return_value={
'incidentId': 12345,
@@ -200,7 +198,6 @@ def test_main_no_fetched_incidents_found(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=None)
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
res = main()
assert (not res[0])
@@ -229,9 +226,9 @@ def test_main_all_incorrect_field(mocker):
"""
from DBotFindSimilarIncidents import MESSAGE_INCORRECT_FIELD, main
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_NOT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_NOT_EMPTY
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_NOT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_NOT_EMPTY)
wrong_field_1 = 'wrong_field_1'
wrong_field_2 = 'wrong_field_2'
wrong_field_3 = 'wrong_field_3'
@@ -252,7 +249,6 @@ def test_main_all_incorrect_field(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=None)
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
df, msg = main()
assert (not df)
@@ -268,9 +264,9 @@ def test_main_incident_truncated(mocker):
"""
from DBotFindSimilarIncidents import main, MESSAGE_WARNING_TRUNCATED
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_NOT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_NOT_EMPTY
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_NOT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_NOT_EMPTY)
correct_field_1 = 'commandline'
wrong_field_2 = 'wrong_field_2'
wrong_field_3 = 'wrong_field_3'
@@ -291,7 +287,6 @@ def test_main_incident_truncated(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=None)
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
df, msg = main()
limit = demisto.args()['limit']
@@ -301,29 +296,27 @@ def test_main_incident_truncated(mocker):
def test_main_incident_nested(mocker):
"""
- Test if fetched incident truncated - Should return MESSAGE_WARNING_TRUNCATED in the message
- :param mocker:
- :return:
+ Given: Same test case as in test_main_regular but with a nested field as a similarTextField
+ When: Running main()
+ Then: Ensure the nested field exists in the results
"""
from DBotFindSimilarIncidents import main
global SIMILAR_INDICATORS, FETCHED_INCIDENT, CURRENT_INCIDENT
- FETCHED_INCIDENT = FETCHED_INCIDENT_NOT_EMPTY
- CURRENT_INCIDENT = CURRENT_INCIDENT_NOT_EMPTY
- SIMILAR_INDICATORS = SIMILAR_INDICATORS_NOT_EMPTY
- wrong_field_2 = 'wrong_field_2'
- wrong_field_3 = 'wrong_field_3'
- wrong_field_4 = 'wrong_field_4'
- nested_field = 'xdralerts.cmd'
+ FETCHED_INCIDENT = deepcopy(FETCHED_INCIDENT_NOT_EMPTY)
+ CURRENT_INCIDENT = deepcopy(CURRENT_INCIDENT_NOT_EMPTY)
+ SIMILAR_INDICATORS = deepcopy(SIMILAR_INDICATORS_NOT_EMPTY)
+ nested_field = 'CustomFields.nested_field'
mocker.patch.object(demisto, 'args',
return_value={
'incidentId': 12345,
- 'similarTextField': nested_field,
- 'similarCategoricalField': wrong_field_2,
- 'similarJsonField': wrong_field_3,
- 'limit': 3,
+ 'similarTextField': f'{nested_field},incident.commandline, commandline, command, '
+ 'empty_current_incident_field, empty_fetched_incident_field',
+ 'similarCategoricalField': 'signature, filehash, incident.commandline',
+ 'similarJsonField': '',
+ 'limit': 10000,
'fieldExactMatch': '',
- 'fieldsToDisplay': wrong_field_4,
+ 'fieldsToDisplay': 'filehash, destinationip, closeNotes, sourceip, alertdescription',
'showIncidentSimilarityForAllFields': True,
'minimunIncidentSimilarity': 0.2,
'maxIncidentsToDisplay': 100,
@@ -331,11 +324,10 @@ def test_main_incident_nested(mocker):
'aggreagateIncidentsDifferentDate': 'False',
'includeIndicatorsSimilarity': 'True'
})
- mocker.patch.object(demisto, 'dt', return_value=['nested_val_1', 'nested_val_2'])
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
df, _ = main()
assert not df.empty
- assert (df['similarity %s' % nested_field] == [1.0, 1.0, 1.0]).all()
+ assert (df[f"similarity {nested_field}"] > 0).all()
def test_get_get_data_from_indicators_automation():
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.py b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.py
index 3777f15814cb..a179962d42aa 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.py
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.py
@@ -9,6 +9,8 @@
import re
import math
+from GetIncidentsApiModule import * # noqa: E402
+
SEARCH_INDICATORS_LIMIT = 10000
SEARCH_INDICATORS_PAGE_SIZE = 500
@@ -236,8 +238,8 @@ def get_related_incidents(
"fromDate": from_date,
}
demisto.debug(f"Executing GetIncidentsByQuery with {args=}")
- res = execute_command("GetIncidentsByQuery", args, fail_on_error=True)
- incident_ids = [incident[INCIDENT_ID_FIELD] for incident in json.loads(res)]
+ incidents = get_incidents_by_query(args)
+ incident_ids = [incident[INCIDENT_ID_FIELD] for incident in incidents]
demisto.debug(f"Found {len(incident_ids)} related incidents: {incident_ids}")
return incident_ids
@@ -364,8 +366,8 @@ def enrich_incidents(
"populateFields": ",".join(fields_to_display),
}
demisto.debug(f"Executing GetIncidentsByQuery with {args=}")
- res = execute_command("GetIncidentsByQuery", args, fail_on_error=True)
- incidents_map: dict[str, dict] = {incident[INCIDENT_ID_FIELD]: incident for incident in json.loads(res)}
+ res = get_incidents_by_query(args)
+ incidents_map: dict[str, dict] = {incident[INCIDENT_ID_FIELD]: incident for incident in res}
if CREATED_FIELD in fields_to_display:
incidents[CREATED_FIELD] = [
dateparser.parse(incidents_map[inc_id][CREATED_FIELD]).strftime(DATE_FORMAT) # type: ignore
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.yml b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.yml
index 4fae308f3e32..a13bce442cf4 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.yml
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators.yml
@@ -42,7 +42,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/ml:1.0.0.86706
+dockerimage: demisto/ml:1.0.0.88591
runas: DBotWeakRole
tests:
- DBotFindSimilarIncidentsByIndicators - Test
diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators_test.py b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators_test.py
index b4fb0457697f..45ebbb4a9a38 100644
--- a/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators_test.py
+++ b/Packs/Base/Scripts/DBotFindSimilarIncidentsByIndicators/DBotFindSimilarIncidentsByIndicators_test.py
@@ -1,4 +1,3 @@
-import json
import numpy as np
import pandas as pd
import pytest
@@ -35,16 +34,17 @@ def get_related_indicators(incident_id: str):
def mock_execute_command(command: str, args: dict):
- query: str = args.get("query") or ""
- from_date: str = args.get("fromDate") or ""
match command:
- case "GetIncidentsByQuery":
- match = re.search(r"incident\.id:\((.*)\)", query)
+ case "getIncidents":
+ query: str = args.get("query") or ""
+ from_date: str = args.get("fromdate") or ""
+ match = re.search(r"incident\.id:\(([^\)]*)\)", query)
incident_ids = set(match.group(1).split(" ") if match and match.group(1) else [])
- res = json.dumps([
+ res = {"data": [
{k: v for k, v in i.items() if k in args["populateFields"] or k == "id"} for i in INCIDENTS_LIST
- if i["id"] in incident_ids and (not from_date or parse(i["created"]) >= parse(from_date))
- ])
+ if i["id"] in incident_ids
+ and (not from_date or parse(i["created"]) >= parse(from_date).replace(tzinfo=None))
+ ]}
case _:
raise Exception(f"Unmocked command: {command}")
return [{"Contents": res, "Type": "json"}]
diff --git a/Packs/Base/Scripts/DBotShowClusteringModelInfo/DBotShowClusteringModelInfo.yml b/Packs/Base/Scripts/DBotShowClusteringModelInfo/DBotShowClusteringModelInfo.yml
index 190b39bd285b..89e4ee18a040 100644
--- a/Packs/Base/Scripts/DBotShowClusteringModelInfo/DBotShowClusteringModelInfo.yml
+++ b/Packs/Base/Scripts/DBotShowClusteringModelInfo/DBotShowClusteringModelInfo.yml
@@ -29,6 +29,6 @@ type: python
fromversion: 6.2.0
tests:
- No tests (auto formatted)
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
marketplaces:
- xsoar
diff --git a/Packs/Base/Scripts/DeleteIndicatorRelationships/DeleteIndicatorRelationships.yml b/Packs/Base/Scripts/DeleteIndicatorRelationships/DeleteIndicatorRelationships.yml
index e7b94c5032cd..390d70004561 100644
--- a/Packs/Base/Scripts/DeleteIndicatorRelationships/DeleteIndicatorRelationships.yml
+++ b/Packs/Base/Scripts/DeleteIndicatorRelationships/DeleteIndicatorRelationships.yml
@@ -15,7 +15,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
fromversion: 6.2.0
tests:
- Relationships scripts - Test
diff --git a/Packs/Base/Scripts/DrawRelatedIncidentsCanvas/DrawRelatedIncidentsCanvas.yml b/Packs/Base/Scripts/DrawRelatedIncidentsCanvas/DrawRelatedIncidentsCanvas.yml
index 7bc75ecff922..8ae465269b6a 100644
--- a/Packs/Base/Scripts/DrawRelatedIncidentsCanvas/DrawRelatedIncidentsCanvas.yml
+++ b/Packs/Base/Scripts/DrawRelatedIncidentsCanvas/DrawRelatedIncidentsCanvas.yml
@@ -34,7 +34,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/sklearn:1.0.0.49796
+dockerimage: demisto/sklearn:1.0.0.86554
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.py b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.py
index 5edc4ee984f5..f290e6d84708 100644
--- a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.py
+++ b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.py
@@ -2,244 +2,43 @@
import pickle
import uuid
-from dateutil import parser
-PREFIXES_TO_REMOVE = ['incident.']
-PAGE_SIZE = int(demisto.args().get('pageSize', 100))
-PYTHON_MAGIC = "$$##"
-
-
-def parse_datetime(datetime_str):
- try:
- return parser.parse(datetime_str).isoformat()
- except Exception:
- return datetime_str
-
-
-def parse_relative_time(datetime_str):
- if datetime_str:
- datetime_str = datetime_str.lower()
- try:
- res = re.search("([0-9]+) (minute|minutes|hour|hours|day|days|week|weeks|month|months|year|years) ago",
- datetime_str)
- if res:
- number = int(res.group(1))
- unit = res.group(2)
- if unit in ['minute', 'hour', 'day', 'week', 'month', 'year']:
- unit += "s"
- if unit == 'years':
- unit = 'days'
- number *= 365
- elif unit == 'months':
- number *= 43800
- unit = 'minutes'
-
- kargs = {}
- kargs[unit] = int(number)
- result = datetime.now() - timedelta(**kargs)
- return result
- except Exception:
- return None
-
-
-def get_context(incident_id):
- res = demisto.executeCommand("getContext", {'id': incident_id})
- try:
- return res[0]['Contents'].get('context') or {}
- except Exception:
- return {}
-
-
-def build_incidents_query(extra_query, incident_types, time_field, from_date, to_date, non_empty_fields):
- query_parts = []
- if extra_query:
- query_parts.append(extra_query)
- if incident_types:
- types = ['"{}"'.format(x.strip()) if '*' not in x else '{}'.format(x.strip())
- for x in incident_types.split(",")]
- types_part = "type:({})".format(' '.join(types))
- query_parts.append(types_part)
- if from_date:
- from_part = '%s:>="%s"' % (time_field, parse_datetime(from_date))
- query_parts.append(from_part)
- if to_date:
- to_part = '%s:<"%s"' % (time_field, parse_datetime(to_date))
- query_parts.append(to_part)
- if len(non_empty_fields) > 0:
- non_empty_fields_part = " and ".join("{}:*".format(x) for x in non_empty_fields)
- query_parts.append(non_empty_fields_part)
- if len(query_parts) == 0:
- raise Exception("Incidents query is empty - please fill one of the arguments")
- query = " and ".join('({})'.format(x) for x in query_parts)
- return query
-
-
-def handle_incident(inc, fields_to_populate, include_context):
- # we flat the custom field to the incident structure, like in the context
- custom_fields = inc.get('CustomFields', {}) or {}
- inc.update(custom_fields)
- if fields_to_populate and len(fields_to_populate) > 0:
- inc = {k: v for k, v in inc.items() if k.lower() in {val.lower() for val in fields_to_populate}}
- if include_context:
- inc['context'] = get_context(inc['id'])
- return inc
-
-
-def is_incident_contains_python_magic(inc):
- return PYTHON_MAGIC in json.dumps(inc)
-
-
-def get_fields_to_populate_arg(fields_to_populate):
- incidents_fields_to_populate = []
- for field in fields_to_populate:
- if "." in field:
- # handle complex field case
- incidents_fields_to_populate.append(field[:field.find(".")])
- else:
- incidents_fields_to_populate.append(field)
- return ",".join(incidents_fields_to_populate)
-
-
-def get_incidents_by_page(args, page, fields_to_populate, include_context):
- args['page'] = page
- if is_demisto_version_ge('6.2.0') and len(fields_to_populate) > 0:
- args['populateFields'] = get_fields_to_populate_arg(fields_to_populate)
- res = demisto.executeCommand("getIncidents", args)
- if is_error(res):
- error_message = get_error(res)
- raise Exception("Failed to get incidents by query args: %s error: %s" % (args, error_message))
- if res[0]['Contents'].get('data') is None:
- return []
- incidents = res[0]['Contents'].get('data') or []
-
- parsed_incidents = []
- for inc in incidents:
- new_incident = handle_incident(inc, fields_to_populate, include_context)
- if is_incident_contains_python_magic(new_incident):
- demisto.debug("Warning: skip incident [id:%s] that contains python magic" % str(inc['id']))
- continue
- parsed_incidents.append(new_incident)
- return parsed_incidents
-
-
-def get_demisto_datetme_format(date_string):
- if date_string:
- date_object = None
- # try to parse date string
- try:
- date_object = parser.parse(date_string)
- except Exception:
- pass
- # try to parse relative time
- if date_object is None and date_string.strip().endswith("ago"):
- date_object = parse_relative_time(date_string)
-
- if date_object:
- return date_object.astimezone().isoformat('T')
- else:
- return None
-
-
-def get_incidents(query, time_field, size, from_date, to_date, fields_to_populate, include_context):
- query_size = min(PAGE_SIZE, size)
- args = {"query": query, "size": query_size, "sort": "%s.%s" % (time_field, "desc")}
- # apply only when created time field
- if time_field == 'created':
- if from_date:
- from_datetime = get_demisto_datetme_format(from_date)
- if from_datetime:
- args['fromdate'] = from_datetime
- else:
- demisto.results("did not set from date due to a wrong format: " + from_date)
-
- if to_date:
- to_datetime = get_demisto_datetme_format(to_date)
- if to_datetime:
- args['todate'] = to_datetime
- else:
- demisto.results("did not set to date due to a wrong format: " + from_date)
-
- incident_list = [] # type: ignore
- page = 0
- while len(incident_list) < size:
- incidents = get_incidents_by_page(args, page, fields_to_populate, include_context)
- if not incidents:
- break
- incident_list += incidents
- page += 1
- return incident_list[:size]
-
-
-def get_comma_sep_list(value):
- value = value.replace('|', ',')
- return map(lambda x: x.strip(), value.split(","))
-
-
-def preprocess_incidents_fields_list(incidents_fields):
- res = []
- for field in incidents_fields:
- field = field.strip()
- for prefix in PREFIXES_TO_REMOVE:
- if field.startswith(prefix):
- field = field[len(prefix):]
- res.append(field)
- return res
+from GetIncidentsApiModule import *
+
+
+def encode_outputs(incidents: list[dict], output_format: str) -> str | bytes:
+ match output_format:
+ case "pickle":
+ return pickle.dumps(incidents, protocol=2) # guardrails-disable-line
+ case "json":
+ return json.dumps(incidents)
+ case _:
+ raise DemistoException(f"Invalid output format: {output_format}")
+
+
+def to_file_entry(incidents: list[dict], output_format: str) -> dict[str, Any]:
+ file_name = str(uuid.uuid4())
+ encoded_data = encode_outputs(incidents, output_format)
+ return fileResult(file_name, encoded_data) | {
+ "Contents": incidents,
+ "HumanReadable": f"Fetched {len(incidents)} incidents successfully",
+ "EntryContext": {
+ "GetIncidentsByQuery": {
+ "Filename": file_name,
+ "FileFormat": output_format,
+ },
+ }
+ }
def main():
try:
- # fetch query
- d_args = dict(demisto.args())
- for arg_name in ['NonEmptyFields', 'populateFields']:
- split_argument_list = get_comma_sep_list(d_args.get(arg_name, ''))
- split_argument_list = [x for x in split_argument_list if len(x) > 0]
- if 'openDuration' in split_argument_list: # pragma: no cover
- split_argument_list.append('openduration') # pragma: no cover
- split_argument_list.remove('openDuration') # pragma: no cover
- d_args[arg_name] = preprocess_incidents_fields_list(split_argument_list)
- query = build_incidents_query(d_args.get('query'),
- d_args.get('incidentTypes'),
- d_args['timeField'],
- d_args.get('fromDate'),
- d_args.get('toDate'),
- d_args.get('NonEmptyFields'))
- fields_to_populate = d_args.get('populateFields') # type: ignore
- if len(fields_to_populate) > 0: # type: ignore
- fields_to_populate += d_args['NonEmptyFields']
- fields_to_populate.append('id')
- fields_to_populate = set([x for x in fields_to_populate if x]) # type: ignore
- include_context = d_args['includeContext'] == 'true'
- incidents = get_incidents(query, d_args['timeField'],
- int(d_args['limit']),
- d_args.get('fromDate'),
- d_args.get('toDate'),
- fields_to_populate,
- include_context)
-
- # output
- file_name = str(uuid.uuid4())
- output_format = d_args['outputFormat']
- if output_format == 'pickle':
- data_encoded = pickle.dumps(incidents, protocol=2)
- elif output_format == 'json':
- data_encoded = json.dumps(incidents) # type: ignore
- else:
- raise Exception("Invalid output format: %s" % output_format)
-
- entry = fileResult(file_name, data_encoded)
- entry['Contents'] = incidents
- entry['HumanReadable'] = "Fetched %d incidents successfully by the query: %s" % (len(incidents), query)
- entry['EntryContext'] = {
- 'GetIncidentsByQuery': {
- 'Filename': file_name,
- 'FileFormat': output_format,
- }
- }
- return entry
+ args = demisto.args()
+ incidents = get_incidents_by_query(args)
+ return_results(to_file_entry(incidents, args["outputFormat"]))
except Exception as e:
return_error(str(e))
-if __name__ in ['builtins', '__main__']:
- entry = main()
- demisto.results(entry)
+if __name__ in ["builtins", "__main__"]:
+ main()
diff --git a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.yml b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.yml
index d1a0cfa3e75d..609d11ea0fe2 100644
--- a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.yml
+++ b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery.yml
@@ -12,7 +12,8 @@ args:
name: limit
- auto: PREDEFINED
defaultValue: 'false'
- description: Whether to query and fetch incident context. Can be "true" or "false". The default is "false".
+ deprecated: true
+ description: Deprecated due to performance considerations. Rather than using this argument, it is recommended to retrieve the context of the incidents separately, preferably for a limited number of incidents.
name: includeContext
predefined:
- 'true'
@@ -36,7 +37,7 @@ args:
- description: A comma-separated list of fields in the object to poplulate.
name: populateFields
- defaultValue: '100'
- description: Incidents query batch size
+ description: Incidents query batch size.
name: pageSize
comment: |-
Gets a list of incident objects and the associated incident outputs that
@@ -62,7 +63,7 @@ tags:
- ml
timeout: 60µs
type: python
-dockerimage: demisto/python3:3.10.12.66339
+dockerimage: demisto/python3:3.10.13.87159
tests:
- Create Phishing Classifier V2 ML Test
fromversion: 5.0.0
diff --git a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery_test.py b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery_test.py
index 0017c037b7cb..182710e062ce 100644
--- a/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery_test.py
+++ b/Packs/Base/Scripts/GetIncidentsByQuery/GetIncidentsByQuery_test.py
@@ -1,191 +1,64 @@
-from GetIncidentsByQuery import build_incidents_query, get_incidents, parse_relative_time, main, \
- preprocess_incidents_fields_list, get_demisto_datetme_format, get_fields_to_populate_arg, PYTHON_MAGIC, \
- get_comma_sep_list
-
-from CommonServerPython import *
-
-incident1 = {
- 'id': 1,
- 'name': 'This is incident1',
- 'type': 'Phishing',
- 'severity': 0,
- 'status': 1,
- 'created': '2019-01-02',
- 'CustomFields': {
- 'testField': "testValue"
- },
- 'closed': '0001-01-01T00:00:00Z',
- 'labels': [{'type': 'subject', 'value': 'This subject1'}, {'type': 'unique', 'value': 'This subject1'}],
- 'attachment': [{'name': 'Test word1 word2'}]
-}
-incident2 = dict(incident1)
-incident2['id'] = 2
-
-incident_with_magic = dict(incident1)
-incident_with_magic['id'] = 3
-incident_with_magic['name'] = PYTHON_MAGIC
-
-
-def get_args():
- args = {}
- args['incidentTypes'] = 'Phishing,Malware'
- args['timeField'] = 'created'
- args['fromDate'] = '2019-10-01'
- args['toDate'] = '3 days ago'
- args['limit'] = '10'
- args['includeContext'] = 'false'
- args['outputFormat'] = 'json'
- args['pageSize'] = '10'
- return args
-
-
-def test_build_query(mocker):
- mocker.patch.object(demisto, 'args', side_effect=get_args)
- query = build_incidents_query("Extra part", "Phishing,Malware", "modified", "2019-01-10", "3 days ago",
- ["status", "closeReason"])
- assert query == '(Extra part) and (type:("Phishing" "Malware")) and (modified:>="2019-01-10T00:00:00") ' \
- 'and (modified:<"3 days ago") and (status:* and closeReason:*)'
- query = build_incidents_query("Extra part", "Phishing", "modified", "2019-01-10", "3 days ago",
- ["status"])
- assert query == '(Extra part) and (type:("Phishing")) and (modified:>="2019-01-10T00:00:00") ' \
- 'and (modified:<"3 days ago") and (status:*)'
-
-
-def test_get_incidents(mocker):
- mocker.patch.object(demisto, 'args', side_effect=get_args)
- size = 100
- query = 'query'
-
- def validate_args(command, args):
- assert args.get('fromdate')
- assert len(args.get('fromdate')) > 5
- assert args.get('todate')
- assert len(args.get('todate')) > 5
- assert args['size'] == size
- assert args['query'] == query
- return [{'Type': entryTypes['note'], 'Contents': {'data': []}}]
-
- mocker.patch.object(demisto, 'executeCommand', side_effect=validate_args)
- get_incidents(query, "created", size, "3 days ago", "1 days ago", None, False)
- get_incidents(query, "created", size, "3 months ago", "1 month ago", None, False)
- get_incidents(query, "created", size, "3 weeks ago", "1 weeks ago", None, False)
- get_incidents(query, "created", size, "2020-02-16T17:45:53.179489", "2020-02-20", None, False)
-
- def validate_args_without_from(command, args):
- assert args.get('fromdate') is None
- return [{'Type': entryTypes['note'], 'Contents': {'data': []}}]
-
- mocker.patch.object(demisto, 'executeCommand', side_effect=validate_args_without_from)
-
- get_incidents(query, "created", size, None, None, None, False)
- get_incidents(query, "created", size, "3 min ago", None, None, False)
-
-
-def test_parse_relative_time():
- threshold = 2
- t1 = parse_relative_time("3 days ago")
- t2 = datetime.now() - timedelta(days=3)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("3 minutes ago")
- t2 = datetime.now() - timedelta(minutes=3)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("1 months ago")
- t2 = datetime.now() - timedelta(minutes=43800)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("1 month ago")
- t2 = datetime.now() - timedelta(minutes=43800)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("2 weeks ago")
- t2 = datetime.now() - timedelta(weeks=2)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("2 week ago")
- t2 = datetime.now() - timedelta(weeks=2)
- assert abs((t2 - t1)).total_seconds() < threshold
-
- t1 = parse_relative_time("2 years ago")
- t2 = datetime.now() - timedelta(days=365 * 2)
- assert abs((t2 - t1)).total_seconds() < threshold
-
-
-GET_INCIDENTS_COUNTER = 0
-
-
-def execute_command_get_incidents(command, args):
- global GET_INCIDENTS_COUNTER
- if GET_INCIDENTS_COUNTER % 2 == 0:
- res = [{'Type': entryTypes['note'], 'Contents': {'data': [incident1, incident2]}}]
- else:
- res = [{'Type': entryTypes['note'], 'Contents': {'data': None}}]
- GET_INCIDENTS_COUNTER += 1
- return res
-
-
-def execute_command_get_incidents_with_magic(command, args):
- global GET_INCIDENTS_COUNTER
- if GET_INCIDENTS_COUNTER % 2 == 0:
- res = [{'Type': entryTypes['note'], 'Contents': {'data': [incident1, incident_with_magic]}}]
- else:
- res = [{'Type': entryTypes['note'], 'Contents': {'data': None}}]
- GET_INCIDENTS_COUNTER += 1
- return res
-
-
-def test_main(mocker):
- args = dict(get_args())
- mocker.patch.object(demisto, 'args', return_value=args)
- mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command_get_incidents)
-
- entry = main()
- assert "Fetched 2 incidents successfully" in entry['HumanReadable']
- assert 'GetIncidentsByQuery' in entry['EntryContext']
- assert 'status' in entry['Contents'][0]
- assert 'context' not in entry['Contents'][0]
- assert 'testValue' == entry['Contents'][0]['testField']
-
- args['includeContext'] = 'true'
- entry = main()
- assert {} == entry['Contents'][0]['context']
-
- args['populateFields'] = 'testField,status'
- args['NonEmptyFields'] = 'severity'
- entry = main()
- assert set(entry['Contents'][0].keys()) == set(['testField', 'status', 'severity', 'id', 'context'])
- args.pop('fromDate')
- entry = main()
- assert set(entry['Contents'][0].keys()) == set(['testField', 'status', 'severity', 'id', 'context'])
-
-
-def test_skip_python_magic(mocker):
- args = dict(get_args())
- mocker.patch.object(demisto, 'args', return_value=args)
- mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command_get_incidents_with_magic)
-
- entry = main()
- assert entry['Contents'][0]['id'] == 1
- assert len(entry['Contents']) == 1
-
-
-def test_preprocess_incidents_fields_list():
- incidents_fields = ['incident.emailbody', ' incident.emailsbuject']
- assert preprocess_incidents_fields_list(incidents_fields) == ['emailbody', 'emailsbuject']
-
-
-def test_get_demisto_datetme_format():
- assert "2020-01-01T00:00:00+00:00" == get_demisto_datetme_format("2020-01-01 00:00:00+00:00")
-
-
-def test_get_fields_to_populate_arg():
- assert get_fields_to_populate_arg(["field1", "grid_field.test1"]) == "field1,grid_field"
- assert get_fields_to_populate_arg(["field1", "field2"]) == "field1,field2"
- assert get_fields_to_populate_arg([]) == ""
-
-
-def test_get_comma_sep_list():
- split_argument_list = get_comma_sep_list("t1,t2,t3")
- split_argument_list = [x for x in split_argument_list if len(x) > 0]
- assert split_argument_list == ["t1", "t2", "t3"]
+import pytest
+from CommonServerPython import DemistoException, EntryType
+import demistomock as demisto
+import GetIncidentsByQuery
+
+import json
+import pickle
+
+
+def test_encode_outputs():
+ """
+ Given: Search incidents results
+ When: Running encode_outputs():
+ - once with "json" format
+ - once with "pickle" format
+ - once with unexpected format
+ Then: Ensure the results are encoded correctly, or an error is raised in case of unexpected format
+ """
+ from GetIncidentsByQuery import encode_outputs
+ incidents = [{"id": 1}]
+ assert json.loads(encode_outputs(incidents, "json")) == incidents
+ assert pickle.loads(encode_outputs(incidents, "pickle")) == incidents # guardrails-disable-line
+ with pytest.raises(DemistoException):
+ encode_outputs(incidents, "oyvey")
+
+
+def test_to_file_entry(mocker):
+ """
+ Given: Search incidents results
+ When: Running to_file_entry() with "json" format
+ Then: Ensure a file entry is returned in the expected format
+ """
+ incidents = [{"id": 1}]
+ mocker.patch.object(demisto, "investigation", return_value={"id": "inv"})
+ res = GetIncidentsByQuery.to_file_entry(incidents, "json")
+ assert res["Type"] == EntryType.FILE
+ assert res["EntryContext"]["GetIncidentsByQuery"]["FileFormat"] == "json"
+ assert res["Contents"] == incidents
+
+
+def test_get_incidents_by_query_sanity_test(mocker):
+ """
+ Given: Search incidents query arguments
+ When: Running main()
+ Then: Ensure the expected incident is returned
+ """
+ mocker.patch.object(demisto, "args", return_value={"query": "oyvey", "outputFormat": "json"})
+ mocker.patch.object(demisto, "executeCommand", return_value=[{"Contents": {"data": [{"id": 1}]}, "Type": "json"}])
+ demisto_results = mocker.patch.object(demisto, "results")
+ GetIncidentsByQuery.main()
+ incidents = demisto_results.call_args[0][0]["Contents"]
+ assert len(incidents) == 1
+ assert incidents[0]["id"] == 1
+
+
+def test_get_incidents_by_query_bad_inputs(mocker):
+ """
+ Given: Search incidents with no query arguments
+ When: Running main()
+ Then: Ensure an error entry is returned
+ """
+ return_error = mocker.patch.object(GetIncidentsByQuery, "return_error")
+ GetIncidentsByQuery.main()
+ assert "Incidents query is empty" in return_error.call_args[0][0]
diff --git a/Packs/Base/Scripts/GetIncidentsByQuery/README.md b/Packs/Base/Scripts/GetIncidentsByQuery/README.md
index 6f0a3a521d78..5a98c38e52aa 100644
--- a/Packs/Base/Scripts/GetIncidentsByQuery/README.md
+++ b/Packs/Base/Scripts/GetIncidentsByQuery/README.md
@@ -28,7 +28,7 @@ This script is used in the following playbooks and scripts.
| fromDate | The start date by which to filter incidents. Date format will be the same as in the incidents query page, for example: "3 days ago", ""2019-01-01T00:00:00 \+0200"\). |
| toDate | The end date by which to filter incidents. Date format will be the same as in the incidents query page, for example: "3 days ago", ""2019-01-01T00:00:00 \+0200"\). |
| limit | The maximum number of incidents to fetch. |
-| includeContext | Whether to query and fetch incident context. Can be "true" or "false". The default is "false". |
+| includeContext | Deprecated due to performance considerations. Rather than using this argument, it is recommended to retrieve the context of the incidents separately, preferably for a limited number of incidents. |
| timeField | The incident field to specify for the date range. Can be "created" or "modified". The default is "created". Due to performance considerations, you should only use "modified" if you have a large number of incidents. |
| NonEmptyFields | A comma-separated list of non-empty value incident field names by which to filter incidents. |
| outputFormat | The output file format. |
diff --git a/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.py b/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.py
index ad47d7470a1d..31713f61858e 100644
--- a/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.py
+++ b/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.py
@@ -46,14 +46,19 @@ def generate_metrics_df(y_true, y_true_per_class, y_pred, y_pred_per_class, thre
df = pd.DataFrame(columns=['Class', 'Precision', 'Recall', 'TP', 'FP', 'Coverage', 'Total'])
for class_ in sorted(y_pred_per_class):
row = calculate_df_row(class_, threshold, y_true_per_class, y_pred_per_class)
- df = df.append(row, ignore_index=True)
- df = df.append({'Class': 'All',
- 'Precision': df["Precision"].mean(),
- 'Recall': df["Recall"].mean(),
- 'TP': df["TP"].sum(),
- 'FP': df["FP"].sum(),
- 'Coverage': df["Coverage"].sum(),
- 'Total': df["Total"].sum()}, ignore_index=True)
+ df = pd.concat([df, pd.DataFrame([row])], ignore_index=True)
+ df = pd.concat([
+ df,
+ pd.DataFrame([{
+ 'Class': 'All',
+ 'Precision': df["Precision"].mean(),
+ 'Recall': df["Recall"].mean(),
+ 'TP': df["TP"].sum(),
+ 'FP': df["FP"].sum(),
+ 'Coverage': df["Coverage"].sum(),
+ 'Total': df["Total"].sum()}
+ ]),
+ ], ignore_index=True)
df = df[['Class', 'Precision', 'TP', 'FP', 'Coverage', 'Total']]
explained_metrics = ['Precision', 'TP (true positive)', 'FP (false positive)', 'Coverage', 'Total']
explanation = ['{} {}'.format(bold_hr(metric), METRICS[metric]) for metric in explained_metrics]
@@ -289,7 +294,7 @@ def calculate_per_class_report_entry(class_to_arrs, labels, y_pred_per_class, y_
for threshold in sorted(class_to_thresholds[class_]):
row = calculate_df_row(class_, threshold, y_true_per_class, y_pred_per_class)
row['Threshold'] = threshold
- class_threshold_df = class_threshold_df.append(row, ignore_index=True)
+ class_threshold_df = pd.concat([class_threshold_df, pd.DataFrame([row])], ignore_index=True)
class_threshold_df = reformat_df_fractions_to_percentage(class_threshold_df)
class_threshold_df['Threshold'] = class_threshold_df['Threshold'].apply(lambda p: '{:.2f}'.format(p))
class_threshold_df = class_threshold_df[['Threshold', 'Precision', 'TP', 'FP', 'Coverage', 'Total']]
diff --git a/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.yml b/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.yml
index 32ab8b476f8e..4d59a291421a 100644
--- a/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.yml
+++ b/Packs/Base/Scripts/GetMLModelEvaluation/GetMLModelEvaluation.yml
@@ -1,27 +1,27 @@
args:
-- description: A list of labels of the test set
+- description: A list of labels of the test set.
isArray: true
name: yTrue
required: true
-- description: A list of dictionaries contain probability predictions for all classes
+- description: A list of dictionaries containing probability predictions for all classes.
isArray: true
name: yPred
required: true
- defaultValue: '0.5'
- description: minimum precision of all classes, ranges 0-1
+ description: minimum precision of all classes, ranges 0-1.
name: targetPrecision
- defaultValue: '0.0'
- description: minimum recall of all classes, ranges 0-1
+ description: minimum recall of all classes, ranges 0-1.
isArray: true
name: targetRecall
- defaultValue: 'true'
- description: if set to 'true', the output will include a full exaplanation of the confidence threshold meaning
+  description: if set to 'true', the output will include a full explanation of the confidence threshold meaning.
isArray: true
name: detailedOutput
predefined:
- 'true'
- 'false'
-comment: Finds a threshold for ML model, and performs an evaluation based on it
+comment: Finds a threshold for ML model, and performs an evaluation based on it.
commonfields:
id: GetMLModelEvaluation
version: -1
@@ -29,7 +29,7 @@ enabled: true
name: GetMLModelEvaluation
outputs:
- contextPath: GetMLModelEvaluation.Threshold
- description: The found thresholds which meets the conditions of precision and recall
+ description: The found thresholds which meets the conditions of precision and recall.
type: String
- contextPath: GetMLModelEvaluation.ConfusionMatrixAtThreshold
description: The model evaluation confusion matrix for mails above the threhsold.
@@ -43,7 +43,7 @@ tags:
- ml
timeout: 60µs
type: python
-dockerimage: demisto/ml:1.0.0.30541
+dockerimage: demisto/ml:1.0.0.88591
tests:
- Create Phishing Classifier V2 ML Test
fromversion: 5.0.0
diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
index ed28e85c8ad0..581fe8c33d70 100644
--- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
+++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
@@ -45,7 +45,7 @@ tags:
- pdf
timeout: '0'
type: python
-dockerimage: demisto/sane-pdf-reports:1.0.0.84389
+dockerimage: demisto/sane-pdf-reports:1.0.0.88753
runas: DBotWeakRole
tests:
- No Test
diff --git a/Packs/Base/pack_metadata.json b/Packs/Base/pack_metadata.json
index 875097b5a2e5..a8e7b938fd7f 100644
--- a/Packs/Base/pack_metadata.json
+++ b/Packs/Base/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Base",
"description": "The base pack for Cortex XSOAR.",
"support": "xsoar",
- "currentVersion": "1.33.30",
+ "currentVersion": "1.33.42",
"author": "Cortex XSOAR",
"serverMinVersion": "6.0.0",
"url": "https://www.paloaltonetworks.com/cortex",
diff --git a/Packs/BeyondTrust_Password_Safe/.secrets-ignore b/Packs/BeyondTrust_Password_Safe/.secrets-ignore
index e69de29bb2d1..5c3c0a7d2481 100644
--- a/Packs/BeyondTrust_Password_Safe/.secrets-ignore
+++ b/Packs/BeyondTrust_Password_Safe/.secrets-ignore
@@ -0,0 +1,2 @@
+https://www.beyondtrust.com/docs/beyondinsight-password-safe/bi/event-forwarder/pb-ps-events.htm
+https://www.beyondtrust.com/docs/beyondinsight-password-safe/bi/integrations/third-party/snmp-trap-and-syslog.htm
\ No newline at end of file
diff --git a/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif
new file mode 100644
index 000000000000..060e8fee5fa0
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif
@@ -0,0 +1,144 @@
+[MODEL: dataset = beyondtrust_passwordsafe_raw]
+/* Supported event formats: Comma delimited and tab delimited Syslog messages. */
+alter // Extract raw data (https://www.beyondtrust.com/docs/beyondinsight-password-safe/bi/event-forwarder/pb-ps-events.htm)
+ agent_description = arrayindex(regextract(_raw_log, "Agent Desc:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ agent_id = arrayindex(regextract(_raw_log, "Agent ID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ agent_version = arrayindex(regextract(_raw_log, "Agent Ver:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ app_user_id = arrayindex(regextract(_raw_log, "AppUserID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ applications = arrayindex(regextract(_raw_log, "Applications:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ audit_id = arrayindex(regextract(_raw_log, "AuditID:\s\"?(\w+)"), 0),
+ category = arrayindex(regextract(_raw_log, "(?:,|\t|\s{5})Category:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ client_id = arrayindex(regextract(_raw_log, "Client Id:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ client_ip = arrayindex(regextract(_raw_log, "Client IP Address:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ created_by = coalesce(
+ arrayindex(regextract(_raw_log, "CreatedBy:\s\"([^\"]+)\""), 0),
+ arrayindex(regextract(_raw_log, "CreatedBy:\s(\S.+?)(?:\s{4}|\t)"), 0)),
+ description = arrayindex(regextract(_raw_log, "Description:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ details = arrayindex(regextract(_raw_log, "Details:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ dns_name = arrayindex(regextract(_raw_log, "DNS Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ domain_name = arrayindex(regextract(_raw_log, "Domain Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ elevation_command = arrayindex(regextract(_raw_log, "Elevation Command:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)") , 0),
+ email = arrayindex(regextract(_raw_log, "Email:\s\"?(\S+\@[\w\-\.]+)"), 0),
+ event_desc = arrayindex(regextract(_raw_log, "Event Desc:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ event_name = arrayindex(regextract(_raw_log, "Event Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ event_target = arrayindex(regextract(_raw_log, "Target:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ file_name = arrayindex(regextract(_raw_log, "FileName:\s\"?\w\S+\"?"), 0),
+ folder = arrayindex(regextract(_raw_log, "Folder:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ folder_path = arrayindex(regextract(_raw_log, "FolderPath:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ group_id = arrayindex(regextract(_raw_log, "GroupId:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ group_name = arrayindex(regextract(_raw_log, "Group:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ hostname = arrayindex(regextract(_raw_log, "(?:\t|,)Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ id = arrayindex(regextract(_raw_log, "\"Id:\s\"(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ is_failed = arrayindex(regextract(_raw_log, "Failed:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ jump_host = arrayindex(regextract(_raw_log, "Jumphost:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ log_id = arrayindex(regextract(_raw_log, "LogID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ login_account_id = arrayindex(regextract(_raw_log, "Login Account ID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ managed_account_id = arrayindex(regextract(_raw_log, "Account ID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ managed_entity_type = arrayindex(regextract(_raw_log, "Managed System Type:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ managed_system_id = arrayindex(regextract(_raw_log, "Managed System ID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ managed_system_name = arrayindex(regextract(_raw_log, "Managed System Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ message = arrayindex(regextract(_raw_log, "Message:\s\"([^\"]+)"), 0),
+ modified_by = coalesce(
+ arrayindex(regextract(_raw_log, "ModifiedBy:\s\"([^\"]+)\""), 0),
+ arrayindex(regextract(_raw_log, "ModifiedBy:\s(\S.+?)(?:\s{4}|\t)"), 0)),
+ netbios_name = arrayindex(regextract(_raw_log, "NetBIOS Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ object_id = arrayindex(regextract(_raw_log, "ObjectID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ object_type = arrayindex(regextract(_raw_log, "ObjectType:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ operation = arrayindex(regextract(_raw_log, "Operation:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ port = arrayindex(regextract(_raw_log, "Port:\s*\"?(\d{1,5})"), 0),
+ target_host_os = arrayindex(regextract(_raw_log, "OS:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ target_host_platform = arrayindex(regextract(_raw_log, "Platform name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ owner = coalesce(
+ arrayindex(regextract(_raw_log, "Owner:\s\"([^\"]+)\""), 0),
+ arrayindex(regextract(_raw_log, "Owner:\s(\S.+?)(?:\s{4}|\t)"), 0)),
+ owner_id = coalesce(
+ arrayindex(regextract(_raw_log, "OwnerId:\s\"([^\"]+)\""), 0),
+ arrayindex(regextract(_raw_log, "OwnerId:\s(\S.+?)(?:\s{4}|\t)"), 0)),
+ report_name = arrayindex(regextract(_raw_log, "Report Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ response_code = arrayindex(regextract(_raw_log, "Code:\s\"?(\w+)"), 0),
+ result = arrayindex(regextract(_raw_log, "Result:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ role_used = arrayindex(regextract(_raw_log, "RoleUsed:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ source_host = arrayindex(regextract(_raw_log, "Source Host:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ source_ip = arrayindex(regextract(_raw_log, "(?:Source IP|IPAddress):\s\"?([\da-fA-F\:\.]+)"), 0),
+ title = arrayindex(regextract(_raw_log, "Title:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ user = coalesce(
+ arrayindex(regextract(_raw_log, "User:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ arrayindex(regextract(_raw_log, "UserName:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ arrayindex(regextract(_raw_log, "SAM Account Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ arrayindex(regextract(_raw_log, "User Principal Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ arrayindex(regextract(_raw_log, "SSO User Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0)),
+ user_id = arrayindex(regextract(_raw_log, "UserID:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ workgroup_description = arrayindex(regextract(_raw_log, "Workgroup Desc:\s?\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ workgroup_id = arrayindex(regextract(_raw_log, "Workgroup ID:\s?\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ workgroup_location = arrayindex(regextract(_raw_log, "Workgroup Location:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0)
+| alter // Post extraction processing
+ auth_type = coalesce(arrayindex(regextract(details, "Type=(\w+)"), 0), arrayindex(regextract(_raw_log, "Authentication Type:\s*\"?(\w+)"), 0)),
+ is_event_outcome_successful = if(is_failed = "0" or result = "S" or event_desc ~= "Success" or response_code = "NoError", to_boolean("TRUE")),
+ is_event_outcome_failure = if(is_failed = "1" or result = "F" or event_desc ~= "(?i)Failed" or category ~= "Failure" or response_code ~= "Failed", to_boolean("TRUE")),
+ os_platform = coalesce(target_host_os, target_host_platform),
+ owner_details = concat(owner, " (", owner_id, ")"),
+ request_id = arrayindex(regextract(details, "(?:Request \#|ReleaseRequestId=)(\w+)"), 0),
+ target_application = arrayindex(regextract(details, "Application=(\w+)"), 0),
+ target_account = coalesce(
+ arrayindex(regextract(event_target, "Account\:(\S+)"), 0),
+ arrayindex(regextract(_raw_log, "Username:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0),
+ arrayindex(regextract(_raw_log, "Account\s?Name:\s\"?(\w.*?)\"?(?:,|\s{4}|\t)"), 0)),
+ target_asset = arrayindex(regextract(event_target, "Asset(?:\=|\:)(\S+)"), 0),
+ target_netbios_name = arrayindex(regextract(message, "NetBiosName=([^,]+)"), 0),
+ user_domain = coalesce(arrayindex(regextract(user, "(.+)\\.+"), 0), arrayindex(split(user, "@"), 1), arrayindex(split(email, "@"), 1)),
+ user_name = coalesce(arrayindex(regextract(user, "\\(.+)"), 0), arrayindex(regextract(user, "(.+)\@"), 0), user),
+ workgroup = concat(workgroup_description, " (", workgroup_id, ")")
+| alter
+ os_uppercase = uppercase(os_platform),
+ client_ipv4 = if(client_ip ~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", client_ip),
+ client_ipv6 = if(client_ip ~= ":", client_ip),
+ src_ipv4 = if(source_ip ~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", source_ip),
+ src_ipv6 = if(source_ip ~= ":", source_ip),
+ target_user_domain = arrayindex(regextract(target_account, "(.+)\\.+"), 0),
+ target_user_name = coalesce(arrayindex(regextract(target_account, "\\(.+)"), 0), target_account)
+| alter // XDM Mapping
+ xdm.auth.auth_method = auth_type,
+ xdm.email.recipients = if(email != null, arraycreate(email)),
+ xdm.event.description = arraystring(arraycreate(event_desc, description, details, message, title), ". "),
+ xdm.event.id = coalesce(log_id, audit_id),
+ xdm.event.operation = operation,
+ xdm.event.original_event_type = event_name,
+ xdm.event.outcome = if(is_event_outcome_successful, XDM_CONST.OUTCOME_SUCCESS, is_event_outcome_failure, XDM_CONST.OUTCOME_FAILED),
+ xdm.event.outcome_reason = if(is_event_outcome_failure, arraystring(arraycreate(response_code, message), ". ")),
+ xdm.event.type = concat(category, ": ", event_name),
+ xdm.intermediate.host.hostname = jump_host,
+ xdm.observer.name = agent_description,
+ xdm.observer.type = agent_id,
+ xdm.observer.version = agent_version,
+ xdm.session_context_id = request_id,
+ xdm.source.agent.identifier = agent_id,
+ xdm.source.agent.version = agent_version,
+ xdm.source.host.hostname = if(dns_name != null and dns_name != "UNKNOWN", dns_name, coalesce(source_host, hostname, netbios_name)),
+ xdm.source.ipv4 = coalesce(src_ipv4, client_ipv4),
+ xdm.source.ipv6 = coalesce(src_ipv6, client_ipv6),
+ xdm.source.host.ipv4_addresses = arraydistinct(arraycreate(src_ipv4, client_ipv4)),
+ xdm.source.host.ipv6_addresses = arraydistinct(arraycreate(src_ipv6, client_ipv6)),
+ xdm.source.location.region = workgroup_location,
+ xdm.source.user.domain = coalesce(user_domain, domain_name),
+ xdm.source.user.groups = arraycreate(role_used),
+ xdm.source.user.identifier = coalesce(login_account_id, client_id),
+ xdm.source.user.ou = workgroup,
+ xdm.source.user.username = user_name,
+ xdm.target.application.name = coalesce(target_application, to_string(applications)),
+ xdm.target.process.command_line = elevation_command,
+ xdm.target.file.directory = folder,
+ xdm.target.file.filename = file_name,
+ xdm.target.file.path = folder_path,
+ xdm.target.host.hostname = coalesce(target_asset, target_netbios_name),
+ xdm.target.host.os = target_host_os,
+ xdm.target.host.os_family = if(os_uppercase ~= "WINDOWS|ACTIVE DIRECTORY", XDM_CONST.OS_FAMILY_WINDOWS, os_uppercase ~= "MAC", XDM_CONST.OS_FAMILY_MACOS, os_uppercase ~= "LINUX", XDM_CONST.OS_FAMILY_LINUX, os_uppercase ~= "ANDROID", XDM_CONST.OS_FAMILY_ANDROID, os_uppercase ~= "IOS", XDM_CONST.OS_FAMILY_IOS, os_uppercase ~= "UBUNTU", XDM_CONST.OS_FAMILY_UBUNTU, os_uppercase ~= "DEBIAN", XDM_CONST.OS_FAMILY_DEBIAN, os_uppercase ~= "FEDORA", XDM_CONST.OS_FAMILY_FEDORA, os_uppercase ~= "CENTOS", XDM_CONST.OS_FAMILY_CENTOS, os_uppercase ~= "CHROME", XDM_CONST.OS_FAMILY_CHROMEOS, os_uppercase ~= "SOLARIS", XDM_CONST.OS_FAMILY_SOLARIS, os_uppercase ~= "SCADA", XDM_CONST.OS_FAMILY_SCADA, os_uppercase),
+ xdm.target.port = to_integer(port),
+ xdm.target.resource.id = coalesce(object_id, id, managed_system_id),
+ xdm.target.resource.name = coalesce(report_name, managed_system_name),
+ xdm.target.resource.type = coalesce(object_type, managed_entity_type),
+ xdm.target.resource.value = event_target,
+ xdm.target.resource.parent_id = coalesce(owner_details, modified_by, created_by),
+ xdm.target.user.domain = target_user_domain,
+ xdm.target.user.identifier = coalesce(user_id, app_user_id, managed_account_id),
+ xdm.target.user.groups = arraycreate(group_name, group_id),
+ xdm.target.user.username = target_user_name;
\ No newline at end of file
diff --git a/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
new file mode 100644
index 000000000000..b7b81b439688
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
@@ -0,0 +1,6 @@
+fromversion: 8.4.0
+id: BeyondTrust_Password_Safe_ModelingRule
+name: BeyondTrust Password Safe Modeling Rule
+rules: ''
+schema: ''
+tags: ''
\ No newline at end of file
diff --git a/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_schema.json b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_schema.json
new file mode 100644
index 000000000000..66402590c15b
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ModelingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_schema.json
@@ -0,0 +1,9 @@
+{
+ "beyondtrust_passwordsafe_raw": {
+ "_raw_log": {
+ "type": "string",
+ "is_array": false
+ }
+ }
+ }
+
\ No newline at end of file
diff --git a/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif b/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif
new file mode 100644
index 000000000000..1406a117428a
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.xif
@@ -0,0 +1,18 @@
+[INGEST:vendor="beyondtrust", product="passwordsafe", target_dataset="beyondtrust_passwordsafe_raw", no_hit=keep]
+/* This filter supports events which include LogTime/ChangeDt or CreateDate timestamp fields,
+ for PBPS (PowerBroker Password Safe) agent events & AppAudit agent events, respectively.
+   The supported timestamp format for these events is %m/%d/%Y %I:%M:%S %p.
+   Examples: ChangeDt: "1/15/2024 11:00:00 AM", LogTime: "12/30/2023 10:42:22 PM", CreateDate: "1/22/2024 5:32:19 PM". */
+filter _raw_log ~= "(?:LogTime|ChangeDt|CreateDate):\s\"?\d{1,2}\/\d{1,2}\/\d{4}\s\d{1,2}:\d{1,2}:\d{1,2}\s(?:A|P)M"
+| alter tmp_raw_timestamp = arrayindex(regextract(_raw_log, "(?:LogTime|ChangeDt|CreateDate):\s\"?(\w.*?)\"?(?:,|\s{4}|\t)") , 0)
+| alter _time = parse_timestamp("%m/%d/%Y %I:%M:%S %p", tmp_raw_timestamp)
+| fields - tmp*;
+
+/* This filter is provided as a fallback for events that do not include LogTime/ChangeDt/CreateDate fields.
+   It supports RFC 5424 compatible syslog header timestamps (RFC 3339 format).
+   Examples: "2024-02-20T11:18:59.123Z", "2024-02-20T11:18:59Z", "2024-02-22T11:18:59.152+03:00". */
+filter _raw_log !~= "LogTime|ChangeDt|CreateDate" and _raw_log ~= "\d{4}\-\d{2}\-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:[+-]\d{2}:\d{2}|Z)"
+| alter tmp_raw_timestamp = arrayindex(regextract(_raw_log, "\d{4}\-\d{2}\-\d{2}T\d{2}:\d{2}:\d{2}\S+"), 0)
+| alter tmp_timestamp = replace(to_string(tmp_raw_timestamp), "Z", "+00:00")
+| alter _time = parse_timestamp("%FT%H:%M:%E*S%Ez", tmp_timestamp)
+| fields - tmp*;
\ No newline at end of file
diff --git a/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml b/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
new file mode 100644
index 000000000000..6ee0bbb6e2af
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ParsingRules/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
@@ -0,0 +1,6 @@
+id: BeyondTrust_Password_Safe_ParsingRule
+name: BeyondTrust Password Safe Parsing Rule
+fromversion: 8.4.0
+tags: []
+rules: ''
+samples: ''
diff --git a/Packs/BeyondTrust_Password_Safe/README.md b/Packs/BeyondTrust_Password_Safe/README.md
index 51150bf326ec..d58c7c2aad7b 100644
--- a/Packs/BeyondTrust_Password_Safe/README.md
+++ b/Packs/BeyondTrust_Password_Safe/README.md
@@ -1,2 +1,88 @@
-# Beyond Trust Password Safe
-Unified password and session management for seamless accountability and control over privileged accounts.
\ No newline at end of file
+# BeyondTrust Password Safe
+Unified password and session management for seamless accountability and control over privileged accounts.
+
+<~XSIAM>
+
+## Cortex XSIAM SIEM Content
+
+This pack includes Cortex XSIAM SIEM content for parsing and modeling the syslog events that are forwarded from BeyondTrust Password Safe.
+
+In addition, data is normalized to the Cortex Data Model (XDM).
+
+Follow the configuration sections below for forwarding syslog events from BeyondTrust Password Safe and ingesting them in Cortex XSIAM.
+
+### Configuration on BeyondTrust BeyondInsight
+This section describes the configuration that needs to be done on the BeyondTrust BeyondInsight platform in order to forward its event logs to Cortex XSIAM Broker VM via syslog.
+
+Follow the steps below:
+1. In BeyondInsight, go to *Configuration* → *General* → *Connectors*.
+2. From the *Connectors* pane, click **Create New Connector**.
+3. Enter a name for the connector, e.g., "*Cortex XSIAM*".
+4. Select **Syslog Event Forwarder** under the *Connector Type* list.
+5. Click **Create Connector** to open the *Syslog Event Forwarder* pane.
+6. Leave **Active (yes)** enabled.
+7. Provide the required details of the target Cortex XSIAM Broker VM syslog server:
+ - *`Available Output Pipelines`* - Select the requested transmission protocol for forwarding the syslog messages: *TCP*, *TCP-SSL*, or *UDP*.
+ - *`Host Name`* - Enter the IP address or hostname of the target Cortex XSIAM Broker VM syslog server.
+   - *`Port`* - Enter the port number that the target Broker VM Syslog service is listening on for receiving syslog messages from BeyondTrust Password Safe.
+8. Select one of the following output formats: *Comma Delimited* or *Tab Delimited*. Other formats are currently unsupported.
+9. Select an optional syslog *Facility* from the list.
+10. Select **Format Specification**.
+11. Select the events that you want to forward to Cortex XSIAM.
+12. Click **Test Connector** to determine if the event forwarding configuration is successful.
+13. Click **Create Connector**.
+
+See BeyondTrust Password Safe [Enable Syslog Event Forwarding](https://www.beyondtrust.com/docs/beyondinsight-password-safe/bi/integrations/third-party/snmp-trap-and-syslog.htm#:~:text=Enable%20Syslog%20Event%20Forwarding) guide for additional details. Remark: The timestamps extracted from the BeyondTrust Password Safe events are interpreted in UTC timezone.
+
+### Configuration on Cortex XSIAM
+
+This section describes the configuration that needs to be done on Cortex XSIAM for receiving forwarded syslog events from BeyondTrust Password Safe.
+
+In order to use the collector, use the [Broker VM](#broker-vm) option.
+
+#### Broker VM
+You will need to use the information described [here](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Configure-the-Broker-VM).
+
+You can configure the specific vendor and product for this instance.
+
+1. Navigate to **Settings** → **Configuration** → **Data Broker** → **Broker VMs**.
+2. Go to the **APPS** column under the **Brokers** tab and add the **Syslog** app for the relevant broker instance. If the Syslog app already exists, hover over it and click **Configure**.
+3. Click **Add New**.
+4. When configuring the Syslog Collector, set the following parameters:
+ | Parameter | Value
+ | :--- | :---
+ | `Protocol` | Select the syslog forwarding transmission protocol in correspondence to the [output pipeline](https://www.beyondtrust.com/docs/beyondinsight-password-safe/bi/integrations/third-party/snmp-trap-and-syslog.htm#:~:text=Select%20the%20Available%20Output%20Pipeline%3ATCP%2C%20TCP%2DSSL%2C%20or%20UDP) configured on the BeyondTrust BeyondInsight platform for the Cortex XSIAM connector - **UDP**, **TCP** or **Secure TCP (for TCP-SSL)**.
+ | `Port` | Enter the syslog service port that Cortex XSIAM Broker VM should listen on for receiving forwarded events from BeyondTrust Password Safe.
+ | `Vendor` | Enter **beyondtrust**.
+ | `Product` | Enter **passwordsafe**.
+
+
+### Sample XQL Queries
+After completing the configurations above, the forwarded event logs are searchable on Cortex XSIAM via an XQL Search on the *beyondtrust_passwordsafe_raw* dataset.
+
+The following XQL Queries demonstrate the parsing and XDM modeling for the BeyondTrust Password Safe events:
+
+1. **AppAudit Login Events**
+ ```javascript
+ config timeframe = 1H
+ | datamodel dataset = beyondtrust_passwordsafe_raw
+ | filter xdm.observer.type ~= "AppAudit" and xdm.event.type ~= "Login"
+ | fields xdm.observer.type, xdm.event.id, xdm.event.type, xdm.auth.auth_method, xdm.source.user.username, xdm.source.user.domain, xdm.source.user.groups, xdm.source.ipv4, xdm.event.description, xdm.event.outcome, xdm.event.outcome_reason
+ ```
+2. **PowerBroker Password Safe (PBPS) Events**
+ ```javascript
+ config timeframe = 1H
+ | datamodel dataset = beyondtrust_passwordsafe_raw
+ | filter xdm.observer.type = "PBPS"
+ | fields xdm.observer.type, xdm.observer.version, xdm.event.id, xdm.event.type, xdm.event.original_event_type, xdm.event.operation, xdm.event.description, xdm.event.outcome, xdm.event.outcome_reason, xdm.source.host.hostname, xdm.source.ipv4, xdm.source.user.username, xdm.source.user.domain, xdm.source.user.groups, xdm.target.resource.value
+ ```
+
+3. **All XDM Mapped Fields**
+ ```javascript
+ config timeframe = 1H
+ | datamodel dataset = beyondtrust_passwordsafe_raw
+ | fields xdm.auth.auth_method, xdm.email.recipients, xdm.event.description, xdm.event.id, xdm.event.operation, xdm.event.original_event_type, xdm.event.outcome, xdm.event.outcome_reason, xdm.event.type, xdm.intermediate.host.hostname, xdm.observer.name, xdm.observer.type, xdm.observer.version, xdm.session_context_id, xdm.source.agent.identifier, xdm.source.agent.version, xdm.source.host.hostname, xdm.source.ipv4, xdm.source.ipv6, xdm.source.host.ipv4_addresses, xdm.source.host.ipv6_addresses, xdm.source.location.region, xdm.source.user.domain, xdm.source.user.groups, xdm.source.user.identifier, xdm.source.user.ou, xdm.source.user.username, xdm.target.application.name, xdm.target.process.command_line, xdm.target.file.directory, xdm.target.file.filename, xdm.target.file.path, xdm.target.host.hostname, xdm.target.host.os, xdm.target.host.os_family, xdm.target.port, xdm.target.resource.id, xdm.target.resource.name, xdm.target.resource.type, xdm.target.resource.value, xdm.target.resource.parent_id, xdm.target.user.domain, xdm.target.user.identifier, xdm.target.user.groups, xdm.target.user.username
+ | view column order = populated
+ ```
+
+~XSIAM>
diff --git a/Packs/BeyondTrust_Password_Safe/ReleaseNotes/1_1_6.md b/Packs/BeyondTrust_Password_Safe/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..cef7f87c03b5
--- /dev/null
+++ b/Packs/BeyondTrust_Password_Safe/ReleaseNotes/1_1_6.md
@@ -0,0 +1,12 @@
+
+#### Modeling Rules
+
+##### New: BeyondTrust Password Safe Modeling Rule
+
+Added a Modeling Rule for normalizing BeyondTrust Password Safe event logs (Available from Cortex XSIAM 2.1).
+
+#### Parsing Rules
+
+##### New: BeyondTrust Password Safe Parsing Rule
+
+Added a Parsing Rule for extracting the BeyondTrust Password Safe event logs timestamp (Available from Cortex XSIAM 2.1).
diff --git a/Packs/BeyondTrust_Password_Safe/pack_metadata.json b/Packs/BeyondTrust_Password_Safe/pack_metadata.json
index 10dbadc034f3..8d7b1e09e136 100644
--- a/Packs/BeyondTrust_Password_Safe/pack_metadata.json
+++ b/Packs/BeyondTrust_Password_Safe/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "BeyondTrust Password Safe",
"description": "Unified password and session management for seamless accountability and control over privileged accounts.",
"support": "xsoar",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -12,7 +12,12 @@
],
"tags": [],
"useCases": [],
- "keywords": [],
+ "keywords": [
+ "BeyondTrust",
+ "Password",
+ "Safe",
+ "Password Safe"
+ ],
"marketplaces": [
"xsoar",
"marketplacev2"
diff --git a/Packs/BigFix/Integrations/BigFix/BigFix.yml b/Packs/BigFix/Integrations/BigFix/BigFix.yml
index db8f0e25d94f..85c30cbe6c98 100644
--- a/Packs/BigFix/Integrations/BigFix/BigFix.yml
+++ b/Packs/BigFix/Integrations/BigFix/BigFix.yml
@@ -478,5 +478,5 @@ script:
script: '-'
subtype: python3
type: python
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
fromversion: 5.0.0
diff --git a/Packs/BigFix/ReleaseNotes/1_0_15.md b/Packs/BigFix/ReleaseNotes/1_0_15.md
new file mode 100644
index 000000000000..2e4f58b4f67e
--- /dev/null
+++ b/Packs/BigFix/ReleaseNotes/1_0_15.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### BigFix
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/BigFix/pack_metadata.json b/Packs/BigFix/pack_metadata.json
index 82adee64fa3b..894d103827f6 100644
--- a/Packs/BigFix/pack_metadata.json
+++ b/Packs/BigFix/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "HCL BigFix",
"description": "HCL BigFix Patch provides an automated, simplified patching process that is administered from a single console.",
"support": "xsoar",
- "currentVersion": "1.0.14",
+ "currentVersion": "1.0.15",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -17,4 +17,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/Binalyze/Integrations/BinalyzeAIR/BinalyzeAIR.yml b/Packs/Binalyze/Integrations/BinalyzeAIR/BinalyzeAIR.yml
index 8fdd7b93c27d..d3723297c473 100644
--- a/Packs/Binalyze/Integrations/BinalyzeAIR/BinalyzeAIR.yml
+++ b/Packs/Binalyze/Integrations/BinalyzeAIR/BinalyzeAIR.yml
@@ -97,7 +97,7 @@ script:
description: Organization Id of endpoint.
type: number
description: Acquire evidence from an endpoint.
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
subtype: python3
fromversion: 6.2.0
tests:
diff --git a/Packs/Binalyze/ReleaseNotes/1_1_6.md b/Packs/Binalyze/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..a6286a29b343
--- /dev/null
+++ b/Packs/Binalyze/ReleaseNotes/1_1_6.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Binalyze AIR
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/Binalyze/pack_metadata.json b/Packs/Binalyze/pack_metadata.json
index 1b7e39b396a5..6cadc513e1ed 100644
--- a/Packs/Binalyze/pack_metadata.json
+++ b/Packs/Binalyze/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Binalyze AIR",
"description": "Collect over 300 different types of evidence under 10 minutes.",
"support": "partner",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "Binalyze Integration Team",
"url": "https://kb.binalyze.com/air/integrations/cortex-xsoar-integration",
"email": "support@binalyze.com",
diff --git a/Packs/BluecatAddressManager/Integrations/BluecatAddressManager/BluecatAddressManager.yml b/Packs/BluecatAddressManager/Integrations/BluecatAddressManager/BluecatAddressManager.yml
index 4b8aa6b092e7..60ca23d554eb 100644
--- a/Packs/BluecatAddressManager/Integrations/BluecatAddressManager/BluecatAddressManager.yml
+++ b/Packs/BluecatAddressManager/Integrations/BluecatAddressManager/BluecatAddressManager.yml
@@ -162,7 +162,7 @@ script:
- contextPath: BlueCat.AddressManager.Range.Parents.CIDR
description: Classless Inter-Domain Routing.
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/BluecatAddressManager/ReleaseNotes/1_1_13.md b/Packs/BluecatAddressManager/ReleaseNotes/1_1_13.md
new file mode 100644
index 000000000000..ff9e050e0f23
--- /dev/null
+++ b/Packs/BluecatAddressManager/ReleaseNotes/1_1_13.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Bluecat Address Manager
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/BluecatAddressManager/pack_metadata.json b/Packs/BluecatAddressManager/pack_metadata.json
index fc2e8db67b8d..41fbbce4aab6 100644
--- a/Packs/BluecatAddressManager/pack_metadata.json
+++ b/Packs/BluecatAddressManager/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Bluecat Address Manager",
"description": "Use the BlueCat Address Manager integration to enrich IP addresses and manage response policies.",
"support": "xsoar",
- "currentVersion": "1.1.12",
+ "currentVersion": "1.1.13",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml b/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml
index b01d908285a7..c75086c7fd36 100644
--- a/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml
+++ b/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml
@@ -37,7 +37,7 @@ script:
name: blueliv-get-attackingips-feed
- description: 'Data related to the number of hacktivism tweets recently created. Blueliv provides two types of feeds: the first one contains the most popular hacktivism hashtags and the second one contains the countries where more number of hacktivism tweets are coming from.'
name: blueliv-get-hacktivism-feed
- dockerimage: demisto/blueliv:1.0.0.52588
+ dockerimage: demisto/blueliv:1.0.0.76921
runonce: false
script: ''
type: python
diff --git a/Packs/Blueliv/ReleaseNotes/1_0_3.md b/Packs/Blueliv/ReleaseNotes/1_0_3.md
new file mode 100644
index 000000000000..b79460975a3f
--- /dev/null
+++ b/Packs/Blueliv/ReleaseNotes/1_0_3.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Blueliv (Beta)
+
+- Updated the Docker image to: *demisto/blueliv:1.0.0.76921*.
diff --git a/Packs/Blueliv/pack_metadata.json b/Packs/Blueliv/pack_metadata.json
index eeda387fb835..69cf2e10ef17 100644
--- a/Packs/Blueliv/pack_metadata.json
+++ b/Packs/Blueliv/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Blueliv (Beta)",
"description": "Blueliv reduces risk through actionable, dynamic and targeted threat intelligence, trusted by your organization.",
"support": "xsoar",
- "currentVersion": "1.0.2",
+ "currentVersion": "1.0.3",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/BmcHelixRemedyForce/Integrations/BmcHelixRemedyForce/BmcHelixRemedyForce.yml b/Packs/BmcHelixRemedyForce/Integrations/BmcHelixRemedyForce/BmcHelixRemedyForce.yml
index 84b397f39bc4..9e62a04cc052 100644
--- a/Packs/BmcHelixRemedyForce/Integrations/BmcHelixRemedyForce/BmcHelixRemedyForce.yml
+++ b/Packs/BmcHelixRemedyForce/Integrations/BmcHelixRemedyForce/BmcHelixRemedyForce.yml
@@ -797,7 +797,7 @@ script:
- contextPath: BmcRemedyforce.ServiceRequest.Type
description: The type of the service request.
type: String
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.13.88772
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/BmcHelixRemedyForce/ReleaseNotes/1_0_41.md b/Packs/BmcHelixRemedyForce/ReleaseNotes/1_0_41.md
new file mode 100644
index 000000000000..a09e18db0900
--- /dev/null
+++ b/Packs/BmcHelixRemedyForce/ReleaseNotes/1_0_41.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### BMC Helix Remedyforce
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/BmcHelixRemedyForce/pack_metadata.json b/Packs/BmcHelixRemedyForce/pack_metadata.json
index 6b00c344594a..3d70b9a3216c 100644
--- a/Packs/BmcHelixRemedyForce/pack_metadata.json
+++ b/Packs/BmcHelixRemedyForce/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Bmc Helix Remedyforce",
"description": "Integration of BMC Helix Remedyforce with Cortex XSOAR. BMC Helix Remedyforce integration allows customers to create/update service requests and incidents. It also allows to update status, resolve service requests and incidents with customer notes. This integration exposes standard ticketing capabilities that can be utilized as part of automation & orchestration.",
"support": "xsoar",
- "currentVersion": "1.0.40",
+ "currentVersion": "1.0.41",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/BmcITSM/.secrets-ignore b/Packs/BmcITSM/.secrets-ignore
index 242bec54188d..e0820e1b4b4c 100644
--- a/Packs/BmcITSM/.secrets-ignore
+++ b/Packs/BmcITSM/.secrets-ignore
@@ -1,3 +1,3 @@
company1@xsoar.com
http://www.company1.com
-info@bmc.com
\ No newline at end of file
+info@bmc.com
diff --git a/Packs/BmcITSM/IncidentFields/BMC_Display_ID.json b/Packs/BmcITSM/IncidentFields/BMC_Display_ID.json
index 58b52c937640..d9d637fb6bd1 100644
--- a/Packs/BmcITSM/IncidentFields/BMC_Display_ID.json
+++ b/Packs/BmcITSM/IncidentFields/BMC_Display_ID.json
@@ -27,7 +27,8 @@
"BMC Incident",
"BMC Service Request",
"BMC Problem – Known Error",
- "BMC Task"
+ "BMC Task",
+ "BMC Work Order"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/BmcITSM/IncidentFields/BMC_Request_ID.json b/Packs/BmcITSM/IncidentFields/BMC_Request_ID.json
index 8a6d8d2a5668..45a7bea79f0d 100644
--- a/Packs/BmcITSM/IncidentFields/BMC_Request_ID.json
+++ b/Packs/BmcITSM/IncidentFields/BMC_Request_ID.json
@@ -27,7 +27,8 @@
"BMC Problem Investigation incident",
"BMC Problem – Known Error",
"BMC Service Request",
- "BMC Task"
+ "BMC Task",
+ "BMC Work Order"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/BmcITSM/IncidentFields/BMC_Status_Reason.json b/Packs/BmcITSM/IncidentFields/BMC_Status_Reason.json
index a224f67652a0..8983b9d41025 100644
--- a/Packs/BmcITSM/IncidentFields/BMC_Status_Reason.json
+++ b/Packs/BmcITSM/IncidentFields/BMC_Status_Reason.json
@@ -95,7 +95,18 @@
"Pending PIR",
"Funding Not Available",
"Pending Infrastructure Change",
- "Pending Third Party Vendor"
+ "Pending Third Party Vendor",
+ "Initial Status",
+ "Awaiting Request Assignee",
+ "Client Additional Information Requested",
+ "Third Party Vendor Action Required",
+ "Infrastructure Change",
+ "Work not started",
+ "Cancelled by Requester",
+ "Cancelled by Support",
+ "Customer Close",
+ "System Close",
+ "System Close with Issues"
],
"useAsKpi": false,
"locked": false,
@@ -110,7 +121,8 @@
"BMC Problem Investigation incident",
"BMC Problem – Known Error",
"BMC Service Request",
- "BMC Task"
+ "BMC Task",
+ "BMC Work Order"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/BmcITSM/IncidentFields/BMC_Submitter.json b/Packs/BmcITSM/IncidentFields/BMC_Submitter.json
index d6ffa3bdee28..009361b8c73f 100644
--- a/Packs/BmcITSM/IncidentFields/BMC_Submitter.json
+++ b/Packs/BmcITSM/IncidentFields/BMC_Submitter.json
@@ -27,7 +27,8 @@
"BMC Problem Investigation incident",
"BMC Problem – Known Error",
"BMC Service Request",
- "BMC Task"
+ "BMC Task",
+ "BMC Work Order"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/BmcITSM/IncidentFields/BMC_VIP_Flag.json b/Packs/BmcITSM/IncidentFields/BMC_VIP_Flag.json
index 28cd7c8fa2b8..a75e5952826d 100644
--- a/Packs/BmcITSM/IncidentFields/BMC_VIP_Flag.json
+++ b/Packs/BmcITSM/IncidentFields/BMC_VIP_Flag.json
@@ -23,7 +23,8 @@
"hidden": false,
"openEnded": false,
"associatedTypes": [
- "BMC Incident"
+ "BMC Incident",
+ "BMC Work Order"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/BmcITSM/IncidentTypes/BMC_Work_Order.json b/Packs/BmcITSM/IncidentTypes/BMC_Work_Order.json
new file mode 100644
index 000000000000..b6379ae243e2
--- /dev/null
+++ b/Packs/BmcITSM/IncidentTypes/BMC_Work_Order.json
@@ -0,0 +1,29 @@
+{
+ "id": "BMC Work Order",
+ "version": -1,
+ "vcShouldIgnore": false,
+ "locked": false,
+ "name": "BMC Work Order",
+ "prevName": "BMC Work Order",
+ "color": "#229BDC",
+ "hours": 0,
+ "days": 0,
+ "weeks": 0,
+ "hoursR": 0,
+ "daysR": 0,
+ "weeksR": 0,
+ "system": false,
+ "readonly": false,
+ "default": false,
+ "autorun": false,
+ "disabled": false,
+ "reputationCalc": 0,
+ "onChangeRepAlg": 0,
+ "layout": "BMC ITSM Layout",
+ "detached": false,
+ "extractSettings": {
+ "mode": "Specific",
+ "fieldCliNameToExtractSettings": {}
+ },
+ "fromVersion": "6.2.0"
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
index 9e844511c652..ecb3b8105e6f 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
@@ -13,6 +13,7 @@
TASK = "task"
PROBLEM_INVESTIGATION = "problem investigation"
KNOWN_ERROR = "known error"
+WORK_ORDER = "work order"
SERVICE_REQUEST_CONTEXT_MAPPER = {
"SysRequestID": "RequestID",
@@ -100,6 +101,24 @@
"View Access": "ViewAccess",
"Stastus_Reason": "StatusReason", # The product has typo in the response
}
+
+WORK_ORDER_CONTEXT_MAPPER = {
+ "Request ID": "RequestID",
+ "Work Order ID": "DisplayID",
+ "Submit Date": "CreateDate",
+ "Status": "Status",
+ "Description": "Summary",
+ "Last Modified Date": "LastModifiedDate",
+ "Detailed Description": "Details",
+ "VIP": "VIP",
+ "Reported Source": "ReportedSource",
+ "Status Reason": "StatusReason",
+ "ASCHG": "Assignee",
+ "ASGRP": "Assigned Group",
+ "ASCPY": "Assigned Support Company",
+ "Support Organization": "Assigned Support Organization",
+}
+
COMMON_PROPERTIES = [
"Submitter",
"Urgency",
@@ -136,6 +155,7 @@
TASK: "TMS:Task",
PROBLEM_INVESTIGATION: "PBM:ProblemInterface",
KNOWN_ERROR: "PBM:KnownErrorInterface",
+ WORK_ORDER: "WOI:WorkOrderInterface",
}
TICKET_TYPE_TO_DELETE_FORM = {
@@ -144,6 +164,7 @@
TASK: "TMS:Task",
PROBLEM_INVESTIGATION: "PBM:Problem Investigation",
KNOWN_ERROR: "PBM:Known Error",
+ WORK_ORDER: "WOI:WorkOrderInterface",
}
TICKET_TYPE_TO_STATUS_FIELD = {
@@ -153,6 +174,7 @@
PROBLEM_INVESTIGATION: "Investigation Status",
KNOWN_ERROR: "Known Error Status",
TASK: "Status",
+ WORK_ORDER: "Status",
}
TICKET_TYPE_TO_CONTEXT_MAPPER = {
@@ -162,6 +184,7 @@
TASK: TASK_CONTEXT_MAPPER,
PROBLEM_INVESTIGATION: PROBLEM_INVESTIGATION_CONTEXT_MAPPER,
KNOWN_ERROR: KNOWN_ERROR_CONTEXT_MAPPER,
+ WORK_ORDER: WORK_ORDER_CONTEXT_MAPPER,
}
TICKET_TYPE_TO_STATUS_KEY = {
@@ -171,6 +194,7 @@
TASK: "Status",
PROBLEM_INVESTIGATION: "Investigation Status",
KNOWN_ERROR: "Known Error Status",
+ WORK_ORDER: "Status",
}
TICKET_TYPE_TO_SUMMARY_KEY = {
@@ -180,6 +204,7 @@
TASK: "Summary",
PROBLEM_INVESTIGATION: "Description",
KNOWN_ERROR: "Description",
+ WORK_ORDER: "Description",
}
TICKET_TYPE_TO_REQUEST_ID_KEY = {
@@ -189,6 +214,7 @@
TASK: "Task ID",
PROBLEM_INVESTIGATION: "Request ID",
KNOWN_ERROR: "Request ID",
+ WORK_ORDER: "Work Order ID",
}
TICKET_TYPE_TO_CREATE_QUERY = {
@@ -198,6 +224,7 @@
TASK: "values(Task ID,Create Date)",
PROBLEM_INVESTIGATION: "values(Request ID,Problem Investigation ID,Create Date)",
KNOWN_ERROR: "values(Request ID,Known Error ID,Create Date)",
+ WORK_ORDER: "values(Request ID,WorkOrder_ID,Create Date)",
}
FIELD_DELIMITER = ";"
@@ -209,6 +236,7 @@
"TAS": TASK,
"PBI": PROBLEM_INVESTIGATION,
"PKE": KNOWN_ERROR,
+ "WO0": WORK_ORDER,
}
CREATE_CONTEXT_MAPPER = {
@@ -223,12 +251,14 @@
"Submit Date": "CreateDate",
"Create Date": "CreateDate",
"Task ID": "DisplayID",
+ "WorkOrder_ID": "DisplayID",
}
TICKET_TYPE_TO_DISPLAY_ID = {
INCIDENT: "Incident Number",
PROBLEM_INVESTIGATION: "Problem Investigation ID",
KNOWN_ERROR: "Known Error ID",
+ WORK_ORDER: "Work Order ID",
}
ID_QUERY_MAPPER_KEY = "IDS"
EQUAL_QUERY_MAPPER_KEY = "EQUAL"
@@ -246,6 +276,7 @@
TASK,
PROBLEM_INVESTIGATION,
KNOWN_ERROR,
+ WORK_ORDER,
]
TICKET_INCIDENT_TYPES = [
"BMC Change-Request",
@@ -254,6 +285,7 @@
"BMC Problem Investigation incident",
"BMC Service Request",
"BMC Task",
+ "BMC Work Order",
]
TICKET_TYPE_TO_INCIDENT_TYPE = {
@@ -263,6 +295,7 @@
PROBLEM_INVESTIGATION: "BMC Problem Investigation incident",
KNOWN_ERROR: "BMC Problem – Known Error",
TASK: "BMC Task",
+ WORK_ORDER: "BMC Work Order",
}
MIRRORING_COMMON_FIELDS = [
@@ -282,6 +315,7 @@
TASK: ["Priority"],
PROBLEM_INVESTIGATION: ["Priority"],
KNOWN_ERROR: ["Priority"],
+ WORK_ORDER: ["Priority"],
}
MIRROR_DIRECTION_MAPPING = {
@@ -1280,6 +1314,143 @@ def update_known_error_request(
return response
+ def create_work_order_request(
+ self,
+ template_guid: str,
+ first_name: str,
+ last_name: str,
+ customer_person_id: str,
+ customer_first_name: str,
+ customer_last_name: str,
+ customer_company: str,
+ summary: str,
+ detailed_description: str,
+ status: str,
+ priority: str,
+ work_order_type: str,
+ location_company: str,
+ scedulded_start_date: str,
+ scedulded_end_date: str,
+ **additional_fields,
+ ) -> Dict[str, Any]:
+ """
+ Create work order request.
+
+ Args:
+ template_guid (str): Work order template GUID.
+ first_name (str): Requester first name.
+ last_name (str): Requester last name.
+        customer_person_id (str): Customer person ID (in case the first/last name pair is ambiguous).
+ customer_first_name (str): Customer first name
+ customer_last_name (str): Customer last name
+ customer_company (str): Customer company
+ summary (str): Work order summary.
+        detailed_description (str): Work order detailed description.
+ status (str): Ticket status.
+ priority (str): Ticket priority.
+ work_order_type (str): Work order type.
+        location_company (str): Company associated with work order process.
+        scedulded_start_date (str): Scheduled start date.
+        scedulded_end_date (str): Scheduled end date.
+
+ Returns:
+        Dict[str, Any]: API response from BmcITSM.
+ """
+
+ properties = remove_empty_elements({
+ "TemplateID": template_guid,
+ "First Name": first_name,
+ "Last Name": last_name,
+ "Customer Person ID": customer_person_id,
+ "Customer First Name": customer_first_name,
+ "Customer Last Name": customer_last_name,
+ "Customer Company": customer_company,
+ "Summary": summary,
+ "Detailed Description": detailed_description,
+ "Status": status,
+ "Priority": priority,
+ "Work Order Type": work_order_type,
+ "Location Company": location_company,
+ "Scheduled Start Date": scedulded_start_date,
+ "Scheduled End Date": scedulded_end_date,
+ "z1D_Action": "CREATE",
+ **additional_fields,
+ })
+ data = {"values": properties}
+ params = {"fields": TICKET_TYPE_TO_CREATE_QUERY[WORK_ORDER]}
+ response = self._http_request("POST",
+ "arsys/v1/entry/WOI:WorkOrderInterface_Create",
+ json_data=data,
+ params=params)
+ return response
+
+ def update_work_order_request(
+ self,
+ request_id: str,
+ summary: str,
+ detailed_description: str,
+ status: str,
+ status_reason: str,
+ priority: str,
+ work_order_type: str,
+ company: str,
+ assignee: str,
+ support_organization: str,
+ support_group_name: str,
+ location_company: str,
+ scedulded_start_date: str,
+ schedulded_end_date: str,
+ **additional_fields,
+ ):
+ """
+ Work order update request.
+
+ Args:
+ request_id (str): Work order request ID.
+ summary (str): Work order summary.
+ detailed_description (str): Work order details.
+ status (str): Work order status.
+ status_reason (str): The reason for changing the status.
+ priority (str): Work order priority.
+ work_order_type (str): Work order type.
+ company (str): Work order company.
+ assignee (str): Assignee.
+ support_organization (str): Support organization.
+ support_group_name (str): Support group name.
+        location_company (str): Company associated with ticket process.
+        scedulded_start_date (str): Scheduled start date.
+        schedulded_end_date (str): Scheduled end date.
+ Returns:
+        str: API response from BmcITSM.
+ """
+
+ properties = remove_empty_elements({
+ "Summary": summary,
+ "Detailed Description": detailed_description,
+ "Location Company": location_company,
+ "Status": status,
+ "Status Reason": status_reason,
+ "Work Order Type": work_order_type,
+ "Priority": priority,
+ "Support Organization": support_organization,
+ "Support Group Name": support_group_name,
+ "Company": company,
+ "Request Assignee": assignee,
+ "Assigned To": assignee,
+ "Scheduled Start Date": scedulded_start_date,
+ "Scheduled End Date": schedulded_end_date,
+ **additional_fields,
+ })
+ data = {"values": properties}
+ response = self._http_request(
+ "PUT",
+ f"arsys/v1/entry/WOI:WorkOrder/{request_id}",
+ json_data=data,
+ resp_type="text",
+ )
+
+ return response
+
def list_command(
client: Client,
@@ -2513,6 +2684,186 @@ def known_error_update_command(client: Client, args: Dict[str, Any]) -> CommandR
return command_results
+def support_group_list_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+ """List BmcITSM support groups.
+
+ Args:
+ client (Client): BmcITSM API client.
+ args (Dict[str, Any]): command arguments.
+
+ Returns:
+ CommandResults: Command results with raw response, outputs and readable outputs.
+ """
+ context_output_mapper = {
+ "Support Group ID": "SupportGroupID",
+ "Company": "Company",
+ "Support Organization": "SupportOrganization",
+ "Support Group Name": "SupportGroupName"
+ }
+
+ command_results = list_command(
+ client,
+ args,
+ "CTM:Support Group",
+ context_output_mapper,
+ header_prefix="List support groups.",
+ outputs_prefix="BmcITSM.SupportGroup",
+ outputs_key_field="SupportGroupID",
+ record_id_key="SupportGroupID",
+ )
+ return command_results
+
+
+def work_order_template_list_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+ """List BmcITSM work order templates.
+
+ Args:
+ client (Client): BmcITSM API client.
+ args (Dict[str, Any]): command arguments.
+
+ Returns:
+ CommandResults: Command results with raw response, outputs and readable outputs.
+ """
+ context_output_mapper = {
+ "Request ID": "Id",
+ "Template Name": "Name",
+ "GUID": "GUID",
+ }
+
+ args["ids"] = argToList(args.get("template_ids"))
+ command_results = list_command(
+ client,
+ args,
+ "WOI:Template",
+ context_output_mapper,
+ header_prefix="List work order templates.",
+ outputs_prefix="BmcITSM.WorkOrderTemplate",
+ outputs_key_field="Id",
+ record_id_key="GUID",
+ )
+ return command_results
+
+
+def work_order_create_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+ """
+ Create BmcITSM work order.
+
+ Args:
+ client (Client): BmcITSM API client.
+ args (Dict[str, Any]): command arguments.
+
+ Returns:
+ CommandResults: Command results with raw response, outputs and readable outputs.
+ """
+ template_guid = args.get("template_guid")
+ first_name = args.get("first_name")
+ last_name = args.get("last_name")
+ customer_person_id = args.get("customer_person_id")
+ customer_first_name = args.get("customer_first_name")
+ customer_last_name = args.get("customer_last_name")
+ customer_company = args.get("customer_company")
+ summary = args.get("summary")
+ detailed_description = args.get("detailed_description")
+ status = args.get("status")
+ priority = args.get("priority")
+ work_order_type = args.get("work_order_type")
+ location_company = args.get("location_company")
+ scedulded_start_date: datetime = arg_to_datetime(args.get("scedulded_start_date"))
+ scedulded_end_date: datetime = arg_to_datetime(args.get("scedulded_end_date"))
+
+ additional_fields = extract_args_from_additional_fields_arg(args.get("additional_fields"),
+ "additional_fields")
+ response = client.create_work_order_request(
+ template_guid,
+ first_name,
+ last_name,
+ customer_person_id,
+ customer_first_name,
+ customer_last_name,
+ customer_company,
+ summary,
+ detailed_description,
+ status,
+ priority,
+ work_order_type,
+ location_company,
+ scedulded_start_date=scedulded_start_date.isoformat() if scedulded_start_date else None,
+ scedulded_end_date=scedulded_end_date.isoformat() if scedulded_end_date else None,
+ **additional_fields,
+ )
+
+ outputs = format_create_ticket_outputs(response.get("values"))
+ # Fixing API returning RequestID in form 000...NNN instead of WO0...NNN
+ outputs["RequestID"] = "WO0" + outputs["RequestID"][3:]
+ readable_output = tableToMarkdown("Work order ticket successfully created.",
+ outputs,
+ headerTransform=pascalToSpace)
+ command_results = CommandResults(
+ outputs_prefix="BmcITSM.WorkOrder",
+ outputs_key_field="RequestID",
+ outputs=outputs,
+ raw_response=response,
+ readable_output=readable_output,
+ )
+
+ return command_results
+
+
+def work_order_update_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+ """
+ Update BmcITSM work order.
+
+ Args:
+ client (Client): BmcITSM API client.
+ args (Dict[str, Any]): command arguments.
+
+ Returns:
+ CommandResults: Command results with raw response, outputs and readable outputs.
+ """
+
+ request_id = args.get("request_id")
+ summary = args.get("summary")
+ detailed_description = args.get("detailed_description")
+ status = args.get("status")
+ status_reason = args.get("status_reason")
+ priority = args.get("priority")
+ work_order_type = args.get("work_order_type")
+ company = args.get("company")
+ assignee = args.get("assignee")
+ support_organization = args.get("support_organization")
+ support_group = args.get("support_group")
+ location_company = args.get("location_company")
+ scedulded_start_date: datetime = arg_to_datetime(args.get("scedulded_start_date"))
+ schedulded_end_date: datetime = arg_to_datetime(args.get("schedulded_end_date"))
+
+ additional_fields = extract_args_from_additional_fields_arg(args.get("additional_fields"),
+ "additional_fields")
+
+ validate_related_arguments_provided(support_organization=support_organization, support_group=support_group)
+
+ client.update_work_order_request(
+ request_id,
+ summary=summary,
+ detailed_description=detailed_description,
+ status=status,
+ status_reason=status_reason,
+ priority=priority,
+ work_order_type=work_order_type,
+ company=company,
+ assignee=assignee,
+ support_organization=support_organization,
+ support_group_name=support_group,
+ location_company=location_company,
+ scedulded_start_date=scedulded_start_date.isoformat() if scedulded_start_date else None,
+        schedulded_end_date=schedulded_end_date.isoformat() if schedulded_end_date else None,
+ **additional_fields,
+ )
+
+ command_results = CommandResults(readable_output=f"Work Order: {request_id} was successfully updated.")
+
+ return command_results
+
+
def format_command_output(records: List[dict],
mapper: Dict[str, Any],
context_data_arranger: Callable = None) -> Dict[str, Any]:
@@ -2833,6 +3184,9 @@ def generate_query_filter_mapper_by_args(args: Dict[str, Any], record_id_key: Op
"Organization": args.get("organization"),
"Company Type": args.get("company_type"),
"TaskName": args.get("task_name"),
+ "Template Name": args.get("template_name"),
+ "Support Organization": args.get("support_organization"),
+ "Support Group Name": args.get("support_group"),
}
return {
ID_QUERY_MAPPER_KEY: ids_filter_mapper,
@@ -3579,6 +3933,10 @@ def main() -> None:
"bmc-itsm-problem-investigation-update": problem_investigation_update_command,
"bmc-itsm-known-error-create": known_error_create_command,
"bmc-itsm-known-error-update": known_error_update_command,
+ "bmc-itsm-support-group-list": support_group_list_command,
+ "bmc-itsm-work-order-template-list": work_order_template_list_command,
+ "bmc-itsm-work-order-create": work_order_create_command,
+ "bmc-itsm-work-order-update": work_order_update_command,
}
if command == "test-module":
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
index 960566803b57..d378dd20956b 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
@@ -4,7 +4,7 @@ commonfields:
name: BmcITSM
display: BMC Helix ITSM
category: Utilities
-description: BMC Helix ITSM integration enables customers to manage service request, incident, change request, task, problem investigation and known error tickets.
+description: BMC Helix ITSM integration enables customers to manage service request, incident, change request, task, problem investigation, known error and work order tickets.
configuration:
- name: url
display: Server URL
@@ -43,6 +43,7 @@ configuration:
- change request
- problem investigation
- known error
+ - work order
- All
required: false
- name: ticket_status
@@ -168,7 +169,7 @@ script:
script: ""
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.13.87159
commands:
- name: bmc-itsm-user-list
description: Retrieves a list of user profiles from BMC Helix ITSM. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
@@ -187,53 +188,33 @@ script:
description: A comma-separated list of user IDs. Used as a filtering argument.
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: first_name
description: The user first name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: The user first name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The user company name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: department
description: The user department name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: organization
description: The user organization name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: bmc-itsm-company-list
description: Retrieves a list of companies from BMC Helix ITSM. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
outputs:
@@ -251,38 +232,24 @@ script:
description: A comma-separated list of company ID. Filtering argument.
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The user company name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: company_type
description: The user company type. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: bmc-itsm-service-request-definition-list
description: Retrieves a list of service request definitions. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
outputs:
@@ -300,33 +267,21 @@ script:
description: A comma-separated list of service request definition IDs. Used as a filtering argument.
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: description
description: The service request ticket definition description. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: bmc-itsm-ticket-list
description: Retrieves a list of BMC Helix ITSM tickets. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
arguments:
@@ -341,32 +296,23 @@ script:
- change request
- problem investigation
- known error
+ - work order
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: ticket_ids
- description: A comma-separated list of ticket request IDs. Used as a filtering argument.
+ description: A comma-separated list of ticket request IDs. Used as a filtering argument. Use the Display ID for the work order ticket type.
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: status
description: The status of the tickets to fetch. Since each ticket type has its own unique set of statuses, select only statuses that match the selected ticket type(s).
defaultValue: ""
@@ -456,9 +402,6 @@ script:
- name: summary
description: The ticket summary. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
-
outputs:
- contextPath: BmcITSM.Ticket.RequestID
description: The ticket ID.
@@ -524,28 +467,18 @@ script:
description: "The instance ID of the service request ticket. It can be retrieved by executing bmc-itsm-service-request-definition-list command."
required: true
defaultValue: ""
- predefined:
- - ""
- name: first_name
description: The requester first name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together.
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: "The requester last name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together."
defaultValue: ""
- predefined:
- - ""
- name: login_id
description: The requester login ID. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together.
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The service request ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: status
description: "The service request ticket status."
defaultValue: ""
@@ -584,8 +517,6 @@ script:
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.ServiceRequest.RequestID
description: The service request ticket unique request ID.
@@ -603,18 +534,12 @@ script:
description: The unique identifier of the service request ticket to update.
required: true
defaultValue: ""
- predefined:
- - ""
- name: customer_first_name
description: "The customer first name. By default it is determined by the logged in user."
defaultValue: ""
- predefined:
- - ""
- name: customer_last_name
description: "The customer last name. By default it is determined by the logged in user."
defaultValue: ""
- predefined:
- - ""
- name: status
description: "The service request ticket status."
defaultValue: ""
@@ -670,33 +595,21 @@ script:
- name: location_company
description: The company associated with the service request process.
defaultValue: ""
- predefined:
- - ""
- name: region
description: The region associated with the company location.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group associated with the region.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site associated with the site group.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs: []
- name: bmc-itsm-incident-update
description: "Update incident ticket."
@@ -705,23 +618,15 @@ script:
description: The ID of the incident ticket to update.
required: true
defaultValue: ""
- predefined:
- - ""
- name: first_name
description: The customer first name the incident ticket is for.
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: The customer last name the incident ticket is for.
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The incident ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: service_type
description: "The type of the incident ticket."
defaultValue: ""
@@ -786,53 +691,33 @@ script:
- name: detailed_description
description: The incident ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_group
description: The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assignee_login_id
description: The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together.
defaultValue: ""
- predefined:
- - ""
- name: region
description: The region, which makes up the second tier of the customer’s business organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group associated with the region.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site associated with the site group.
defaultValue: ""
- predefined:
- - ""
- name: status_reason
description: The reason for updating the ticket status. Required when status is provided.
auto: PREDEFINED
@@ -871,8 +756,6 @@ script:
required: true
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: ticket_type
description: The type of the tickets to delete.
required: true
@@ -883,6 +766,7 @@ script:
- change request
- problem investigation
- known error
+ - work order
outputs: []
- name: bmc-itsm-incident-create
description: "Creates a new incident ticket. An incident is any event that is not part of the standard operation of a service and that causes an interruption to or a reduction in the quality of that service."
@@ -891,24 +775,16 @@ script:
description: The customer first name the incident ticket is for.
required: true
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: The customer last name the incident ticket is for.
required: true
defaultValue: ""
- predefined:
- - ""
- name: template_instance_id
description: The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The instance ID can be retrieved by executing the bmc-itsm-incident-template-list command.
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The incident ticket summary. Required when the template_instance_id argument is not provided.
defaultValue: ""
- predefined:
- - ""
- name: service_type
description: The type of the incident ticket. Required when the template_instance_id argument is not provided.
defaultValue: ""
@@ -976,53 +852,33 @@ script:
- name: details
description: The incident ticket detailed description.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_group
description: The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the employee the ticket will be assigned to. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assignee_login_id
description: The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: region
description: The region associated with the company.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group associated with the region.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site associated with the site group.
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.Incident.RequestID
description: The incident ticket request ID.
@@ -1040,34 +896,22 @@ script:
description: The requester first name.
required: true
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: The requester last name.
required: true
defaultValue: ""
- predefined:
- - ""
- name: customer_first_name
description: "The customer first name."
defaultValue: ""
- predefined:
- - ""
- name: customer_last_name
description: The customer last name.
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The change request ticket title. Required when the template ID argument is not provided.
defaultValue: ""
- predefined:
- - ""
- name: template_id
description: "The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The ID can be retrieved by executing the bmc-itsm-change-request-template-list command."
defaultValue: ""
- predefined:
- - ""
- name: change_type
description: The change request ticket type. Required when the ticket creation is without a template.
defaultValue: ""
@@ -1140,13 +984,9 @@ script:
- name: location_company
description: The company associated with the change request process. Required when template ID argument is not provided.
defaultValue:
- predefined:
- - ""
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.ChangeRequest.RequestID
description: The change request ticket unique request ID.
@@ -1166,18 +1006,12 @@ script:
- name: first_name
description: "The customer first name the change request ticket is for."
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: "The customer last name the change request ticket is for."
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The change request ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: change_type
description: "The change request ticket type."
defaultValue: ""
@@ -1250,53 +1084,33 @@ script:
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field.'
defaultValue: ""
- predefined:
- - ""
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: organization
description: The organization associated with the requester.
defaultValue: ""
- predefined:
- - ""
- name: department
description: The department associated with the requester.
defaultValue: ""
- predefined:
- - ""
- name: location_company
description: The company associated with the change request process.
defaultValue:
- predefined:
- - ""
- name: region
description: The region associated with the company location.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group associated with the region.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site associated with the site group.
defaultValue: ""
- predefined:
- - ""
- name: support_organization
description: The second tier of the change manager’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: support_group_name
description: The third tier of the change manager’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: status_reason
description: "The reason for updating the ticket status. Required when status is provided."
defaultValue: ""
@@ -1336,29 +1150,21 @@ script:
- name: details
description: The change request ticket details.
defaultValue: ""
- predefined:
- - ""
outputs: []
- name: bmc-itsm-task-create
- description: "Creates a new task ticket. By splitting cases into individual tasks (assignments), you can focus on one assignment at a time to resolve cases more efficiently. Task ticket type can be attached only to the following ticket types: change request, incident, problem investigation, and known error."
+ description: "Creates a new task ticket. By splitting cases into individual tasks (assignments), you can focus on one assignment at a time to resolve cases more efficiently. Task ticket type can be attached only to the following ticket types: change request, incident, problem investigation, known error, and work order."
arguments:
- name: template_id
description: The instance ID of the template to use. The ID can be retrieved by executing the bmc-itsm-task-template-list command.
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The task ticket summary.
required: true
defaultValue: ""
- predefined:
- - ""
- name: details
description: The task ticket detailed description.
required: true
defaultValue: ""
- predefined:
- - ""
- name: root_ticket_type
description: The parent ticket type.
required: true
@@ -1369,17 +1175,14 @@ script:
- incident
- problem investigation
- known error
+ - work order
- name: root_request_id
- description: "The request ID of the parent ticket. Can be found in the context output of the bmc-itsm-ticket-list command."
+ description: "The request ID of the parent ticket. Can be found in the context output of the bmc-itsm-ticket-list command. Use the Display ID for work orders."
required: true
defaultValue: ""
- predefined:
- - ""
- name: root_request_name
description: "The display name of the parent ticket in the task ticket. If not provided, the parent ticket displayID is displayed."
defaultValue: ""
- predefined:
- - ""
- name: root_request_mode
description: "The parent ticket request mode."
defaultValue: Real
@@ -1426,23 +1229,15 @@ script:
description: The company associated with the task process.
required: true
defaultValue: ""
- predefined:
- - ""
- name: support_company
description: The technical support team associated with the company.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the task's support organization. It makes up the second tier of the task’s support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together.
defaultValue:
- predefined:
- - ""
- name: assigned_support_group
description: The group for the task's support organization. It makes up the third tier of the task's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: impact
description: The task ticket impact.
defaultValue: ""
@@ -1464,18 +1259,12 @@ script:
- name: assignee
description: The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: scedulded_start_date
description: The task ticket scheduled future start date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
- name: scedulded_end_date
description: The task ticket scheduled future end date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.Task.RequestID
description: The task ticket unique Request ID.
@@ -1493,17 +1282,11 @@ script:
description: The ID of the task ticket to update.
required: true
defaultValue: ""
- predefined:
- - ""
- name: summary
description: The task ticket summary.
- predefined:
- - ""
- name: details
description: The task ticket detailed description.
defaultValue: ""
- predefined:
- - ""
- name: priority
description: The task ticket priority.
defaultValue: ""
@@ -1544,33 +1327,21 @@ script:
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: location_company
description: The company associated with the task process.
defaultValue: ""
- predefined:
- - ""
- name: support_company
description: The technical support team associated with the company.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the employee the ticket is assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together.
defaultValue:
- predefined:
- - ""
- name: assigned_group
description: The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: task_type
description: "The task ticket type."
defaultValue: ""
@@ -1586,13 +1357,9 @@ script:
- name: scedulded_start_date
description: The task ticket scheduled future start date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
- name: scedulded_end_date
description: The task ticket scheduled future end date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
outputs: []
- name: bmc-itsm-problem-investigation-create
description: "Creates a problem investigation ticket."
@@ -1601,14 +1368,10 @@ script:
description: The customer first name the ticket request is for.
required: true
defaultValue: ""
- predefined:
- - ""
- name: last_name
description: The customer last name the ticket request is for.
required: true
defaultValue: ""
- predefined:
- - ""
- name: status
description: The problem investigation ticket status.
required: true
@@ -1639,13 +1402,9 @@ script:
description: The problem investigation ticket summary.
required: true
defaultValue: ""
- predefined:
- - ""
- name: details
description: The detailed description on the problem investigation ticket.
defaultValue: ""
- predefined:
- - ""
- name: impact
description: The problem investigation ticket impact.
required: true
@@ -1669,88 +1428,54 @@ script:
- name: target_resolution_date
description: The future resolution date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: region
description: The region of the problem investigation location. The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group of the problem investigation location. The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site of the problem investigation location. The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assignee_pbm_mgr
description: The full name of the employee the ticket will be assigned to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assigned_group_pbm_mgr
description: The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: support_company_pbm_mgr
description: The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: support_organization_pbm_mgr
description: The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: "The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together."
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together.
defaultValue:
- predefined:
- - ""
- name: assigned_group
description: The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: investigation_justification
description: The justification for the ticket creation.
defaultValue: ""
- predefined:
- - ""
- name: temporary_workaround
description: The problem workaround.
defaultValue: ""
- predefined:
- - ""
- name: resolution
description: The ticket resolution.
defaultValue: ""
- predefined:
- - ""
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.ProblemInvestigation.RequestID
description: The problem investigation ticket unique Request ID.
@@ -1768,8 +1493,6 @@ script:
description: The problem investigation ticket request ID.
required: true
defaultValue: ""
- predefined:
- - ""
- name: status
description: The problem investigation ticket status.
defaultValue: ""
@@ -1797,8 +1520,6 @@ script:
- name: summary
description: The problem investigation ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: impact
description: The problem investigation ticket impact.
defaultValue: ""
@@ -1820,83 +1541,51 @@ script:
- name: target_resolution_date
description: The problem investigation ticket target resolution date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
- name: details
description: The problem investigation ticket detailed description.
defaultValue: ""
- predefined:
- - ""
- name: company
description: The company associated with the requester. By default it is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: region
description: The region of the problem investigation location. The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: site_group
description: The site group of the problem investigation location. The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: site
description: The site of the problem investigation location.The arguments region, site_group, and site should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: assigned_to
description: "The technical support person the ticket is assigned to."
defaultValue: ""
- predefined:
- - ""
- name: assigned_group_pbm_mgr
description: The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: support_company_pbm_mgr
description: The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: support_organization_pbm_mgr
description: The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: "The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together."
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together.
defaultValue:
- predefined:
- - ""
- name: assigned_group
description: The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together.
defaultValue: ""
- predefined:
- - ""
- name: investigation_justification
description: The justification for the ticket creation.
defaultValue: ""
- predefined:
- - ""
- name: temporary_workaround
description: The problem workaround.
defaultValue: ""
- predefined:
- - ""
- name: resolution
description: The ticket resolution.
defaultValue: ""
- predefined:
- - ""
- name: status_reason
description: The reason for changing the status. Required when the status argument is provided.
defaultValue: ""
@@ -1926,14 +1615,10 @@ script:
description: The known error ticket summary.
required: true
defaultValue: ""
- predefined:
- - ""
- name: details
description: The known error ticket Detailed description.
required: true
defaultValue: ""
- predefined:
- - ""
- name: impact
description: The known error ticket impact.
required: true
@@ -1966,74 +1651,46 @@ script:
description: Company associated with the Requester.
required: true
defaultValue: ""
- predefined:
- - ""
- name: target_resolution_date
description: Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days.
required: true
defaultValue: ""
- predefined:
- - ""
- name: resolution
description: Ticket resolution.
defaultValue: ""
- predefined:
- - ""
- name: assigned_group_pbm_mgr
description: It makes up the third tier of the Problem Coordinator’s Support Organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: support_company_pbm_mgr
description: "the Company for the Problem Coordinator’s Support Organization. It makes up the first tier of it."
defaultValue: ""
- predefined:
- - ""
- name: support_organization_pbm_mgr
description: It makes up the second tier of the Problem Coordinator’s Support Organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: "The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure."
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure.
defaultValue:
- predefined:
- - ""
- name: assigned_group
description: The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: investigation_justification
description: The justification for the ticket creation.
defaultValue: ""
- predefined:
- - ""
- name: assignee
description: The full name of the staff member to whom the ticket will be assigned to. It can be retrieved by using the 'bmc-itsm-user-list' command.
defaultValue: ""
- predefined:
- - ""
- name: assignee_pbm_mgr
description: The full name of the staff member to whom the ticket will be assign to as the problem coordinator. It can be retrieved by using the 'bmc-itsm-user-list' command.
defaultValue: ""
- predefined:
- - ""
- name: temporary_workaround
description: Error workaround.
defaultValue: ""
- predefined:
- - ""
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs:
- contextPath: BmcITSM.KnownError.RequestID
description: Known Error unique Request ID.
@@ -2051,8 +1708,6 @@ script:
description: The known error ticket request ID.
required: true
defaultValue: ""
- predefined:
- - ""
- name: status
description: The known error ticket status.
defaultValue: ""
@@ -2068,13 +1723,9 @@ script:
- name: summary
description: The known error ticket summary.
defaultValue: ""
- predefined:
- - ""
- name: details
description: The known error ticket detailed description.
defaultValue: ""
- predefined:
- - ""
- name: impact
description: The known error ticket impact.
defaultValue: ""
@@ -2103,53 +1754,33 @@ script:
- name: company
description: Company associated with the Requester. By default is determined by the logged in user.
defaultValue: ""
- predefined:
- - ""
- name: target_resolution_date
description: Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days.
defaultValue: ""
- predefined:
- - ""
- name: resolution
description: Ticket resolution.
defaultValue: ""
- predefined:
- - ""
- name: assigned_group_pbm_mgr
description: It makes up the third tier of the Problem Coordinator’s Support Organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: support_company_pbm_mgr
description: "the Company for the Problem Coordinator’s Support Organization. It makes up the first tier of it."
defaultValue: ""
- predefined:
- - ""
- name: support_organization_pbm_mgr
description: It makes up the second tier of the Problem Coordinator’s Support Organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_company
description: "The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure."
defaultValue: ""
- predefined:
- - ""
- name: assigned_support_organization
description: The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure.
defaultValue:
- predefined:
- - ""
- name: assigned_group
description: The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure.
defaultValue: ""
- predefined:
- - ""
- name: temporary_workaround
description: Error workaround.
defaultValue: ""
- predefined:
- - ""
- name: status_reason
description: "The reason for changing the status. Required when the status is provided."
defaultValue: ""
@@ -2164,18 +1795,12 @@ script:
- name: assignee
description: The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: assignee_pbm_mgr
description: The full name of the employee the ticket will be assign to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command.
defaultValue: ""
- predefined:
- - ""
- name: additional_fields
description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field.'
defaultValue: ""
- predefined:
- - ""
outputs: []
- name: bmc-itsm-change-request-template-list
description: Lists all change requests ticket templates. Useful for creating change request tickets. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
@@ -2184,33 +1809,21 @@ script:
description: A comma-separated list of change request template IDs. Used as a filtering argument.
isArray: true
defaultValue: ""
- predefined:
- - ""
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: description
description: The change request ticket description. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
outputs:
- type: String
contextPath: BmcITSM.ChangeRequestTemplate.Id
@@ -2227,33 +1840,21 @@ script:
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: description
description: The incident ticket template description. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: template_ids
description: A comma-separated list of incident template IDs. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
outputs:
- type: String
contextPath: BmcITSM.IncidentTemplate.Id
@@ -2270,33 +1871,21 @@ script:
- name: query
description: 'The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).'
defaultValue: ""
- predefined:
- - ""
- name: limit
description: The maximum number of records to retrieve.
defaultValue: "50"
- predefined:
- - ""
- name: page_size
description: The maximum number of records to retrieve per page.
defaultValue: ""
- predefined:
- - ""
- name: page
description: The page number of the results to retrieve.
defaultValue: ""
- predefined:
- - ""
- name: template_ids
description: A comma-separated list of task template IDs. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
- name: task_name
description: The task ticket template name. Used as a filtering argument.
defaultValue: ""
- predefined:
- - ""
outputs:
- type: String
contextPath: BmcITSM.TaskTemplate.Id
@@ -2324,6 +1913,292 @@ script:
name: lastUpdate
description: Gets the list of incidents that were modified since the last update time. Note that this method is here for debugging purposes. The get-modified-remote-data command is used as part of a Mirroring feature, which is available in Cortex XSOAR from version 6.1.
name: get-modified-remote-data
+ - name: bmc-itsm-support-group-list
+ description: Lists all support groups. Useful for getting possible (Company, Support Organization, Support Group) triplets.
+ arguments:
+ - name: limit
+ description: The maximum number of records to retrieve.
+ type: String
+ defaultValue: "50"
+ - name: page_size
+ description: The maximum number of records to retrieve per page.
+ type: String
+ defaultValue: ""
+ - name: page
+ description: The page number of the results to retrieve.
+ type: String
+ defaultValue: ""
+ - name: company
+ description: Company name. Used as a filtering argument.
+ type: String
+ defaultValue: ""
+ - name: support_organization
+ description: Support organization name. Used as a filtering argument.
+ type: String
+ defaultValue: ""
+ - name: support_group
+ description: Support group name. Used as a filtering argument.
+ type: String
+ defaultValue: ""
+ outputs:
+ - type: String
+ contextPath: BmcITSM.SupportGroup.SupportGroupID
+ description: The support group ID.
+ - type: String
+ contextPath: BmcITSM.SupportGroup.Company
+ description: The support company.
+ - type: String
+ contextPath: BmcITSM.SupportGroup.SupportOrganization
+ description: The support organization.
+ - type: String
+ contextPath: BmcITSM.SupportGroup.SupportGroupName
+ description: The support group.
+ - name: bmc-itsm-work-order-template-list
+ description: Lists all work order templates. Useful for creating work orders. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON, you can use the raw_response=true at the end of the command.
+ arguments:
+ - name: query
+ description: The query to search by. For example, query="Company like \"BMCOpsMonitoring\"". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html).
+ type: String
+ defaultValue: ""
+ - name: limit
+ description: The maximum number of records to retrieve.
+ type: String
+ defaultValue: "50"
+ - name: page_size
+ description: The maximum number of records to retrieve per page.
+ type: String
+ defaultValue: ""
+ - name: page
+ description: The page number of the results to retrieve.
+ type: String
+ defaultValue: ""
+ - name: template_ids
+ description: A comma-separated list of work order template GUIDs. Used as a filtering argument.
+ type: String
+ defaultValue: ""
+ isArray: true
+ - name: template_name
+ description: The work order template name. Used as a filtering argument.
+ type: String
+ defaultValue: ""
+ outputs:
+ - type: String
+ contextPath: BmcITSM.WorkOrderTemplate.Id
+ description: The work order template ID.
+ - type: String
+ contextPath: BmcITSM.WorkOrderTemplate.Name
+ description: The work order template name.
+ - type: String
+ contextPath: BmcITSM.WorkOrderTemplate.GUID
+ description: The work order template GUID.
+ - name: bmc-itsm-work-order-create
+ description: Creates a new work order ticket.
+ arguments:
+ - name: template_guid
+ description: The instance GUID of the template to use. The GUID can be retrieved by executing the bmc-itsm-work-order-template-list command.
+ type: String
+ defaultValue: ""
+ - name: first_name
+ description: Requester first name.
+ type: String
+ required: false
+ defaultValue: ""
+ - name: last_name
+ description: Requester last name.
+ type: String
+ required: false
+ defaultValue: ""
+ - name: customer_first_name
+ description: Customer first name.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: customer_last_name
+ description: Customer last name.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: customer_company
+ description: Customer company.
+ type: String
+ defaultValue: ""
+ required: true
+ - name: customer_person_id
+ description: Customer person ID. Use it when customer first and last name pair is not unique.
+ type: String
+ required: false
+ defaultValue: ""
+ - name: summary
+ description: The work order summary.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: detailed_description
+ description: The work order ticket detailed description.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: status
+ description: The work order status.
+ required: true
+ defaultValue: ""
+ predefined:
+ - Assigned
+ - Pending
+ - Waiting Approval
+ - Planning
+ - In Progress
+ - Completed
+ - Rejected
+ - Cancelled
+ - Closed
+ auto: PREDEFINED
+ - name: additional_fields
+ description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Example: additional_fields="Support Company=Calbro Services;Support Organization=IT Support;Support Group Name=Service Desk;Request Assignee=Scully Agent".'
+ type: String
+ defaultValue: ""
+ - name: priority
+ description: The work order ticket priority.
+ required: true
+ defaultValue: ""
+ predefined:
+ - Critical
+ - High
+ - Medium
+ - Low
+ auto: PREDEFINED
+ - name: work_order_type
+ description: The work order ticket type.
+ defaultValue: ""
+ predefined:
+ - General
+ - Project
+ auto: PREDEFINED
+ - name: location_company
+ description: The company associated with the task process.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: scedulded_start_date
+ description: The work order ticket scheduled future start date. For example, in 12 hours, in 7 days.
+ type: String
+ defaultValue: ""
+ - name: scedulded_end_date
+ description: The work order ticket scheduled future end date. For example, in 12 hours, in 7 days.
+ type: String
+ defaultValue: ""
+ outputs:
+ - contextPath: BmcITSM.WorkOrder.RequestID
+ description: The work order ticket unique Request ID.
+ type: String
+ - contextPath: BmcITSM.WorkOrder.DisplayID
+ description: The work order ticket unique Display ID.
+ type: String
+ - contextPath: BmcITSM.WorkOrder.CreateDate
+ description: The work order ticket creation date time in UTC.
+ type: Date
+ - name: bmc-itsm-work-order-update
+ description: Updates the work order ticket.
+ arguments:
+ - name: request_id
+ description: The ID of the work order ticket to update.
+ type: String
+ required: true
+ defaultValue: ""
+ - name: summary
+ description: The work order ticket summary.
+ type: String
+ - name: detailed_description
+ description: The work order ticket detailed description.
+ type: String
+ defaultValue: ""
+ - name: priority
+ description: The work order ticket priority.
+ defaultValue: ""
+ predefined:
+ - Critical
+ - High
+ - Medium
+ - Low
+ auto: PREDEFINED
+ - name: status
+ description: The work order ticket status.
+ defaultValue: ""
+ auto: PREDEFINED
+ predefined:
+ - Assigned
+ - Pending
+ - Waiting Approval
+ - Planning
+ - In Progress
+ - Completed
+ - Rejected
+ - Cancelled
+ - Closed
+ - name: status_reason
+ description: The reason for changing the ticket status.
+ defaultValue: ""
+ auto: PREDEFINED
+ predefined:
+ - Initial Status
+ - Awaiting Request Assignee
+ - Client Hold
+ - Client Additional Information Requested
+ - Client Action Required
+ - Support Contact Hold
+ - Local Site Action Required
+ - Purchase Order Approval
+ - Supplier Delivery
+ - Third Party Vendor Action Required
+ - Infrastructure Change
+ - Work not started
+ - Successful
+ - Successful with Issues
+ - Cancelled by Requester
+ - Cancelled by Support
+ - Customer Close
+ - System Close
+ - System Close with Issues
+ - name: company
+ description: The company associated with the requester. By default it is determined by the logged in user.
+ type: String
+ defaultValue: ""
+ - name: location_company
+ description: The company associated with the work order process.
+ type: String
+ defaultValue: ""
+ - name: assignee
+ description: The full name of the employee the work order is assigned to. It can be retrieved by using the bmc-itsm-user-list command.
+ type: String
+ defaultValue: ""
+ - name: support_organization
+ description: The organization for the problem assignee's support organization. It makes up the second tier of the problem assignee's support organization data structure. The support_organization and support_group arguments must be provided together. It can be retrieved by using the bmc-itsm-support-group-list command.
+ type: String
+ defaultValue: ""
+ - name: support_group
+ description: The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The support_organization and support_group arguments must be provided together. It can be retrieved by using the bmc-itsm-support-group-list command.
+ type: String
+ defaultValue: ""
+ - name: work_order_type
+ description: The work order ticket type.
+ defaultValue: ""
+ predefined:
+ - General
+ - Project
+ auto: PREDEFINED
+ - name: additional_fields
+ description: 'The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Example: additional_fields="Support Company=Calbro Services;Support Organization=IT Support;Support Group Name=Service Desk;Request Assignee=Scully Agent".'
+ type: String
+ defaultValue: ""
+ - name: scedulded_start_date
+ description: The work order ticket scheduled future start date. For example, in 12 hours, in 7 days.
+ type: String
+ defaultValue: ""
+ - name: scedulded_end_date
+ description: The work order ticket scheduled future end date. For example, in 12 hours, in 7 days.
+ type: String
+ defaultValue: ""
+ outputs: []
isfetch: true
ismappable: true
isremotesyncin: true
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_description.md b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_description.md
index c09705192371..cb2bc103b8f7 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_description.md
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_description.md
@@ -7,7 +7,8 @@
- Problem investigation
- Known error
- Task
- - Incident
+ - Incident
+ - Work order
**Note:**
To delete a ticket the user must have an Admin role.
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
index 3a1f8e5ac67b..675c49954956 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
@@ -1181,3 +1181,302 @@ def test_gen_fetch_incidents_query():
custom_query,
)
assert query == "'Submit Date' <= \"1657032797\" AND 'Submit Date' >\"1657032797\" AND 'Urgency' = \"4-Low\""
+
+
+@pytest.mark.parametrize(
+ "response_file_name,command_arguments,expected_outputs_len,expected_desc",
+ [
+ (
+ "list_support_group.json",
+ {
+ "limit": "2",
+ },
+ 2,
+ "APX990000000029",
+ ),
+ (
+ "list_support_group.json",
+ {
+ "page": "2",
+ "page_size": "1"
+ },
+ 1,
+ "SGP000000000110",
+ ),
+ (
+ "list_support_group_filter.json",
+ {
+ "limit": "2",
+ "company": "Apex"
+ },
+ 1,
+ "APX990000000029",
+ ),
+ ],
+)
+def test_list_support_group_command(
+ response_file_name,
+ command_arguments,
+ expected_outputs_len,
+ expected_desc,
+ requests_mock,
+ mock_client,
+):
+ """
+ Scenario: List support groups.
+ Given:
+ - User has provided valid credentials.
+ - User may provide pagination args.
+ - User may provide filtering arguments.
+ - User may provide query arguments.
+ When:
+ - bmc-itsm-support-group-list command called.
+ Then:
+ - Ensure outputs prefix is correct.
+ - Ensure number of items is correct.
+ - Validate outputs' fields.
+ """
+ from BmcITSM import support_group_list_command
+
+ mock_response = load_mock_response(response_file_name)
+ url = f"{BASE_URL}/api/arsys/v1/entry/CTM:Support Group"
+ requests_mock.get(url=url, json=mock_response)
+
+ result = support_group_list_command(mock_client, command_arguments)
+ outputs = result.outputs
+
+ assert result.outputs_prefix == "BmcITSM.SupportGroup"
+ assert len(outputs) == expected_outputs_len
+ assert outputs[0]["SupportGroupID"] == expected_desc
+
+
+@pytest.mark.parametrize(
+ "response_file_name,command_arguments,expected_outputs_len,expected_desc",
+ [
+ (
+ "list_work_order_template.json",
+ {
+ "limit": "2",
+ },
+ 2,
+ "IDGCWH5RDMNSBARVRM5ERVRM5EKP11",
+ ),
+ (
+ "list_work_order_template.json",
+ {
+ "page": "2",
+ "page_size": "1"
+ },
+ 1,
+ "IDGCWH5RDMNSBARVRNNGRVRNNGKY0X",
+ ),
+ (
+ "list_work_order_template_filter.json",
+ {
+ "limit": "2",
+ "template_name": "UNIX User"
+ },
+ 1,
+ "IDGCWH5RDMNSBARWFDYBRWFDYBB8NV",
+ ),
+ (
+ "list_work_order_template.json",
+ {
+ "limit": 2,
+ "template_ids": "IDGCWH5RDMNSBARVRM5ERVRM5EKP11,IDGCWH5RDMNSBARVRNNGRVRNNGKY0X"
+ },
+ 2,
+ "IDGCWH5RDMNSBARVRM5ERVRM5EKP11"
+ ),
+ (
+ "list_work_order_template_filter.json",
+ {
+ "limit": 2,
+ "query": "Summary like \"%UNIX%\""
+ },
+ 1,
+ "IDGCWH5RDMNSBARWFDYBRWFDYBB8NV"
+ ),
+ ],
+)
+def test_list_work_order_template_command(
+ response_file_name,
+ command_arguments,
+ expected_outputs_len,
+ expected_desc,
+ requests_mock,
+ mock_client,
+):
+ """
+ Scenario: List work order templates.
+ Given:
+ - User has provided valid credentials.
+ - User may provide pagination args.
+ - User may provide filtering arguments.
+ - User may provide query arguments.
+ When:
+ - bmc-itsm-work-order-template-list command called.
+ Then:
+ - Ensure outputs prefix is correct.
+ - Ensure number of items is correct.
+ - Validate outputs' fields.
+ """
+ from BmcITSM import work_order_template_list_command
+
+ mock_response = load_mock_response(response_file_name)
+ url = f"{BASE_URL}/api/arsys/v1/entry/WOI:Template"
+ requests_mock.get(url=url, json=mock_response)
+
+ result = work_order_template_list_command(mock_client, command_arguments)
+ outputs = result.outputs
+
+ assert result.outputs_prefix == "BmcITSM.WorkOrderTemplate"
+ assert len(outputs) == expected_outputs_len
+ assert outputs[0]["GUID"] == expected_desc
+
+
+@pytest.mark.parametrize(
+ "response_file_name,command_arguments,expected_outputs_len,expected_id",
+ [
+ (
+ "create_work_order.json",
+ {
+ "customer_first_name": "Scully",
+ "customer_last_name": "Agent",
+ "customer_company": "Calbro Services",
+ "summary": "Sample WO 20240205",
+ "detailed_description": "Sample WO 20240205",
+ "status": "Assigned",
+ "priority": "Low",
+ "location_company": "Calbro Services",
+ },
+ 3,
+ "WO0000000000701",
+ ),
+ ],
+)
+def test_work_order_create_command(
+ response_file_name,
+ command_arguments,
+ expected_outputs_len,
+ expected_id,
+ requests_mock,
+ mock_client,
+):
+ """
+ Scenario: Create Work order.
+ Given:
+ - User has provided valid credentials.
+ When:
+ - bmc-itsm-work-order-create command called.
+ Then:
+ - Ensure outputs prefix is correct.
+ - Ensure number of items is correct.
+ - Validate outputs' fields.
+ """
+ from BmcITSM import work_order_create_command
+
+ mock_response = load_mock_response(response_file_name)
+ fields = "values(Request ID,WorkOrder_ID,Create Date)"
+ url = f"{BASE_URL}/api/arsys/v1/entry/WOI:WorkOrderInterface_Create?fields={fields}"
+ requests_mock.post(url=url, json=mock_response)
+
+ request_id = "WO0000000000701"
+ url = f"{BASE_URL}/api/arsys/v1/entry/WOI:WorkOrderInterface/{request_id}"
+ requests_mock.get(url=url, json=load_mock_response("get_work_order.json"))
+
+ result = work_order_create_command(mock_client, command_arguments)
+ outputs = result.outputs
+
+ assert result.outputs_prefix == "BmcITSM.WorkOrder"
+ assert len(outputs) == expected_outputs_len
+ assert outputs["RequestID"] == expected_id
+
+
+@pytest.mark.parametrize(
+ "request_id,command_arguments,expected_msg",
+ [
+ (
+ "WO0000000000701",
+ {
+ "request_id": "WO0000000000701",
+ "status": "In Progress",
+ "summary": "Updated Summary"
+
+ },
+ "Work Order: WO0000000000701 was successfully updated.",
+ ),
+ ],
+)
+def test_work_order_update_command(request_id, command_arguments, expected_msg,
+ requests_mock, mock_client):
+ """
+ Scenario: Update Work order.
+ Given:
+ - User has provided valid credentials.
+ - User has provided updated values
+ When:
+ - bmc-itsm-work-order-update command called.
+ Then:
+ - Ensure the human readable message is correct.
+ """
+ from BmcITSM import work_order_update_command
+
+ url = f"{BASE_URL}/api/arsys/v1/entry/WOI:WorkOrder/{request_id}"
+ requests_mock.put(url=url, text="")
+
+ result = work_order_update_command(mock_client, command_arguments)
+ readable_output = result.readable_output
+
+ assert readable_output == expected_msg
+
+
+@pytest.mark.parametrize(
+ "response_file_name,command_arguments,ticket_form,expected_outputs_len,expected_name",
+ [
+ (
+ "list_tickets_work_order.json",
+ {
+ "limit": "2",
+ "ticket_ids": "WO0000000000009",
+ "ticket_type": "work order",
+ },
+ "WOI:WorkOrderInterface",
+ 1,
+ "WO0000000000009",
+ ),
+ ],
+)
+def test_ticket_list_work_order_command(
+ response_file_name,
+ command_arguments,
+ ticket_form,
+ expected_outputs_len,
+ expected_name,
+ requests_mock,
+ mock_client,
+):
+ """
+ Scenario: List work order tickets.
+ Given:
+ - User has provided valid credentials.
+ - User may provide filtering arguments.
+ When:
+ - bmc-itsm-ticket-list command called.
+ Then:
+ - Ensure outputs prefix is correct.
+ - Ensure number of items is correct.
+ - Validate outputs' fields.
+ """
+ from BmcITSM import ticket_list_command
+
+ mock_response = load_mock_response(response_file_name)
+ url = f"{BASE_URL}/api/arsys/v1/entry/{ticket_form}"
+ requests_mock.get(url=url, json=mock_response)
+
+ result = ticket_list_command(mock_client, command_arguments)
+ outputs = result.outputs
+
+ assert result.outputs_prefix == "BmcITSM.Ticket"
+ assert len(outputs) == expected_outputs_len
+ assert outputs[0]["DisplayID"] == expected_name
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/README.md b/Packs/BmcITSM/Integrations/BmcITSM/README.md
index 62e821a541c1..8b42b26fce92 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/README.md
+++ b/Packs/BmcITSM/Integrations/BmcITSM/README.md
@@ -1,5 +1,5 @@
-BMC Helix ITSM integration enables customers to manage service request, incident, change request, task, problem investigation and known error tickets.
-This integration was integrated and tested with version 21.02 of BmcITSM
+BMC Helix ITSM integration enables customers to manage service request, incident, change request, task, problem investigation, known error and work order tickets.
+This integration was integrated and tested with version 22.1.05 of BmcITSM
## Configure BMC Helix ITSM on Cortex XSOAR
@@ -43,16 +43,16 @@ Retrieves a list of user profiles from BMC Helix ITSM. The records are retrieved
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| user_ids | A comma-separated list of user IDs. Used as a filtering argument. Possible values are: . | Optional |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| first_name | The user first name. Used as a filtering argument. Possible values are: . | Optional |
-| last_name | The user first name. Used as a filtering argument. Possible values are: . | Optional |
-| company | The user company name. Used as a filtering argument. Possible values are: . | Optional |
-| department | The user department name. Used as a filtering argument. Possible values are: . | Optional |
-| organization | The user organization name. Used as a filtering argument. Possible values are: . | Optional |
+| user_ids | A comma-separated list of user IDs. Used as a filtering argument. | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| first_name | The user first name. Used as a filtering argument. | Optional |
+| last_name | The user first name. Used as a filtering argument. | Optional |
+| company | The user company name. Used as a filtering argument. | Optional |
+| department | The user department name. Used as a filtering argument. | Optional |
+| organization | The user organization name. Used as a filtering argument. | Optional |
#### Context Output
@@ -119,13 +119,13 @@ Retrieves a list of companies from BMC Helix ITSM. The records are retrieved by
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| company_ids | A comma-separated list of company ID. Filtering argument. Possible values are: . | Optional |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| company | The user company name. Used as a filtering argument. Possible values are: . | Optional |
-| company_type | The user company type. Used as a filtering argument. Possible values are: . | Optional |
+| company_ids | A comma-separated list of company ID. Filtering argument. | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| company | The user company name. Used as a filtering argument. | Optional |
+| company_type | The user company type. Used as a filtering argument. | Optional |
#### Context Output
@@ -180,12 +180,12 @@ Retrieves a list of service request definitions. The records are retrieved by th
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| srd_ids | A comma-separated list of service request definition IDs. Used as a filtering argument. Possible values are: . | Optional |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| description | The service request ticket definition description. Used as a filtering argument. Possible values are: . | Optional |
+| srd_ids | A comma-separated list of service request definition IDs. Used as a filtering argument. | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| description | The service request ticket definition description. Used as a filtering argument. | Optional |
#### Context Output
@@ -240,19 +240,19 @@ Retrieves a list of BMC Helix ITSM tickets. The records are retrieved by the que
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_type | The type of tickets to search for. Possible values are: service request, incident, task, change request, problem investigation, known error. | Required |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| ticket_ids | A comma-separated list of ticket request IDs. Used as a filtering argument. Possible values are: . | Optional |
+| ticket_type | The type of tickets to search for. Possible values are: service request, incident, task, change request, problem investigation, known error, work order. | Required |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| ticket_ids | A comma-separated list of ticket request IDs. Used as a filtering argument. Use Display ID for work order type. | Optional |
| status | The status of the tickets to fetch. Since each ticket type has its own unique set of statuses, select only statuses that match the selected ticket type(s). Possible values are: Draft, In Cart, In Review, Submitted, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed, New, Assigned, Resolved, Request For Authorization, Request For Change, Planning In Progress, Scheduled For Review, Scheduled For Approval, Scheduled, Implementation In Progress, Staged, Work In Progress, Waiting, Bypassed, Under Review, Under Investigation, Scheduled For Correction, Assigned To Vendor, No Action Planned, Corrected. | Optional |
| impact | The ticket impact. Used as a filtering argument. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The ticket urgency. Used as a filtering argument. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| priority | The ticket priority. Used as a filtering argument. Possible values are: Critical, High, Medium, Low. | Optional |
| risk_level | The ticket risk level. Used as a filtering argument. Possible values are: Risk Level 1, Risk Level 2, Risk Level 3, Risk Level 4. | Optional |
| change_type | The ticket change type level. Relevant only for ticket type change requests. Used as a filtering argument. Possible values are: Project, Change, Release, Asset Configuration, Asset Management, Asset Lease, Purchase Requisition, Asset Maintenance. | Optional |
-| summary | The ticket summary. Used as a filtering argument. Possible values are: . | Optional |
+| summary | The ticket summary. Used as a filtering argument. | Optional |
#### Context Output
@@ -387,7 +387,7 @@ Retrieves a list of BMC Helix ITSM tickets. The records are retrieved by the que
### bmc-itsm-service-request-create
***
-Creates a new service request ticket. A service request ticket is the request record that is generated from the service request definition to manage and track the execution. To create it, you need to provide the srd_instance_id argument, which can be retrieved by by executing the bmc-itsm-service-request-definition-list command and extracting the instanceID field. User and company arguments can be retrieved by executing the bmc-itsm-user-list and bmc-itsm-company-list.
+Creates a new service request ticket. A service request ticket is the request record that is generated from the service request definition to manage and track the execution. To create it, you need to provide the srd_instance_id argument, which can be retrieved by executing the bmc-itsm-service-request-definition-list command and extracting the instanceID field. User and company arguments can be retrieved by executing the bmc-itsm-user-list and bmc-itsm-company-list commands. To see the entire JSON, you can add raw_response=true at the end of the command.
#### Base Command
@@ -397,15 +397,15 @@ Creates a new service request ticket. A service request ticket is the request re
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| srd_instance_id | The instance ID of the service request ticket. It can be retrieved by executing bmc-itsm-service-request-definition-list command. . Possible values are: . | Required |
-| first_name | The requester first name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. Possible values are: . | Optional |
-| last_name | The requester last name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. . Possible values are: . | Optional |
-| login_id | The requester login ID. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. Possible values are: . | Optional |
-| summary | The service request ticket summary. Possible values are: . | Optional |
-| status | The service request ticket status. . Possible values are: Draft, In Cart, In Review, Submitted, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Optional |
+| srd_instance_id | The instance ID of the service request ticket. It can be retrieved by executing bmc-itsm-service-request-definition-list command. | Required |
+| first_name | The requester first name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. | Optional |
+| last_name | The requester last name. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. | Optional |
+| login_id | The requester login ID. By default it is determined by the logged in user. If provided, login_id, first_name, and last_name arguments must be provided together. | Optional |
+| summary | The service request ticket summary. | Optional |
+| status | The service request ticket status. Possible values are: Draft, In Cart, In Review, Submitted, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Optional |
| urgency | The ticket urgency. Required when the ticket creation is without a template. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| impact | The ticket impact. Required when the ticket creation is without a template. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: . | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. | Optional |
#### Context Output
@@ -449,19 +449,19 @@ Updates the details of a service request ticket for a given request ID. User and
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_request_id | The unique identifier of the service request ticket to update. Possible values are: . | Required |
-| customer_first_name | The customer first name. By default it is determined by the logged in user. . Possible values are: . | Optional |
-| customer_last_name | The customer last name. By default it is determined by the logged in user. . Possible values are: . | Optional |
-| status | The service request ticket status. . Possible values are: Draft, In Cart, In Review, Submitted, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Optional |
+| ticket_request_id | The unique identifier of the service request ticket to update. | Required |
+| customer_first_name | The customer first name. By default it is determined by the logged in user. | Optional |
+| customer_last_name | The customer last name. By default it is determined by the logged in user. | Optional |
+| status | The service request ticket status. Possible values are: Draft, In Cart, In Review, Submitted, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Optional |
| urgency | The ticket request urgency. Required when the ticket creation is without a template. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| impact | Incident Request impact. Required when the ticket creation is without a template. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
-| status_reason | The reason for updating the status. Required only if status argument is provided. . Possible values are: Review, Need More Information, Approval, System Error, With Issues, Automatically Closed, Successful, By User, By Provider, System, Cancelled, Reopen By User. | Optional |
-| location_company | The company associated with the service request process. Possible values are: . | Optional |
-| region | The region associated with the company location. Possible values are: . | Optional |
-| site_group | The site group associated with the region. Possible values are: . | Optional |
-| site | The site associated with the site group. Possible values are: . | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: . | Optional |
+| status_reason | The reason for updating the status. Required only if status argument is provided. Possible values are: Review, Need More Information, Approval, System Error, With Issues, Automatically Closed, Successful, By User, By Provider, System, Cancelled, Reopen By User. | Optional |
+| location_company | The company associated with the service request process. | Optional |
+| region | The region associated with the company location. | Optional |
+| site_group | The site group associated with the region. | Optional |
+| site | The site associated with the site group. | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. | Optional |
#### Context Output
@@ -485,26 +485,26 @@ Update incident ticket.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_request_id | The ID of the incident ticket to update. Possible values are: . | Required |
-| first_name | The customer first name the incident ticket is for. Possible values are: . | Optional |
-| last_name | The customer last name the incident ticket is for. Possible values are: . | Optional |
-| summary | The incident ticket summary. Possible values are: . | Optional |
-| service_type | The type of the incident ticket. . Possible values are: User Service Restoration, User Service Request, Infrastructure Restoration, Infrastructure Event, Security Incident. | Optional |
+| ticket_request_id | The ID of the incident ticket to update. | Required |
+| first_name | The customer first name the incident ticket is for. | Optional |
+| last_name | The customer last name the incident ticket is for. | Optional |
+| summary | The incident ticket summary. | Optional |
+| service_type | The type of the incident ticket. Possible values are: User Service Restoration, User Service Request, Infrastructure Restoration, Infrastructure Event, Security Incident. | Optional |
| urgency | The ticket urgency. Required when the ticket creation is without a template. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| impact | The ticket impact. Required when the ticket creation is without a template. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| status | The incident ticket status. Possible values are: New, Assigned, In Progress, Pending, Resolved, Closed, Cancelled. | Optional |
-| reported_source | The incident ticket reported source. . Possible values are: Direct Input, Email,External Escalation, Fax, Self Service, Systems Management, Phone, Voice Mail, Walk In, Web, Other, BMC Impact Manager Event. | Optional |
+| reported_source | The incident ticket reported source. Possible values are: Direct Input, Email, External Escalation, Fax, Self Service, Systems Management, Phone, Voice Mail, Walk In, Web, Other, BMC Impact Manager Event. | Optional |
| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value".Possible fields: Assigned Group, Assignee, or any other custom field.. | Optional |
-| detailed_description | The incident ticket summary. Possible values are: . | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| assigned_support_company | The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_group | The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assignee_login_id | The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together. Possible values are: . | Optional |
-| region | The region, which makes up the second tier of the customer’s business organization data structure. Possible values are: . | Optional |
-| site_group | The site group associated with the region. Possible values are: . | Optional |
-| site | The site associated with the site group. Possible values are: . | Optional |
+| detailed_description | The incident ticket summary. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| assigned_support_company | The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure. | Optional |
+| assigned_support_organization | The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure. | Optional |
+| assigned_group | The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure. | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assignee_login_id | The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together. | Optional |
+| region | The region, which makes up the second tier of the customer’s business organization data structure. | Optional |
+| site_group | The site group associated with the region. | Optional |
+| site | The site associated with the site group. | Optional |
| status_reason | The reason for updating the ticket status. Required when status is provided. Possible values are: Infrastructure Change Created, Local Site Action Required, Purchase Order Approval, Registration Approval, Supplier Delivery, Support Contact Hold, Third Party Vendor Action Reqd, Client Action Required, Infrastructure Change Request, Future Enhancement, Pending Original Incident, Client Hold, Monitoring Incident, Customer Follow-Up Required, Temporary Corrective Action, No Further Action Required, Resolved by Original Incident, Automated Resolution Reported, No longer a Causal CI, Pending Causal Incident Resolution, Resolved by Causal Incident. | Optional |
| resolution | The ticket resolution description. Required when status is provided. | Optional |
@@ -530,8 +530,8 @@ Deletes a ticket by its request ID. Only admin users can perform this command.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_ids | A comma-separated list of ticket request IDs to delete. Possible values are: . | Required |
-| ticket_type | The type of the tickets to delete. Possible values are: incident, task, change request, problem investigation, known error. | Required |
+| ticket_ids | A comma-separated list of ticket request IDs to delete. | Required |
+| ticket_type | The type of the tickets to delete. Possible values are: incident, task, change request, problem investigation, known error, work order. | Required |
#### Context Output
@@ -555,26 +555,26 @@ Creates a new incident ticket. An incident is any event that is not part of the
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| first_name | The customer first name the incident ticket is for. Possible values are: . | Required |
-| last_name | The customer last name the incident ticket is for. Possible values are: . | Required |
-| template_instance_id | The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The instance ID can be retrieved by executing the bmc-itsm-incident-template-list command. Possible values are: . | Optional |
-| summary | The incident ticket summary. Required when the template_instance_id argument is not provided. Possible values are: . | Optional |
+| first_name | The customer first name the incident ticket is for. | Required |
+| last_name | The customer last name the incident ticket is for. | Required |
+| template_instance_id | The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The instance ID can be retrieved by executing the bmc-itsm-incident-template-list command. | Optional |
+| summary | The incident ticket summary. Required when the template_instance_id argument is not provided. | Optional |
| service_type | The type of the incident ticket. Required when the template_instance_id argument is not provided. Possible values are: User Service Restoration, User Service Request, Infrastructure Restoration, Infrastructure Event, Security Incident. | Optional |
| urgency | The ticket urgency. Required when the ticket creation is without a template. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Required |
| impact | The ticket impact. Required when the creation is without a template. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Required |
| status | Incident status. Possible values are: New, Assigned, In Progress, Pending, Resolved, Closed, Cancelled. | Required |
| reported_source | The incident ticket reported source. Required when the template_instance_id argument is not provided. Possible values are: Direct Input, Email,External Escalation, Fax, Self Service, Systems Management, Phone, Voice Mail, Walk In, Web, Other, BMC Impact Manager Event. | Optional |
| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value".Possible fields: Assigned Group, Assignee, or any other custom field.. | Optional |
-| details | The incident ticket detailed description. Possible values are: . | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| assigned_support_company | The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_group | The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure. Possible values are: . | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assignee_login_id | The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| region | The region associated with the company. Possible values are: . | Optional |
-| site_group | The site group associated with the region. Possible values are: . | Optional |
-| site | The site associated with the site group. Possible values are: . | Optional |
+| details | The incident ticket detailed description. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| assigned_support_company | The company for the assignee’s support organization. It makes up the first tier of the assignee’s support organization data structure. | Optional |
+| assigned_support_organization | The organization for the assignee’s support organization. It makes up the second tier of the assignee’s support organization data structure. | Optional |
+| assigned_group | The group for the assignee’s support organization. It makes up the third tier of the assignee’s support organization data structure. | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assignee_login_id | The login ID of the assignee. The assignee and assignee_login_id arguments must be provided together. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| region | The region associated with the company. | Optional |
+| site_group | The site group associated with the region. | Optional |
+| site | The site associated with the site group. | Optional |
#### Context Output
@@ -620,20 +620,20 @@ Creates a change request ticket in BMC Helix ITSM. The ticket is created by usin
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| first_name | The requester first name. Possible values are: . | Required |
-| last_name | The requester last name. Possible values are: . | Required |
-| customer_first_name | The customer first name. . Possible values are: . | Optional |
-| customer_last_name | The customer last name. Possible values are: . | Optional |
-| summary | The change request ticket title. Required when the template ID argument is not provided. Possible values are: . | Optional |
-| template_id | The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The ID can be retrieved by executing the bmc-itsm-change-request-template-list command. Possible values are: . | Optional |
+| first_name | The requester first name. | Required |
+| last_name | The requester last name. | Required |
+| customer_first_name | The customer first name. | Optional |
+| customer_last_name | The customer last name. | Optional |
+| summary | The change request ticket title. Required when the template ID argument is not provided. | Optional |
+| template_id | The instance ID of the template to use. Required only when the ticket attributes should be based on the template's fields. The ID can be retrieved by executing the bmc-itsm-change-request-template-list command. | Optional |
| change_type | The change request ticket type. Required when the ticket creation is without a template. Possible values are: Project, Change, Release, Asset Configuration, Asset Management, Asset Lease, Purchase Requisition, Asset Maintenance. | Optional |
| change_timing | The class of the change request ticket which best describes your scenario. Possible values are: Emergency, Expedited, Latent, Normal, No Impact, Standard. | Optional |
| impact | The change request ticket impact. Required when the ticket creation is without a template. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The change request ticket urgency. Required when the ticket creation is without a template. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| risk_level | The change request ticket risk level. Required when the ticket creation is without a template. Possible values are: Risk Level 1, Risk Level 2, Risk Level 3, Risk Level 4, Risk Level 5. | Optional |
| status | The change request ticket status. Required when the ticket creation is without a template. Possible values are: Request For Authorization, Request For Change, Planning In Progress, Scheduled For Review, Scheduled For Approval, Scheduled, Implementation In Progress, Pending, Rejected, Completed, Closed, Cancelled. | Optional |
-| location_company | The company associated with the change request process. Required when template ID argument is not provided. Possible values are: . | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: . | Optional |
+| location_company | The company associated with the change request process. Required when template ID argument is not provided. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. | Optional |
#### Context Output
@@ -680,27 +680,27 @@ Updates the details of change request ticket for the specified request ID.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| ticket_request_id | The ID of the change request ticket to update. | Required |
-| first_name | The customer first name the change request ticket is for. . Possible values are: . | Optional |
-| last_name | The customer last name the change request ticket is for. . Possible values are: . | Optional |
-| summary | The change request ticket summary. Possible values are: . | Optional |
-| change_type | The change request ticket type. . Possible values are: Project, Change, Release, Asset Configuration, Asset Management, Asset Lease, Purchase Requisition, Asset Maintenance. | Optional |
+| first_name | The customer first name the change request ticket is for. | Optional |
+| last_name | The customer last name the change request ticket is for. | Optional |
+| summary | The change request ticket summary. | Optional |
+| change_type | The change request ticket type. Possible values are: Project, Change, Release, Asset Configuration, Asset Management, Asset Lease, Purchase Requisition, Asset Maintenance. | Optional |
| change_timing | The class of the change request ticket which best describes your scenario. Possible values are: Emergency, Expedited, Latent, Normal, No Impact, Standard. | Optional |
| impact | The change request ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The change request ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
| risk_level | The change request ticket risk level. Possible values are: Risk Level 1, Risk Level 2, Risk Level 3, Risk Level 4, Risk Level 5. | Optional |
| status | The change request ticket status. Possible values are: Request For Authorization, Request For Change, Planning In Progress, Scheduled For Review, Scheduled For Approval, Scheduled, Implementation In Progress, Pending, Rejected, Completed, Closed, Cancelled. | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. Possible values are: . | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| organization | The organization associated with the requester. Possible values are: . | Optional |
-| department | The department associated with the requester. Possible values are: . | Optional |
-| location_company | The company associated with the change request process. Possible values are: . | Optional |
-| region | The region associated with the company location. Possible values are: . | Optional |
-| site_group | The site group associated with the region. Possible values are: . | Optional |
-| site | The site associated with the site group. Possible values are: . | Optional |
-| support_organization | The second tier of the change manager’s support organization data structure. Possible values are: . | Optional |
-| support_group_name | The third tier of the change manager’s support organization data structure. Possible values are: . | Optional |
-| status_reason | The reason for updating the ticket status. Required when status is provided. . Possible values are: No Longer Required, Funding Not Available, To Be Re-Scheduled, Resources Not Available, Successful, Successful with Issues, Unsuccessful, Backed Out, Final Review Complete, Final Review Required, Additional Coding Required, Insufficient Task Data, In Verification, In Rollout, Insufficient Change Data, Schedule Conflicts, In Development, In Test, In Build, In Rollback, In Documentation, Vendor Purchase, Support Group Communication, Task Review, Miscellaneous, Future Enhancement, Manager Intervention, Accepted, Assigned, Built, On Hold. | Optional |
-| details | The change request ticket details. Possible values are: . | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee, or any other custom field. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| organization | The organization associated with the requester. | Optional |
+| department | The department associated with the requester. | Optional |
+| location_company | The company associated with the change request process. | Optional |
+| region | The region associated with the company location. | Optional |
+| site_group | The site group associated with the region. | Optional |
+| site | The site associated with the site group. | Optional |
+| support_organization | The second tier of the change manager’s support organization data structure. | Optional |
+| support_group_name | The third tier of the change manager’s support organization data structure. | Optional |
+| status_reason | The reason for updating the ticket status. Required when status is provided. Possible values are: No Longer Required, Funding Not Available, To Be Re-Scheduled, Resources Not Available, Successful, Successful with Issues, Unsuccessful, Backed Out, Final Review Complete, Final Review Required, Additional Coding Required, Insufficient Task Data, In Verification, In Rollout, Insufficient Change Data, Schedule Conflicts, In Development, In Test, In Build, In Rollback, In Documentation, Vendor Purchase, Support Group Communication, Task Review, Miscellaneous, Future Enhancement, Manager Intervention, Accepted, Assigned, Built, On Hold. | Optional |
+| details | The change request ticket details. | Optional |
#### Context Output
@@ -713,7 +713,7 @@ There is no context output for this command.
>Incident: CRQ000000000313 was successfully updated.
### bmc-itsm-task-create
***
-Creates a new task ticket. By splitting cases into individual tasks (assignments), you can focus on one assignment at a time to resolve cases more efficiently. Task ticket type can be attached only to the following ticket types: change request, incident, problem investigation, and known error.
+Creates a new task ticket. By splitting cases into individual tasks (assignments), you can focus on one assignment at a time to resolve cases more efficiently. Task ticket type can be attached only to the following ticket types: change request, incident, problem investigation, known error, and work order.
#### Base Command
@@ -723,26 +723,26 @@ Creates a new task ticket. By splitting cases into individual tasks (assignments
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| template_id | The instance ID of the template to use. The ID can be retrieved by executing the bmc-itsm-task-template-list command. Possible values are: . | Optional |
-| summary | The task ticket summary. Possible values are: . | Required |
-| details | The task ticket detailed description. Possible values are: . | Required |
-| root_ticket_type | The parent ticket type. Possible values are: change request, incident, problem investigation, known error. | Required |
-| root_request_id | The request ID of the parent ticket. Can be found in the context output of the bmc-itsm-ticket-list command. . Possible values are: . | Required |
-| root_request_name | The display name of the parent ticket in the task ticket. If not provided, the parent ticket displayID is displayed. . Possible values are: . | Optional |
-| root_request_mode | The parent ticket request mode. . Possible values are: Real, Simulation. Default is Real. | Optional |
+| template_id | The instance ID of the template to use. The ID can be retrieved by executing the bmc-itsm-task-template-list command. | Optional |
+| summary | The task ticket summary. | Required |
+| details | The task ticket detailed description. | Required |
+| root_ticket_type | The parent ticket type. Possible values are: change request, incident, problem investigation, known error, work order. | Required |
+| root_request_id | The request ID of the parent ticket. Can be found in the context output of the bmc-itsm-ticket-list command. Use Display ID for work orders. | Required |
+| root_request_name | The display name of the parent ticket in the task ticket. If not provided, the parent ticket displayID is displayed. | Optional |
+| root_request_mode | The parent ticket request mode. Possible values are: Real, Simulation. Default is Real. | Optional |
| status | The task status. Possible values are: Staged, Assigned, Pending, Work In Progress, Waiting, Closed, Bypassed. | Required |
| task_type | Whether the task is manual or automatic. Possible values are: Automatic, Manual. | Optional |
| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assignee or any other custom field. Possible values are: The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value".Possible fields: Assignee or any other custom field.. | Optional |
| priority | The task ticket priority. Possible values are: Critical, High, Medium, Low. | Required |
-| location_company | The company associated with the task process. Possible values are: . | Required |
-| support_company | The technical support team associated with the company. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the task's support organization. It makes up the second tier of the task’s support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. Possible values are: . | Optional |
-| assigned_support_group | The group for the task's support organization. It makes up the third tier of the task's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. Possible values are: . | Optional |
+| location_company | The company associated with the task process. | Required |
+| support_company | The technical support team associated with the company. | Optional |
+| assigned_support_organization | The organization for the task's support organization. It makes up the second tier of the task’s support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. | Optional |
+| assigned_support_group | The group for the task's support organization. It makes up the third tier of the task's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. | Optional |
| impact | The task ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The task ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| scedulded_start_date | The task ticket scheduled future start date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
-| scedulded_end_date | The task ticket scheduled future end date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| scedulded_start_date | The task ticket scheduled future start date. For example, in 12 hours, in 7 days. | Optional |
+| scedulded_end_date | The task ticket scheduled future end date. For example, in 12 hours, in 7 days. | Optional |
#### Context Output
@@ -788,22 +788,22 @@ Updates the task ticket.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_request_id | The ID of the task ticket to update. Possible values are: . | Required |
-| summary | The task ticket summary. Possible values are: . | Optional |
-| details | The task ticket detailed description. Possible values are: . | Optional |
+| ticket_request_id | The ID of the task ticket to update. | Required |
+| summary | The task ticket summary. | Optional |
+| details | The task ticket detailed description. | Optional |
| priority | The task ticket priority. Possible values are: Critical, High, Medium, Low. | Optional |
| status | The task ticket status. Possible values are: Staged, Assigned, Pending, Work In Progress, Waiting, Closed, Bypassed. | Optional |
-| status_reason | The reason for changing the ticket status. Required when the status is changed. . Possible values are: Success, Failed, Cancelled, Assignment, Staging in Progress, Staging Complete, Acknowledgment, Another Task, Task Rule, Completion, Error. | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| location_company | The company associated with the task process. Possible values are: . | Optional |
-| support_company | The technical support team associated with the company. Possible values are: . | Optional |
-| assignee | The full name of the employee the ticket is assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. Possible values are: . | Optional |
-| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. Possible values are: . | Optional |
-| task_type | The task ticket type. . Possible values are: Automatic, Manual. | Optional |
+| status_reason | The reason for changing the ticket status. Required when the status is changed. Possible values are: Success, Failed, Cancelled, Assignment, Staging in Progress, Staging Complete, Acknowledgment, Another Task, Task Rule, Completion, Error. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| location_company | The company associated with the task process. | Optional |
+| support_company | The technical support team associated with the company. | Optional |
+| assignee | The full name of the employee the ticket is assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. | Optional |
+| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. | Optional |
+| task_type | The task ticket type. Possible values are: Automatic, Manual. | Optional |
| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assignee or any other custom field. Possible values are: The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value".Possible fields: Assignee or any other custom field.. | Optional |
-| scedulded_start_date | The task ticket scheduled future start date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
-| scedulded_end_date | The task ticket scheduled future end date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
+| scedulded_start_date | The task ticket scheduled future start date. For example, in 12 hours, in 7 days. | Optional |
+| scedulded_end_date | The task ticket scheduled future end date. For example, in 12 hours, in 7 days. | Optional |
#### Context Output
@@ -827,31 +827,31 @@ Creates a problem investigation ticket.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| first_name | The customer first name the ticket request is for. Possible values are: . | Required |
-| last_name | The customer last name the ticket request is for. Possible values are: . | Required |
+| first_name | The customer first name the ticket request is for. | Required |
+| last_name | The customer last name the ticket request is for. | Required |
| status | The problem investigation ticket status. Possible values are: Draft, Under Review, Request for Authorization, Assigned, Under Investigation, Pending, Completed, Rejected, Closed, Cancelled. | Required |
-| investigation_driver | The problem investigation ticket driver. . Possible values are: High Impact Incident, Re-Occurring Incidents, Non-Routine Incident, Other. | Required |
-| summary | The problem investigation ticket summary. Possible values are: . | Required |
-| details | The detailed description on the problem investigation ticket. Possible values are: . | Optional |
+| investigation_driver | The problem investigation ticket driver. Possible values are: High Impact Incident, Re-Occurring Incidents, Non-Routine Incident, Other. | Required |
+| summary | The problem investigation ticket summary. | Required |
+| details | The detailed description on the problem investigation ticket. | Optional |
| impact | The problem investigation ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Required |
| urgency | The problem investigation ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Required |
-| target_resolution_date | The future resolution date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| region | The region of the problem investigation location. The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| site_group | The site group of the problem investigation location. The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| site | The site of the problem investigation location. The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assignee_pbm_mgr | The full name of the employee the ticket will be assigned to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assigned_group_pbm_mgr | The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| support_company_pbm_mgr | The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| support_organization_pbm_mgr | The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. Possible values are: . | Optional |
-| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. Possible values are: . | Optional |
-| investigation_justification | The justification for the ticket creation. Possible values are: . | Optional |
-| temporary_workaround | The problem workaround. Possible values are: . | Optional |
-| resolution | The ticket resolution. Possible values are: . | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. Possible values are: . | Optional |
+| target_resolution_date | The future resolution date. For example, in 12 hours, in 7 days. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| region | The region of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| site_group | The site group of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| site | The site of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assignee_pbm_mgr | The full name of the employee the ticket will be assigned to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assigned_group_pbm_mgr | The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| support_company_pbm_mgr | The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| support_organization_pbm_mgr | The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. | Optional |
+| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. | Optional |
+| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. | Optional |
+| investigation_justification | The justification for the ticket creation. | Optional |
+| temporary_workaround | The problem workaround. | Optional |
+| resolution | The ticket resolution. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. | Optional |
#### Context Output
@@ -897,28 +897,28 @@ Updates The problem investigation ticket type.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_request_id | The problem investigation ticket request ID. Possible values are: . | Required |
+| ticket_request_id | The problem investigation ticket request ID. | Required |
| status | The problem investigation ticket status. Possible values are: Draft, Under Review, Request for Authorization, Assigned, Under Investigation, Pending, Completed, Rejected, Closed, Cancelled. | Optional |
-| investigation_driver | The problem investigation ticket driver. . Possible values are: High Impact Incident, Re-Occuring Incidents, Non-Routine Incident, Other. | Optional |
-| summary | The problem investigation ticket summary. Possible values are: . | Optional |
+| investigation_driver | The problem investigation ticket driver. Possible values are: High Impact Incident, Re-Occurring Incidents, Non-Routine Incident, Other. | Optional |
+| summary | The problem investigation ticket summary. | Optional |
| impact | The problem investigation ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The problem investigation ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
-| target_resolution_date | The problem investigation ticket target resolution date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
-| details | The problem investigation ticket detailed description. Possible values are: . | Optional |
-| company | The company associated with the requester. By default it is determined by the logged in user. Possible values are: . | Optional |
-| region | The region of the problem investigation location. The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| site_group | The site group of the problem investigation location. The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| site | The site of the problem investigation location.The arguments region, site_group, and site should be provided together. Possible values are: . | Optional |
-| assigned_to | The technical support person the ticket is assigned to. Possible values are: . | Optional |
-| assigned_group_pbm_mgr | The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| support_company_pbm_mgr | The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| support_organization_pbm_mgr | The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. Possible values are: . | Optional |
-| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. Possible values are: . | Optional |
-| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. Possible values are: . | Optional |
-| investigation_justification | The justification for the ticket creation. Possible values are: . | Optional |
-| temporary_workaround | The problem workaround. Possible values are: . | Optional |
-| resolution | The ticket resolution. Possible values are: . | Optional |
+| target_resolution_date | The problem investigation ticket target resolution date. For example, in 12 hours, in 7 days. | Optional |
+| details | The problem investigation ticket detailed description. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| region | The region of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| site_group | The site group of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| site | The site of the problem investigation location. The arguments region, site_group, and site should be provided together. | Optional |
+| assigned_to | The technical support person the ticket is assigned to. | Optional |
+| assigned_group_pbm_mgr | The group for the problem coordinator’s support organization, which makes up the third tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| support_company_pbm_mgr | The company for the problem coordinator’s support organization, which makes up the first tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| support_organization_pbm_mgr | The organization for the problem coordinator’s support organization, which makes up the second tier of the problem coordinator’s support organization data structure. The arguments support_organization_pbm_mgr, assigned_group_pbm_mgr, and support_company_pbm_mgr should be provided together. | Optional |
+| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. | Optional |
+| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. The arguments assigned_support_organization, assigned_group, and assigned_support_company should be provided together. | Optional |
+| assigned_group | The group for the problem assignee's support organization. It makes up the third tier of the problem assignee's support organization data structure. The arguments assigned_support_organization, assigned_group, and support_company should be provided together. | Optional |
+| investigation_justification | The justification for the ticket creation. | Optional |
+| temporary_workaround | The problem workaround. | Optional |
+| resolution | The ticket resolution. | Optional |
| status_reason | The reason for changing the status. Required when the status argument is provided. Possible values are: Publish, Reject, Not Applicable. | Optional |
@@ -944,25 +944,25 @@ Create known error ticket.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| status | The known error ticket status. Possible values are: Assigned, Scheduled For Correction, Assigned To Vendor, No Action Planned, Corrected, Closed, Cancelled. | Required |
-| summary | The known error ticket summary. Possible values are: . | Required |
-| details | The known error ticket Detailed description. Possible values are: . | Required |
+| summary | The known error ticket summary. | Required |
+| details | The known error ticket Detailed description. | Required |
| impact | The known error ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Required |
| urgency | The known error ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Required |
| view_access | Whether if the ticket is for internal view or public view. Possible values are: Public, Internal. | Required |
-| company | Company associated with the Requester. Possible values are: . | Required |
-| target_resolution_date | Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days. Possible values are: . | Required |
-| resolution | Ticket resolution. Possible values are: . | Optional |
-| assigned_group_pbm_mgr | It makes up the third tier of the Problem Coordinator’s Support Organization data structure. Possible values are: . | Optional |
-| support_company_pbm_mgr | the Company for the Problem Coordinator’s Support Organization. It makes up the first tier of it. . Possible values are: . | Optional |
-| support_organization_pbm_mgr | It makes up the second tier of the Problem Coordinator’s Support Organization data structure. Possible values are: . | Optional |
-| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_group | The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| investigation_justification | The justification for the ticket creation. Possible values are: . | Optional |
-| assignee | The full name of the staff member to whom the ticket will be assigned to. It can be retrieved by using the 'bmc-itsm-user-list' command. Possible values are: . | Optional |
-| assignee_pbm_mgr | The full name of the staff member to whom the ticket will be assign to as the problem coordinator. It can be retrieved by using the 'bmc-itsm-user-list' command. Possible values are: . | Optional |
-| temporary_workaround | Error workaround. Possible values are: . | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. Possible values are: . | Optional |
+| company | Company associated with the Requester. | Required |
+| target_resolution_date | Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days. | Required |
+| resolution | Ticket resolution. | Optional |
+| assigned_group_pbm_mgr | It makes up the third tier of the Problem Coordinator’s Support Organization data structure. | Optional |
+| support_company_pbm_mgr | The company for the Problem Coordinator’s Support Organization. It makes up the first tier of the data structure. | Optional |
+| support_organization_pbm_mgr | It makes up the second tier of the Problem Coordinator’s Support Organization data structure. | Optional |
+| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. | Optional |
+| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. | Optional |
+| assigned_group | The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure. | Optional |
+| investigation_justification | The justification for the ticket creation. | Optional |
+| assignee | The full name of the staff member to whom the ticket will be assigned. It can be retrieved by using the 'bmc-itsm-user-list' command. | Optional |
+| assignee_pbm_mgr | The full name of the staff member to whom the ticket will be assigned as the problem coordinator. It can be retrieved by using the 'bmc-itsm-user-list' command. | Optional |
+| temporary_workaround | Error workaround. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. | Optional |
#### Context Output
@@ -1008,27 +1008,27 @@ Update Known Error ticket type.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ticket_request_id | The known error ticket request ID. Possible values are: . | Required |
+| ticket_request_id | The known error ticket request ID. | Required |
| status | The known error ticket status. Possible values are: Assigned, Scheduled For Correction, Assigned To Vendor, No Action Planned, Corrected, Closed, Cancelled. | Optional |
-| summary | The known error ticket summary. Possible values are: . | Optional |
-| details | The known error ticket detailed description. Possible values are: . | Optional |
+| summary | The known error ticket summary. | Optional |
+| details | The known error ticket detailed description. | Optional |
| impact | The known error ticket impact. Possible values are: 1-Extensive/Widespread, 2-Significant/Large, 3-Moderate/Limited, 4-Minor/Localized. | Optional |
| urgency | The known error ticket urgency. Possible values are: 1-Critical, 2-High, 3-Medium, 4-Low. | Optional |
-| view_access | The known error ticket internal access. . Possible values are: Public, Internal. | Optional |
-| company | Company associated with the Requester. By default is determined by the logged in user. Possible values are: . | Optional |
-| target_resolution_date | Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days. Possible values are: . | Optional |
-| resolution | Ticket resolution. Possible values are: . | Optional |
-| assigned_group_pbm_mgr | It makes up the third tier of the Problem Coordinator’s Support Organization data structure. Possible values are: . | Optional |
-| support_company_pbm_mgr | the Company for the Problem Coordinator’s Support Organization. It makes up the first tier of it. . Possible values are: . | Optional |
-| support_organization_pbm_mgr | It makes up the second tier of the Problem Coordinator’s Support Organization data structure. Possible values are: . | Optional |
-| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| assigned_group | The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure. Possible values are: . | Optional |
-| temporary_workaround | Error workaround. Possible values are: . | Optional |
-| status_reason | The reason for changing the status. Required when the status is provided. . Possible values are: Duplicate, No Longer Applicable, Pending PIR, Funding Not Available, Pending Infrastructure Change, Pending Third Party Vendor. | Optional |
-| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| assignee_pbm_mgr | The full name of the employee the ticket will be assign to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command. Possible values are: . | Optional |
-| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. Possible values are: . | Optional |
+| view_access | The known error ticket internal access. Possible values are: Public, Internal. | Optional |
+| company | Company associated with the Requester. By default it is determined by the logged-in user. | Optional |
+| target_resolution_date | Known error resolution date. Future resolution date. For example, in 12 hours, in 7 days. | Optional |
+| resolution | Ticket resolution. | Optional |
+| assigned_group_pbm_mgr | It makes up the third tier of the Problem Coordinator’s Support Organization data structure. | Optional |
+| support_company_pbm_mgr | The company for the Problem Coordinator’s Support Organization. It makes up the first tier of it. | Optional |
+| support_organization_pbm_mgr | It makes up the second tier of the Problem Coordinator’s Support Organization data structure. | Optional |
+| assigned_support_company | The company for the problem assignee’s support organization. It makes up the first tier of the problem assignee’s support organization data structure. | Optional |
+| assigned_support_organization | The organization for the problem assignee’s support organization. It makes up the second tier of the problem assignee’s support organization data structure. | Optional |
+| assigned_group | The group for the problem assignee’s support organization. It makes up the third tier of the problem assignee’s support organization data structure. | Optional |
+| temporary_workaround | Error workaround. | Optional |
+| status_reason | The reason for changing the status. Required when the status is provided. Possible values are: Duplicate, No Longer Applicable, Pending PIR, Funding Not Available, Pending Infrastructure Change, Pending Third Party Vendor. | Optional |
+| assignee | The full name of the employee the ticket will be assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| assignee_pbm_mgr | The full name of the employee the ticket will be assigned to as the problem coordinator. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Possible fields: Assigned Group, Assignee or any other custom field. | Optional |
#### Context Output
@@ -1052,12 +1052,12 @@ Lists all change requests ticket templates. Useful for creating change request t
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| template_ids | A comma-separated list of change request template IDs. Used as a filtering argument. Possible values are: . | Optional |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| description | The change request ticket description. Used as a filtering argument. Possible values are: . | Optional |
+| template_ids | A comma-separated list of change request template IDs. Used as a filtering argument. | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| description | The change request ticket description. Used as a filtering argument. | Optional |
#### Context Output
@@ -1112,12 +1112,12 @@ Lists all incident requests ticket templates. Useful for create incident tickets
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| description | The incident ticket template description. Used as a filtering argument. Possible values are: . | Optional |
-| template_ids | A comma-separated list of incident template IDs. Used as a filtering argument. Possible values are: . | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| description | The incident ticket template description. Used as a filtering argument. | Optional |
+| template_ids | A comma-separated list of incident template IDs. Used as a filtering argument. | Optional |
#### Context Output
@@ -1172,12 +1172,12 @@ Lists all task ticket templates. Useful for creating task tickets. The records a
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). Possible values are: . | Optional |
-| limit | The maximum number of records to retrieve. Possible values are: . Default is 50. | Optional |
-| page_size | The maximum number of records to retrieve per page. Possible values are: . | Optional |
-| page | The page number of the results to retrieve. Possible values are: . | Optional |
-| template_ids | A comma-separated list of task template IDs. Used as a filtering argument. Possible values are: . | Optional |
-| task_name | The task ticket template name. Used as a filtering argument. Possible values are: . | Optional |
+| query | The query to search by. For example: Status = "Draft" AND Impact = "1-Extensive/Widespread". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| template_ids | A comma-separated list of task template IDs. Used as a filtering argument. | Optional |
+| task_name | The task ticket template name. Used as a filtering argument. | Optional |
#### Context Output
@@ -1256,9 +1256,10 @@ Gets remote data from a remote incident. This method does not update the current
#### Context Output
There is no context output for this command.
+
### get-modified-remote-data
***
-Gets the list of incidents that were modified since the last update time. Note that this method is here for debugging purposes. The get-modified-remote-data command is used as part of a Mirroring feature, which is available from version 6.1.
+Gets the list of incidents that were modified since the last update time. Note that this method is here for debugging purposes. The get-modified-remote-data command is used as part of a Mirroring feature, which is available in Cortex XSOAR from version 6.1.
#### Base Command
@@ -1270,10 +1271,235 @@ Gets the list of incidents that were modified since the last update time. Note t
| --- | --- | --- |
| lastUpdate | A date string in local time representing the last time the incident was updated. The incident is only returned if it was modified after the last update time. | Optional |
+#### Context Output
+
+There is no context output for this command.
+
+
+### bmc-itsm-support-group-list
+
+***
+Lists all support groups. Useful for getting possible (Company, Support Organization, Support Group) triplets.
+
+#### Base Command
+
+`bmc-itsm-support-group-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| company | Company name. Used as a filtering argument. | Optional |
+| support_organization | Support organization name. Used as a filtering argument. | Optional |
+| support_group | Support group name. Used as a filtering argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| BmcITSM.SupportGroup.SupportGroupID | String | The support group ID. |
+| BmcITSM.SupportGroup.Company | String | The support company. |
+| BmcITSM.SupportGroup.SupportOrganization | String | The support organization. |
+| BmcITSM.SupportGroup.SupportGroupName | String | The support group. |
+
+#### Command example
+```!bmc-itsm-support-group-list limit=2```
+#### Context Example
+```json
+{
+ "BmcITSM": {
+ "SupportGroup": [
+ {
+ "Company": "Apex Global",
+ "SupportGroupID": "APX990000000029",
+ "SupportGroupName": "Apex Global - Facilities",
+ "SupportOrganization": "Facilities Support"
+ },
+ {
+ "Company": "Calbro Services",
+ "SupportGroupID": "SGP000000000110",
+ "SupportGroupName": "Application Development / Deployment",
+ "SupportOrganization": "Application Support"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### List support groups.
+>Showing 2 records out of 15.
+>|Support Group ID|Company|Support Organization|Support Group Name|
+>|---|---|---|---|
+>| APX990000000029 | Apex Global | Facilities Support | Apex Global - Facilities |
+>| SGP000000000110 | Calbro Services | Application Support | Application Development / Deployment |
+
+
+### bmc-itsm-work-order-template-list
+
+***
+Lists all work order templates. Useful for creating work orders. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON, you can use the raw_response=true at the end of the command.
+
+#### Base Command
+
+`bmc-itsm-work-order-template-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| query | The query to search by. For example, query="Company like \"BMCOpsMonitoring\"". The query is used in addition to the existing arguments. See the BMC documentation for [building search qualifications](https://docs.bmc.com/docs/ars2008/building-qualifications-and-expressions-929630007.html). | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| page_size | The maximum number of records to retrieve per page. | Optional |
+| page | The page number of the results to retrieve. | Optional |
+| template_ids | A comma-separated list of work order template GUIDs. Used as a filtering argument. | Optional |
+| template_name | The work order template name. Used as a filtering argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| BmcITSM.WorkOrderTemplate.Id | String | The work order template ID. |
+| BmcITSM.WorkOrderTemplate.Name | String | The work order template name. |
+| BmcITSM.WorkOrderTemplate.GUID | String | The work order template GUID. |
+
+#### Command example
+```!bmc-itsm-work-order-template-list limit=2```
+#### Context Example
+```json
+{
+ "BmcITSM": {
+ "WorkOrderTemplate": [
+ {
+ "GUID": "IDGCWH5RDMNSBARVRM5ERVRM5EKP11",
+ "Id": "000000000000002",
+ "Name": "Share Folder Access"
+ },
+ {
+ "GUID": "IDGCWH5RDMNSBARVRNNGRVRNNGKY0X",
+ "Id": "000000000000003",
+ "Name": "New Share Folder Access"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### List work order templates.
+>Showing 2 records out of 9.
+>|Id|Name|GUID|
+>|---|---|---|
+>| 000000000000002 | Share Folder Access | IDGCWH5RDMNSBARVRM5ERVRM5EKP11 |
+>| 000000000000003 | New Share Folder Access | IDGCWH5RDMNSBARVRNNGRVRNNGKY0X |
+
+
+### bmc-itsm-work-order-create
+
+***
+Creates a new work order ticket.
+
+#### Base Command
+
+`bmc-itsm-work-order-create`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| template_guid | The instance GUID of the template to use. The GUID can be retrieved by executing the bmc-itsm-work-order-template-list command. | Optional |
+| first_name | Requester first name. | Optional |
+| last_name | Requester last name. | Optional |
+| customer_first_name | Customer first name. | Required |
+| customer_last_name | Customer last name. | Required |
+| customer_company | Customer company. | Required |
+| customer_person_id | Customer person ID. Use it when customer first and last name pair is not unique. | Optional |
+| summary | The work order summary. | Required |
+| detailed_description | The work order ticket detailed description. | Required |
+| status | The work order status. Possible values are: Assigned, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Required |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Example: additional_fields="Support Company=Calbro Services;Support Organization=IT Support;Support Group Name=Service Desk;Request Assignee=Scully Agent". | Optional |
+| priority | The work order ticket priority. Possible values are: Critical, High, Medium, Low. | Required |
+| work_order_type | The work order ticket type. Possible values are: General, Project. | Optional |
+| location_company | The company associated with the task process. | Required |
+| scedulded_start_date | The work order ticket scheduled future start date. For example, in 12 hours, in 7 days. | Optional |
+| scedulded_end_date | The work order ticket scheduled future end date. For example, in 12 hours, in 7 days. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| BmcITSM.WorkOrder.RequestID | String | The work order ticket unique Request ID. |
+| BmcITSM.WorkOrder.DisplayID | String | The work order ticket unique Display ID. |
+| BmcITSM.WorkOrder.CreateDate | Date | The work order ticket creation date time in UTC. |
+
+#### Command example
+```!bmc-itsm-work-order-create customer_company="Calbro Services" customer_first_name="Scully" customer_last_name="Agent" detailed_description="Easy peasy work order" location_company="Calbro Services" priority=Low status=Pending summary="Easy peasy work order. No, really." customer_person_id=PPL000000000607 additional_fields="Support Company=Calbro Services;Support Organization=IT Support;Support Group Name=Service Desk;Request Assignee=Scully Agent"```
+#### Context Example
+```json
+{
+ "BmcITSM": {
+ "WorkOrder": {
+ "CreateDate": "2024-02-07T08:08:23",
+ "DisplayID": "WO0000000001002",
+ "RequestID": "WO0000000000702"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Work order ticket successfully created.
+>|Create Date|Display ID|Request ID|
+>|---|---|---|
+>| 2024-02-07T08:08:23 | WO0000000001002 | WO0000000000702 |
+
+
+### bmc-itsm-work-order-update
+
+***
+Updates the work order ticket.
+
+#### Base Command
+
+`bmc-itsm-work-order-update`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| request_id | The ID of the work order ticket to update. | Required |
+| summary | The work order ticket summary. | Optional |
+| detailed_description | The work order ticket detailed description. | Optional |
+| priority | The work order ticket priority. Possible values are: Critical, High, Medium, Low. | Optional |
+| status | The work order ticket status. Possible values are: Assigned, Pending, Waiting Approval, Planning, In Progress, Completed, Rejected, Cancelled, Closed. | Optional |
+| status_reason | The reason for changing the ticket status. Possible values are: Initial Status, Awaiting Request Assignee, Client Hold, Client Additional Information Requested, Client Action Required, Support Contact Hold, Local Site Action Required, Purchase Order Approval, Supplier Delivery, Third Party Vendor Action Required, Infrastructure Change, Work not started, Successful, Successful with Issues, Cancelled by Requester, Cancelled by Support, Customer Close, System Close, System Close with Issues. | Optional |
+| company | The company associated with the requester. By default it is determined by the logged in user. | Optional |
+| location_company | The company associated with the work order process. | Optional |
+| assignee | The full name of the employee the work order is assigned to. It can be retrieved by using the bmc-itsm-user-list command. | Optional |
+| support_organization | The organization for the problem assignee's support organization. It makes up the second tier of the problem assignee's support organization data structure. The arguments support_organization, support_group should be provided together. It can be retrieved by using the bmc-itsm-support-group-list command. | Optional |
+| support_group | The group for the problem assignee's support group. It makes up the third tier of the problem assignee's support organization data structure. The arguments support_organization, support_group should be provided together. It can be retrieved by using the bmc-itsm-support-group-list command. | Optional |
+| work_order_type | The work order ticket type. Possible values are: General, Project. | Optional |
+| additional_fields | The fields which are not present in the current argument list can be added here in the format "fieldname1=value;fieldname2=value". Example: additional_fields="Support Company=Calbro Services;Support Organization=IT Support;Support Group Name=Service Desk;Request Assignee=Scully Agent". | Optional |
+| scedulded_start_date | The work order ticket scheduled future start date. For example, in 12 hours, in 7 days. | Optional |
+| scedulded_end_date | The work order ticket scheduled future end date. For example, in 12 hours, in 7 days. | Optional |
#### Context Output
There is no context output for this command.
+#### Command example
+```!bmc-itsm-work-order-update request_id=WO0000000000701 summary="Updated summary" status="In Progress" support_organization="IT Support" support_group="Service Desk"```
+#### Human Readable Output
+
+>Work Order: WO0000000000701 was successfully updated.
+
+
+
## Incident Mirroring
You can enable incident mirroring between Cortex XSOAR incidents and BMC Helix ITSM corresponding events (available from Cortex XSOAR version 6.0.0).
@@ -1292,3 +1518,4 @@ To set up the mirroring:
Newly fetched incidents will be mirrored in the chosen direction. However, this selection does not affect existing incidents.
**Important Note:** To ensure the mirroring works as expected, mappers are required, both for incoming and outgoing, to map the expected fields in Cortex XSOAR and BMC Helix ITSM.
+
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/create_work_order.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/create_work_order.json
new file mode 100644
index 000000000000..26632d657568
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/create_work_order.json
@@ -0,0 +1,14 @@
+{
+ "values": {
+ "Request ID": "000000000000701",
+ "WorkOrder_ID": "WO0000000001001",
+ "Create Date": "2024-02-05T09:11:09.000+0000"
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:WorkOrderInterface_Create"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/get_work_order.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/get_work_order.json
new file mode 100644
index 000000000000..54bbc08e647f
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/get_work_order.json
@@ -0,0 +1,215 @@
+{
+ "values": {
+ "Request ID": "WO0000000000701|WO0000000000701",
+ "Submitter": "Scully_Agent",
+ "Submit Date": "2024-02-05T09:11:09.000+0000",
+ "Assigned To": null,
+ "Last Modified Date": "2024-02-05T09:11:10.000+0000",
+ "Status": "Assigned",
+ "Work Order Template Used": "?",
+ "Assignee Groups": "1000000005;'Scully_Agent';",
+ "InstanceId": "AGGHKPQIDC46ZASI3L8LSI3L8L7B5G",
+ "Record ID": "AGGHKPQIDC46ZASI3L8LSI3L8L7B5G|AGGHKPQIDC46ZASI3L8LSI3L8L7B5G",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "Department": "Customer Service",
+ "Site Group": "United States",
+ "Region": "Americas",
+ "Business Service": null,
+ "LookupKeyword": "MAINWORKORDER",
+ "Site": "Boston Support Center",
+ "WO Type Field 1": null,
+ "WO Type Field 2": null,
+ "WO Type Field 3": null,
+ "WO Type Field 4": null,
+ "WO Type Field 5": null,
+ "WO Type Field 6": null,
+ "WO Type Field 7": null,
+ "WO Type Field 8": null,
+ "WO Type Field 9": null,
+ "SRAttachment": null,
+ "Customer Middle Name": null,
+ "z1D_Command": null,
+ "z1D_WorklogDetails": null,
+ "SRInstanceID": null,
+ "zTmpEventGUID": null,
+ "CustomerFullName": "Scully Agent",
+ "z1D_Activity Type*": null,
+ "z1D_Summary": null,
+ "z1D_Details": null,
+ "z1D_Secure_Log": null,
+ "z1D_View_Access": null,
+ "Attachment 1": null,
+ "WorkOrderID": "WO0000000000701",
+ "SRMSRegistryInstanceID": null,
+ "SRMSAOIGuid": null,
+ "SRID": null,
+ "TemplateID": null,
+ "z1D_CommunicationSource": null,
+ "z1D_ActivityDate_tab": null,
+ "z1D_WorkInfoViewAccess": null,
+ "z1D Action WO1": null,
+ "z1D Action WO2": null,
+ "Needs Attention": null,
+ "Requestor_By_ID": "Scully_Agent",
+ "ClientLocale": null,
+ "WO Type Field 10": null,
+ "WO Type Field 11": null,
+ "WO Type Field 12": null,
+ "WO Type Field 13": null,
+ "WO Type Field 14": null,
+ "WO Type Field 15": null,
+ "WO Type Field 16": null,
+ "WO Type Field 17": null,
+ "WO Type Field 18": null,
+ "WO Type Field 19": null,
+ "WO Type Field 20": null,
+ "WO Type Field 21": null,
+ "WO Type Field 22": null,
+ "WO Type Field 23": null,
+ "WO Type Field 24": null,
+ "WO Type Field 25": null,
+ "WO Type Field 26": null,
+ "WO Type Field 27": null,
+ "WO Type Field 28": null,
+ "WO Type Field 29": null,
+ "WO Type Field 30": null,
+ "Previous_ServiceCI_ReconID": null,
+ "z1D_SR_Instanceid": null,
+ "WO Type Field 10 Label": null,
+ "WO Type Field 11 Label": null,
+ "WO Type Field 12 Label": null,
+ "WO Type Field 13 Label": null,
+ "WO Type Field 14 Label": null,
+ "WO Type Field 15 Label": null,
+ "WO Type Field 16 Label": null,
+ "WO Type Field 17 Label": null,
+ "WO Type Field 18 Label": null,
+ "WO Type Field 19 Label": null,
+ "WO Type Field 20 Label": null,
+ "WO Type Field 21 Label": null,
+ "WO Type Field 22 Label": null,
+ "WO Type Field 23 Label": null,
+ "WO Type Field 24 Label": null,
+ "WO Type Field 25 Label": null,
+ "WO Type Field 26 Label": null,
+ "WO Type Field 27 Label": null,
+ "WO Type Field 28 Label": null,
+ "WO Type Field 29 Label": null,
+ "WO Type Field 30 Label": null,
+ "z1D_WorkInfoSubmitter": null,
+ "AttachmentSourceFormName": null,
+ "AttachmentSourceGUID": null,
+ "AttachmentSubmitter": null,
+ "SRWorkInfoType": null,
+ "z1D_ConfirmGroup": null,
+ "CreatedFromBackEndSynchWI": null,
+ "WO Type Field 48": null,
+ "WO Type Field 49": null,
+ "WO Type Field 50": null,
+ "WO Type Field 51": null,
+ "WO Type Field 48 Label": null,
+ "WO Type Field 49 Label": null,
+ "WO Type Field 50 Label": null,
+ "WO Type Field 51 Label": null,
+ "Chat Session ID": null,
+ "Broker Vendor Name": null,
+ "NeedsAttentionCCS_Setting": "false",
+ "Automation Status": "Manual",
+ "RequestCreatedFromDWP": "No",
+ "DWP_SRID": null,
+ "DWP_SRInstanceID": null,
+ "WO Type Field 01 Label": null,
+ "WO Type Field 02 Label": null,
+ "WO Type Field 03 Label": null,
+ "WO Type Field 04 Label": null,
+ "WO Type Field 05 Label": null,
+ "WO Type Field 06 Label": null,
+ "WO Type Field 07 Label": null,
+ "WO Type Field 08 Label": null,
+ "WO Type Field 09 Label": null,
+ "z1D Char09": null,
+ "CI_DatasetId": "BMC.ASSET",
+ "CI_ReconId": null,
+ "Description": "Sample WO 20240205",
+ "Location Company": "Calbro Services",
+ "Organization": "Information Technology",
+ "Support Organization": "IT Support",
+ "Support Group Name": "Service Desk",
+ "Last Name": "Agent",
+ "First Name": "Scully",
+ "Middle Initial": null,
+ "VIP": "No",
+ "Chg Location Address": "350 Seventh Avenue, 18th Floor\r\nBoston, Massachusetts 02101\r\nUnited States",
+ "Internet E-mail": null,
+ "Phone Number": "###",
+ "z1D Char01": null,
+ "Categorization Tier 1": null,
+ "Categorization Tier 2": null,
+ "Categorization Tier 3": null,
+ "z1D Char02": null,
+ "z1D Char03": null,
+ "z1D Char04": null,
+ "z1D_Action": null,
+ "z1D Integer01": null,
+ "Support Group ID": "SGP000000000011",
+ "Person ID": "PPL000000000607",
+ "Company": "Calbro Services",
+ "z1D Char05": null,
+ "z1D Char06": null,
+ "Add Request For:": "Individual",
+ "z1D Char07": null,
+ "z1D Char08": null,
+ "z1D Integer02": null,
+ "z1D Integer03": null,
+ "z1D Char10": null,
+ "Status Reason": null,
+ "Detailed Description": "Sample WO 20240205",
+ "Priority": "Low",
+ "Work Order Type": "General",
+ "Work Order ID": "WO0000000001001",
+ "Company3": "Calbro Services",
+ "Requestor ID": "Scully_Agent",
+ "Support Group Name2": null,
+ "Support Organization2": null,
+ "Actual Start Date": null,
+ "Scheduled Start Date": null,
+ "Scheduled End Date": null,
+ "Actual End Date": null,
+ "Number of Attachments": null,
+ "CAB Manager ( Change Co-ord )": "Edward Agent",
+ "CAB Manager Login": "Edward Agent",
+ "Support Group ID 2": "SGP000000000011",
+ "Requested By Person ID": "PPL000000000607",
+ "z1D_Worklog Type": null,
+ "Product Cat Tier 1(2)": null,
+ "Product Cat Tier 2 (2)": null,
+ "Product Cat Tier 3 (2)": null,
+ "Completed Date": null,
+ "Product Name (2)": null,
+ "Product Model/Version (2)": null,
+ "Manufacturer (2)": null,
+ "ASORG": "IT Support",
+ "ASCPY": "Calbro Services",
+ "ASGRP": "Service Desk",
+ "ASCHG": "Cyrus Agent",
+ "ASLOGID": "Cyrus Agent",
+ "ASGRPID": "SGP000000000011",
+ "Customer Person ID": "PPL000000000607",
+ "Customer First Name": "Scully",
+ "Customer Last Name": "Agent",
+ "Customer Company": "Calbro Services",
+ "Customer Organization": "Information Technology",
+ "Customer Department": "Customer Service",
+ "Customer Internet E-mail": null,
+ "Customer Phone Number": "###"
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:WorkOrderInterface/WO0000000000701%7CWO0000000000701"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group.json
new file mode 100644
index 000000000000..8dc5e7871da5
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group.json
@@ -0,0 +1,160 @@
+{
+ "entries": [
+ {
+ "values": {
+ "Support Group ID": "APX990000000029",
+ "Submitter": "Remedy Application Service",
+ "Submit Date": "2023-04-27T20:05:08.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Demo",
+ "Last Modified Date": "2023-04-28T01:14:00.000+0000",
+ "Status": "Enabled",
+ "Short Description": ".",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "1000000042;1000000043;",
+ "instanceId": "AGGADGJBG3LAPX99SZPOQRSZPOVUT0",
+ "Record ID": "AGGADGJBG3LAPX99SZPOQRSZPOVUT0",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G InitComplete": null,
+ "z1G_DefaultCompany": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_Global_HNS": null,
+ "Permission_Group_Enabled": "Yes",
+ "DataTags": "BWFData",
+ "z1D_schemaID": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "z1D_SupportGroupRole": null,
+ "z1D_Char04": null,
+ "DisableGroupNotification": "No",
+ "GroupNotificationEmail": null,
+ "Notification_Locale": "en_US",
+ "z1D_ConfirmGroup": null,
+ "Parent Group": null,
+ "Confidential Support Group": "No",
+ "Company_SortOrder": 100,
+ "Description": null,
+ "Company": "Apex Global",
+ "Support Organization": "Facilities Support",
+ "Support Group Name": "Apex Global - Facilities",
+ "z1D Char01": null,
+ "z1D Char02": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "z1D Lastcount": null,
+ "z1D Role": null,
+ "Assigned Support Company": null,
+ "z1D Date01": null,
+ "Shifts Flag": "No",
+ "Business Workdays Tag": null,
+ "Business Holidays Tag": null,
+ "Support Group Role": "Line of Business",
+ "Uses SLA": "Yes",
+ "Uses OLA": "Yes",
+ "On Call Group Flag": "No",
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Association Action01": null,
+ "Vendor Group": "No",
+ "Navigation Menu01": "Apex Global",
+ "Navigation Menu02": null,
+ "Navigation Menu03": null,
+ "z1D_Display_Locale": null,
+ "z1D Association Action01_locale": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group/APX990000000029"
+ }
+ ]
+ }
+ },
+ {
+ "values": {
+ "Support Group ID": "SGP000000000110",
+ "Submitter": "activationuser",
+ "Submit Date": "2023-05-10T17:34:37.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Remedy Application Service",
+ "Last Modified Date": "2023-05-10T17:34:37.000+0000",
+ "Status": "Enabled",
+ "Short Description": ".",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "1000000001;",
+ "instanceId": "AGGCWH5RDMNSBARU64APRU64APAVH7",
+ "Record ID": "AGGCWH5RDMNSBARU64APRU64APAVH7",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G InitComplete": null,
+ "z1G_DefaultCompany": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_Global_HNS": null,
+ "Permission_Group_Enabled": "Yes",
+ "DataTags": null,
+ "z1D_schemaID": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "z1D_SupportGroupRole": null,
+ "z1D_Char04": null,
+ "DisableGroupNotification": "No",
+ "GroupNotificationEmail": null,
+ "Notification_Locale": null,
+ "z1D_ConfirmGroup": null,
+ "Parent Group": null,
+ "Confidential Support Group": "No",
+ "Company_SortOrder": 100,
+ "Description": null,
+ "Company": "Calbro Services",
+ "Support Organization": "Application Support",
+ "Support Group Name": "Application Development / Deployment",
+ "z1D Char01": null,
+ "z1D Char02": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "z1D Lastcount": null,
+ "z1D Role": null,
+ "Assigned Support Company": null,
+ "z1D Date01": null,
+ "Shifts Flag": "No",
+ "Business Workdays Tag": null,
+ "Business Holidays Tag": null,
+ "Support Group Role": "Tier 1",
+ "Uses SLA": "Yes",
+ "Uses OLA": "Yes",
+ "On Call Group Flag": "No",
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Association Action01": null,
+ "Vendor Group": "No",
+ "Navigation Menu01": "Calbro Services",
+ "Navigation Menu02": null,
+ "Navigation Menu03": null,
+ "z1D_Display_Locale": null,
+ "z1D Association Action01_locale": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group/SGP000000000110"
+ }
+ ]
+ }
+ }
+ ],
+ "_links": {
+ "next": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group?offset=2&limit=2"
+ }
+ ],
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group?limit=2"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group_filter.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group_filter.json
new file mode 100644
index 000000000000..dcf6ed9b50fc
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_support_group_filter.json
@@ -0,0 +1,83 @@
+{
+ "entries": [
+ {
+ "values": {
+ "Support Group ID": "APX990000000029",
+ "Submitter": "Remedy Application Service",
+ "Submit Date": "2023-04-27T20:05:08.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Demo",
+ "Last Modified Date": "2023-04-28T01:14:00.000+0000",
+ "Status": "Enabled",
+ "Short Description": ".",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "1000000042;1000000043;",
+ "instanceId": "AGGADGJBG3LAPX99SZPOQRSZPOVUT0",
+ "Record ID": "AGGADGJBG3LAPX99SZPOQRSZPOVUT0",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G InitComplete": null,
+ "z1G_DefaultCompany": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_Global_HNS": null,
+ "Permission_Group_Enabled": "Yes",
+ "DataTags": "BWFData",
+ "z1D_schemaID": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "z1D_SupportGroupRole": null,
+ "z1D_Char04": null,
+ "DisableGroupNotification": "No",
+ "GroupNotificationEmail": null,
+ "Notification_Locale": "en_US",
+ "z1D_ConfirmGroup": null,
+ "Parent Group": null,
+ "Confidential Support Group": "No",
+ "Company_SortOrder": 100,
+ "Description": null,
+ "Company": "Apex Global",
+ "Support Organization": "Facilities Support",
+ "Support Group Name": "Apex Global - Facilities",
+ "z1D Char01": null,
+ "z1D Char02": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "z1D Lastcount": null,
+ "z1D Role": null,
+ "Assigned Support Company": null,
+ "z1D Date01": null,
+ "Shifts Flag": "No",
+ "Business Workdays Tag": null,
+ "Business Holidays Tag": null,
+ "Support Group Role": "Line of Business",
+ "Uses SLA": "Yes",
+ "Uses OLA": "Yes",
+ "On Call Group Flag": "No",
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Association Action01": null,
+ "Vendor Group": "No",
+ "Navigation Menu01": "Apex Global",
+ "Navigation Menu02": null,
+ "Navigation Menu03": null,
+ "z1D_Display_Locale": null,
+ "z1D Association Action01_locale": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group/APX990000000029"
+ }
+ ]
+ }
+ }
+ ],
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/CTM:Support%20Group?q=%27Company%27%20like%20%22%25Apex%25%22"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_work_order.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_work_order.json
new file mode 100644
index 000000000000..8f11d65c86bd
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_work_order.json
@@ -0,0 +1,226 @@
+{
+ "entries": [
+ {
+ "values": {
+ "Request ID": "WO0000000000003|WO0000000000003",
+ "Submitter": "Remedy Application Service",
+ "Submit Date": "2023-06-05T07:25:09.000+0000",
+ "Assigned To": null,
+ "Last Modified Date": "2023-06-05T07:25:10.000+0000",
+ "Status": "Assigned",
+ "Work Order Template Used": "New Share Folder Access",
+ "Assignee Groups": "1000000005;'Arthur Agent';",
+ "InstanceId": "IDGCWH5RDMNSBARVRRJXRVRRJXLQUZ",
+ "Record ID": "IDGCWH5RDMNSBARVRRJXRVRRJXLQUZ|IDGCWH5RDMNSBARVRRJXRVRRJXLQUZ",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "Department": "Customer Service",
+ "Site Group": "Amsterdam",
+ "Region": "Europe",
+ "Business Service": null,
+ "LookupKeyword": "MAINWORKORDER",
+ "Site": "Amsterdam Support Center",
+ "WO Type Field 1": "TestA16",
+ "WO Type Field 2": "Sharefolder",
+ "WO Type Field 3": "read",
+ "WO Type Field 4": null,
+ "WO Type Field 5": null,
+ "WO Type Field 6": null,
+ "WO Type Field 7": null,
+ "WO Type Field 8": null,
+ "WO Type Field 9": null,
+ "SRAttachment": null,
+ "Customer Middle Name": null,
+ "z1D_Command": null,
+ "z1D_WorklogDetails": null,
+ "SRInstanceID": "SRGCWH5RDMNSBARVRRJWRVRRJWLQTI",
+ "zTmpEventGUID": "CAGCWH5RDMNSBARVRRJXRVRRJXLQXE",
+ "CustomerFullName": "Arthur Agent",
+ "z1D_Activity Type*": null,
+ "z1D_Summary": null,
+ "z1D_Details": null,
+ "z1D_Secure_Log": null,
+ "z1D_View_Access": null,
+ "Attachment 1": null,
+ "WorkOrderID": "WO0000000000003",
+ "SRMSRegistryInstanceID": "SR0011439CCAD4ec8UQwCkOLAQlQAA",
+ "SRMSAOIGuid": "AGGCWH5RDMNSBARVRRJXRVRRJXLQUY",
+ "SRID": "REQ000000000007",
+ "TemplateID": "IDGCWH5RDMNSBARVRNNGRVRNNGKY0X",
+ "z1D_CommunicationSource": null,
+ "z1D_ActivityDate_tab": null,
+ "z1D_WorkInfoViewAccess": null,
+ "z1D Action WO1": null,
+ "z1D Action WO2": null,
+ "Needs Attention": null,
+ "Requestor_By_ID": "Arthur Agent",
+ "ClientLocale": null,
+ "WO Type Field 10": null,
+ "WO Type Field 11": null,
+ "WO Type Field 12": null,
+ "WO Type Field 13": null,
+ "WO Type Field 14": null,
+ "WO Type Field 15": null,
+ "WO Type Field 16": null,
+ "WO Type Field 17": null,
+ "WO Type Field 18": null,
+ "WO Type Field 19": null,
+ "WO Type Field 20": null,
+ "WO Type Field 21": null,
+ "WO Type Field 22": null,
+ "WO Type Field 23": null,
+ "WO Type Field 24": null,
+ "WO Type Field 25": null,
+ "WO Type Field 26": null,
+ "WO Type Field 27": null,
+ "WO Type Field 28": null,
+ "WO Type Field 29": null,
+ "WO Type Field 30": null,
+ "Previous_ServiceCI_ReconID": null,
+ "z1D_SR_Instanceid": null,
+ "WO Type Field 10 Label": ".",
+ "WO Type Field 11 Label": ".",
+ "WO Type Field 12 Label": ".",
+ "WO Type Field 13 Label": ".",
+ "WO Type Field 14 Label": ".",
+ "WO Type Field 15 Label": ".",
+ "WO Type Field 16 Label": ".",
+ "WO Type Field 17 Label": ".",
+ "WO Type Field 18 Label": ".",
+ "WO Type Field 19 Label": ".",
+ "WO Type Field 20 Label": ".",
+ "WO Type Field 21 Label": ".",
+ "WO Type Field 22 Label": ".",
+ "WO Type Field 23 Label": ".",
+ "WO Type Field 24 Label": ".",
+ "WO Type Field 25 Label": ".",
+ "WO Type Field 26 Label": ".",
+ "WO Type Field 27 Label": ".",
+ "WO Type Field 28 Label": ".",
+ "WO Type Field 29 Label": ".",
+ "WO Type Field 30 Label": ".",
+ "z1D_WorkInfoSubmitter": null,
+ "AttachmentSourceFormName": null,
+ "AttachmentSourceGUID": null,
+ "AttachmentSubmitter": null,
+ "SRWorkInfoType": null,
+ "z1D_ConfirmGroup": null,
+ "CreatedFromBackEndSynchWI": null,
+ "WO Type Field 48": null,
+ "WO Type Field 49": null,
+ "WO Type Field 50": null,
+ "WO Type Field 51": null,
+ "WO Type Field 48 Label": ".",
+ "WO Type Field 49 Label": ".",
+ "WO Type Field 50 Label": ".",
+ "WO Type Field 51 Label": ".",
+ "Chat Session ID": null,
+ "Broker Vendor Name": null,
+ "NeedsAttentionCCS_Setting": "false",
+ "Automation Status": "Manual",
+ "RequestCreatedFromDWP": "No",
+ "DWP_SRID": null,
+ "DWP_SRInstanceID": null,
+ "WO Type Field 01 Label": "Username",
+ "WO Type Field 02 Label": "Sharefolder Name",
+ "WO Type Field 03 Label": "Access Type",
+ "WO Type Field 04 Label": ".",
+ "WO Type Field 05 Label": ".",
+ "WO Type Field 06 Label": ".",
+ "WO Type Field 07 Label": ".",
+ "WO Type Field 08 Label": ".",
+ "WO Type Field 09 Label": ".",
+ "z1D Char09": null,
+ "CI_DatasetId": "BMC.ASSET",
+ "CI_ReconId": null,
+ "Description": "New Share Folder Access",
+ "Location Company": "Calbro Services",
+ "Organization": "Information Technology",
+ "Support Organization": "IT Support",
+ "Support Group Name": "Service Desk",
+ "Last Name": "Agent",
+ "First Name": "Arthur",
+ "Middle Initial": null,
+ "VIP": "No",
+ "Chg Location Address": "Boeing Avenue 245 \r\nSchiphol-Rijk, Amsterdam 1119 PD \r\nNetherlands",
+ "Internet E-mail": "info@bmc.com",
+ "Phone Number": "###",
+ "z1D Char01": null,
+ "Categorization Tier 1": null,
+ "Categorization Tier 2": null,
+ "Categorization Tier 3": null,
+ "z1D Char02": null,
+ "z1D Char03": null,
+ "z1D Char04": null,
+ "z1D_Action": null,
+ "z1D Integer01": null,
+ "Support Group ID": "SGP000000000011",
+ "Person ID": "PPL000000000106",
+ "Company": "Calbro Services",
+ "z1D Char05": null,
+ "z1D Char06": null,
+ "Add Request For:": "Individual",
+ "z1D Char07": null,
+ "z1D Char08": null,
+ "z1D Integer02": null,
+ "z1D Integer03": null,
+ "z1D Char10": null,
+ "Status Reason": null,
+ "Detailed Description": "New Share Folder Access ",
+ "Priority": "Low",
+ "Work Order Type": "General",
+ "Work Order ID": "WO0000000000009",
+ "Company3": "Calbro Services",
+ "Requestor ID": "Arthur Agent",
+ "Support Group Name2": null,
+ "Support Organization2": null,
+ "Actual Start Date": null,
+ "Scheduled Start Date": null,
+ "Scheduled End Date": null,
+ "Actual End Date": null,
+ "Number of Attachments": null,
+ "CAB Manager ( Change Co-ord )": "Arthur Agent",
+ "CAB Manager Login": "Arthur Agent",
+ "Support Group ID 2": "SGP000000000011",
+ "Requested By Person ID": "PPL000000000106",
+ "z1D_Worklog Type": null,
+ "Product Cat Tier 1(2)": null,
+ "Product Cat Tier 2 (2)": null,
+ "Product Cat Tier 3 (2)": null,
+ "Completed Date": null,
+ "Product Name (2)": null,
+ "Product Model/Version (2)": null,
+ "Manufacturer (2)": null,
+ "ASORG": "IT Support",
+ "ASCPY": "Calbro Services",
+ "ASGRP": "Service Desk",
+ "ASCHG": "Test Admin",
+ "ASLOGID": "testadmin",
+ "ASGRPID": "SGP000000000011",
+ "Customer Person ID": "PPL000000000106",
+ "Customer First Name": "Arthur",
+ "Customer Last Name": "Agent",
+ "Customer Company": "Calbro Services",
+ "Customer Organization": "Information Technology",
+ "Customer Department": "Customer Service",
+ "Customer Internet E-mail": "info@bmc.com",
+ "Customer Phone Number": "###"
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:WorkOrderInterface/WO0000000000003%7CWO0000000000003"
+ }
+ ]
+ }
+ }
+ ],
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:WorkOrderInterface?limit=2&q=%27Work%20Order%20ID%27%20like%20%22WO0000000000009%22"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template.json
new file mode 100644
index 000000000000..bbc42413de01
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template.json
@@ -0,0 +1,522 @@
+{
+ "entries": [
+ {
+ "values": {
+ "Request ID": "000000000000002",
+ "Submitter": "Arthur Agent",
+ "Submit Date": "2023-06-05T05:45:00.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Arthur Agent",
+ "Last Modified Date": "2023-06-05T06:01:11.000+0000",
+ "Status": "Enabled",
+ "Template Name": "Share Folder Access",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "15031;1000000001;",
+ "GUID": "IDGCWH5RDMNSBARVRM5ERVRM5EKP11",
+ "Record ID": "AGGCWH5RDMNSBARVRMNARVRMNAKQ5G",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G_DefaultCompany": null,
+ "z1G_Global_AST_ProductionDataSetID": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_TMSDirtyStructureFlag": null,
+ "Notes": null,
+ "zTmpAuditAction": null,
+ "zTmpAuditOldStatus": null,
+ "zTmpAuditWorkspace": null,
+ "zTmpNonAuditable": null,
+ "zTmpTableLoadedMask": null,
+ "zTmpRequestID": null,
+ "Change in Sequence": null,
+ "z1D_Integer": null,
+ "z1D_ChildSequenceChange": null,
+ "z1D_SelectedTaskId": null,
+ "z1D_Task Event Code": null,
+ "z1D_TaskSubtype": null,
+ "z1D_Task instance Id": null,
+ "z1D_Task Id": null,
+ "z1D_Process Id": null,
+ "z1D_Task Event Info 1": null,
+ "z1D_Task Event Info 2": null,
+ "z1D_Task Event Info 3": null,
+ "z1D_Task Event Info 4": null,
+ "z1D_Task Event Info 5": null,
+ "z1D_Task Event Info 6": null,
+ "z1D_NextSequence": null,
+ "z1D_SelectedTaskGroupId": null,
+ "z1D_TypeSelector": null,
+ "z1D_ChildTaskTG": null,
+ "zTmpInstanceNum": null,
+ "zTmpPredecessorID": null,
+ "zTmpSuccessorID": null,
+ "zTmpFlowID": null,
+ "zTmpStart": null,
+ "zTmp_SelectOne": null,
+ "Name": "Share Folder Access",
+ "Category": null,
+ "z1D_TaskGroupType": null,
+ "z1D_Child Type": null,
+ "Product Categorization Tier 1": null,
+ "Product Categorization Tier 2": null,
+ "Product Categorization Tier 3": null,
+ "Site Group": null,
+ "Region": null,
+ "Business Service": null,
+ "Product Name": null,
+ "Site": null,
+ "z1D_Dirty Flag": null,
+ "z1D_DirtyFlagAssociations": null,
+ "Topic": null,
+ "MappedTemplateInstanceId": "IDGCWH5RDMNSBARVRM5ERVRM5EKP11",
+ "MappedTemplate_Name": "Share Folder Access",
+ "MappedTemplatedDescription": "Share Folder Access",
+ "Used_by_SRMS": null,
+ "Assignment_Accelerator_Assignee": null,
+ "Assignment_Accelerator_Manager": null,
+ "DataTags": null,
+ "z1D_StatusQuerySelection": null,
+ "z1D Current Database Status": null,
+ "z1D_TaskTableRefreshDone": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "WO Type Field 10": null,
+ "WO Type Field 11": null,
+ "WO Type Field 12": null,
+ "WO Type Field 13": null,
+ "WO Type Field 14": null,
+ "WO Type Field 15": null,
+ "WO Type Field 16": null,
+ "WO Type Field 17": null,
+ "WO Type Field 18": null,
+ "WO Type Field 19": null,
+ "WO Type Field 20": null,
+ "WO Type Field 21": null,
+ "WO Type Field 22": null,
+ "WO Type Field 23": null,
+ "WO Type Field 24": 0,
+ "WO Type Field 25": null,
+ "WO Type Field 26": null,
+ "WO Type Field 27": null,
+ "WO Type Field 28": null,
+ "WO Type Field 29": null,
+ "WO Type Field 30": null,
+ "WO Type Field 10 Label": ".",
+ "WO Type Field 11 Label": ".",
+ "WO Type Field 12 Label": ".",
+ "WO Type Field 13 Label": ".",
+ "WO Type Field 14 Label": ".",
+ "WO Type Field 15 Label": ".",
+ "WO Type Field 16 Label": ".",
+ "WO Type Field 17 Label": ".",
+ "WO Type Field 18 Label": ".",
+ "WO Type Field 19 Label": ".",
+ "WO Type Field 20 Label": ".",
+ "WO Type Field 21 Label": ".",
+ "WO Type Field 22 Label": ".",
+ "WO Type Field 23 Label": ".",
+ "WO Type Field 24 Label": ".",
+ "WO Type Field 25 Label": ".",
+ "WO Type Field 26 Label": ".",
+ "WO Type Field 27 Label": ".",
+ "WO Type Field 28 Label": ".",
+ "WO Type Field 29 Label": ".",
+ "WO Type Field 30 Label": ".",
+ "z1D_ExternalQual": null,
+ "z1D_remove_colcount": 0,
+ "z1D_ImportBypass": null,
+ "z1D_Int_ColCount": null,
+ "z1D_company": "BMCOpsMonitoring",
+ "z1D_OldLocationCompanyGroupID": null,
+ "z1D_NewLocationCompanyGroupID": "1000000001",
+ "ASLOGID": null,
+ "Parent_Job_GUID": null,
+ "z1D_Company_Get": "BMCOpsMonitoring",
+ "z1G_WOTaskDisplayOptionSettings": "Yes",
+ "z1D_ConfirmGroup": null,
+ "z1D_FLagOpenWindow": null,
+ "z1D template": null,
+ "z1D ExtQualification": null,
+ "Mark Records for Delete": null,
+ "ObsoleteDataDelete": null,
+ "WO Type Field 48": null,
+ "WO Type Field 49": null,
+ "WO Type Field 50": null,
+ "WO Type Field 51": null,
+ "WO Type Field 48 Label": ".",
+ "WO Type Field 49 Label": ".",
+ "WO Type Field 50 Label": ".",
+ "WO Type Field 51 Label": ".",
+ "z1D ValidateInfo": null,
+ "z1d_Search_Template_Name": null,
+ "Automation Status": "Manual",
+ "z1D_FTS_ScanTime": -3600,
+ "zTmp_ExpCommonInstanceID": null,
+ "WO Type Field 1 Label": "Username",
+ "WO Type Field 2 Label": "Sharefolder Name",
+ "WO Type Field 3 Label": "Access Type",
+ "WO Type Field 4 Label": ".",
+ "WO Type Field 5 Label": ".",
+ "WO Type Field 6 Label": ".",
+ "WO Type Field 7 Label": ".",
+ "WO Type Field 8 Label": ".",
+ "WO Type Field 9 Label": ".",
+ "WO Type Field 1 Value": null,
+ "WO Type Field 2 Value": null,
+ "WO Type Field 3 Value": null,
+ "WO Type Field 4 Value": null,
+ "WO Type Field 5 Value": null,
+ "WO Type Field 6 Value": null,
+ "WO Type Field 7 Value": null,
+ "WO Type Field 8 Value": null,
+ "WO Type Field 9 Value": null,
+ "z1D Char09": null,
+ "z1D_Integer_2": null,
+ "Application GUID": null,
+ "Property Value": null,
+ "CI_ReconId": null,
+ "Summary": "Share Folder Access",
+ "Location Company": "BMCOpsMonitoring",
+ "Support Organization": null,
+ "Support Group Name": null,
+ "Chg Location Address": null,
+ "z1D Char01": null,
+ "Categorization Tier 1": null,
+ "Categorization Tier 2": null,
+ "Categorization Tier 3": null,
+ "z1D Char02": null,
+ "z1D Char03": null,
+ "z1D Char04": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "Support Group ID": null,
+ "Company": "Calbro Services",
+ "z1D Char05": null,
+ "z1D Char06": null,
+ "Add Request For:": "Organization",
+ "z1D Char07": null,
+ "z1D Char08": null,
+ "z1D Char29": null,
+ "z1D Char10": null,
+ "z1D Char11": null,
+ "z1D Char12": null,
+ "z1D Char13": null,
+ "z1D Char14": null,
+ "z1D Lastcount": null,
+ "z1D Char15": null,
+ "Work Order Type": "General",
+ "Company3": null,
+ "z1D Char16": null,
+ "CAB Manager ( Change Co-ord )": null,
+ "CAB Manager Login": null,
+ "z1D Char17": null,
+ "z1D Date03": null,
+ "z1D Date04": null,
+ "Author Group ID": null,
+ "z1D Char18": null,
+ "z1D Char19": null,
+ "z1D Char20": null,
+ "z1D Char21": null,
+ "z1D Char22": null,
+ "Product Cat Tier 1(2)": null,
+ "Product Cat Tier 2 (2)": null,
+ "Product Cat Tier 3 (2)": null,
+ "Authoring Group": null,
+ "Authoring Organization": null,
+ "Authoring Company": null,
+ "z1D Authoring Company": null,
+ "z1D Authoring Organization": null,
+ "z1D Authoring Group": null,
+ "z1D Char23": null,
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Char24": null,
+ "z1D Char25": null,
+ "Product Name (2)": null,
+ "Product Model/Version (2)": null,
+ "Manufacturer (2)": null,
+ "z1D IC Status": null,
+ "z1D Task Action": null,
+ "ASORG": null,
+ "ASCPY": null,
+ "ASGRP": null,
+ "ASCHG": null,
+ "ASGRPID": null,
+ "z1D Task Exist": null,
+ "z1D Task Status": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template/000000000000002"
+ }
+ ]
+ }
+ },
+ {
+ "values": {
+ "Request ID": "000000000000003",
+ "Submitter": "Arthur Agent",
+ "Submit Date": "2023-06-05T06:07:52.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Arthur Agent",
+ "Last Modified Date": "2023-06-12T12:14:30.000+0000",
+ "Status": "Enabled",
+ "Template Name": "New Share Folder Access",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "1000000001;",
+ "GUID": "IDGCWH5RDMNSBARVRNNGRVRNNGKY0X",
+ "Record ID": "AGGCWH5RDMNSBARVRNPERVRNPEKYLI",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G_DefaultCompany": null,
+ "z1G_Global_AST_ProductionDataSetID": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_TMSDirtyStructureFlag": null,
+ "Notes": "New Share Folder Access",
+ "zTmpAuditAction": null,
+ "zTmpAuditOldStatus": null,
+ "zTmpAuditWorkspace": null,
+ "zTmpNonAuditable": null,
+ "zTmpTableLoadedMask": null,
+ "zTmpRequestID": null,
+ "Change in Sequence": null,
+ "z1D_Integer": null,
+ "z1D_ChildSequenceChange": null,
+ "z1D_SelectedTaskId": null,
+ "z1D_Task Event Code": null,
+ "z1D_TaskSubtype": null,
+ "z1D_Task instance Id": null,
+ "z1D_Task Id": null,
+ "z1D_Process Id": null,
+ "z1D_Task Event Info 1": null,
+ "z1D_Task Event Info 2": null,
+ "z1D_Task Event Info 3": null,
+ "z1D_Task Event Info 4": null,
+ "z1D_Task Event Info 5": null,
+ "z1D_Task Event Info 6": null,
+ "z1D_NextSequence": null,
+ "z1D_SelectedTaskGroupId": null,
+ "z1D_TypeSelector": null,
+ "z1D_ChildTaskTG": null,
+ "zTmpInstanceNum": null,
+ "zTmpPredecessorID": null,
+ "zTmpSuccessorID": null,
+ "zTmpFlowID": null,
+ "zTmpStart": null,
+ "zTmp_SelectOne": null,
+ "Name": "New Share Folder Access",
+ "Category": null,
+ "z1D_TaskGroupType": null,
+ "z1D_Child Type": null,
+ "Product Categorization Tier 1": null,
+ "Product Categorization Tier 2": null,
+ "Product Categorization Tier 3": null,
+ "Site Group": null,
+ "Region": null,
+ "Business Service": null,
+ "Product Name": null,
+ "Site": null,
+ "z1D_Dirty Flag": null,
+ "z1D_DirtyFlagAssociations": null,
+ "Topic": null,
+ "MappedTemplateInstanceId": "IDGCWH5RDMNSBARVRNNGRVRNNGKY0X",
+ "MappedTemplate_Name": "New Share Folder Access",
+ "MappedTemplatedDescription": "New Share Folder Access",
+ "Used_by_SRMS": null,
+ "Assignment_Accelerator_Assignee": null,
+ "Assignment_Accelerator_Manager": null,
+ "DataTags": null,
+ "z1D_StatusQuerySelection": null,
+ "z1D Current Database Status": null,
+ "z1D_TaskTableRefreshDone": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "WO Type Field 10": null,
+ "WO Type Field 11": null,
+ "WO Type Field 12": null,
+ "WO Type Field 13": null,
+ "WO Type Field 14": null,
+ "WO Type Field 15": null,
+ "WO Type Field 16": null,
+ "WO Type Field 17": null,
+ "WO Type Field 18": null,
+ "WO Type Field 19": null,
+ "WO Type Field 20": null,
+ "WO Type Field 21": null,
+ "WO Type Field 22": null,
+ "WO Type Field 23": null,
+ "WO Type Field 24": null,
+ "WO Type Field 25": null,
+ "WO Type Field 26": null,
+ "WO Type Field 27": null,
+ "WO Type Field 28": null,
+ "WO Type Field 29": null,
+ "WO Type Field 30": null,
+ "WO Type Field 10 Label": ".",
+ "WO Type Field 11 Label": ".",
+ "WO Type Field 12 Label": ".",
+ "WO Type Field 13 Label": ".",
+ "WO Type Field 14 Label": ".",
+ "WO Type Field 15 Label": ".",
+ "WO Type Field 16 Label": ".",
+ "WO Type Field 17 Label": ".",
+ "WO Type Field 18 Label": ".",
+ "WO Type Field 19 Label": ".",
+ "WO Type Field 20 Label": ".",
+ "WO Type Field 21 Label": ".",
+ "WO Type Field 22 Label": ".",
+ "WO Type Field 23 Label": ".",
+ "WO Type Field 24 Label": ".",
+ "WO Type Field 25 Label": ".",
+ "WO Type Field 26 Label": ".",
+ "WO Type Field 27 Label": ".",
+ "WO Type Field 28 Label": ".",
+ "WO Type Field 29 Label": ".",
+ "WO Type Field 30 Label": ".",
+ "z1D_ExternalQual": null,
+ "z1D_remove_colcount": 0,
+ "z1D_ImportBypass": null,
+ "z1D_Int_ColCount": null,
+ "z1D_company": "Calbro Services",
+ "z1D_OldLocationCompanyGroupID": null,
+ "z1D_NewLocationCompanyGroupID": null,
+ "ASLOGID": null,
+ "Parent_Job_GUID": null,
+ "z1D_Company_Get": null,
+ "z1G_WOTaskDisplayOptionSettings": "Yes",
+ "z1D_ConfirmGroup": null,
+ "z1D_FLagOpenWindow": null,
+ "z1D template": null,
+ "z1D ExtQualification": null,
+ "Mark Records for Delete": null,
+ "ObsoleteDataDelete": null,
+ "WO Type Field 48": null,
+ "WO Type Field 49": null,
+ "WO Type Field 50": null,
+ "WO Type Field 51": null,
+ "WO Type Field 48 Label": ".",
+ "WO Type Field 49 Label": ".",
+ "WO Type Field 50 Label": ".",
+ "WO Type Field 51 Label": ".",
+ "z1D ValidateInfo": null,
+ "z1d_Search_Template_Name": null,
+ "Automation Status": null,
+ "z1D_FTS_ScanTime": -3600,
+ "zTmp_ExpCommonInstanceID": null,
+ "WO Type Field 1 Label": "Username",
+ "WO Type Field 2 Label": "Sharefolder Name",
+ "WO Type Field 3 Label": "Access Type",
+ "WO Type Field 4 Label": ".",
+ "WO Type Field 5 Label": ".",
+ "WO Type Field 6 Label": ".",
+ "WO Type Field 7 Label": ".",
+ "WO Type Field 8 Label": ".",
+ "WO Type Field 9 Label": ".",
+ "WO Type Field 1 Value": null,
+ "WO Type Field 2 Value": null,
+ "WO Type Field 3 Value": null,
+ "WO Type Field 4 Value": null,
+ "WO Type Field 5 Value": null,
+ "WO Type Field 6 Value": null,
+ "WO Type Field 7 Value": null,
+ "WO Type Field 8 Value": null,
+ "WO Type Field 9 Value": null,
+ "z1D Char09": null,
+ "z1D_Integer_2": null,
+ "Application GUID": null,
+ "Property Value": null,
+ "CI_ReconId": null,
+ "Summary": "New Share Folder Access",
+ "Location Company": "Calbro Services",
+ "Support Organization": "IT Support",
+ "Support Group Name": "Service Desk",
+ "Chg Location Address": null,
+ "z1D Char01": null,
+ "Categorization Tier 1": null,
+ "Categorization Tier 2": null,
+ "Categorization Tier 3": null,
+ "z1D Char02": null,
+ "z1D Char03": null,
+ "z1D Char04": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "Support Group ID": "SGP000000000011",
+ "Company": "Calbro Services",
+ "z1D Char05": null,
+ "z1D Char06": null,
+ "Add Request For:": "Organization",
+ "z1D Char07": null,
+ "z1D Char08": null,
+ "z1D Char29": null,
+ "z1D Char10": null,
+ "z1D Char11": null,
+ "z1D Char12": null,
+ "z1D Char13": null,
+ "z1D Char14": null,
+ "z1D Lastcount": null,
+ "z1D Char15": null,
+ "Work Order Type": "General",
+ "Company3": "Calbro Services",
+ "z1D Char16": null,
+ "CAB Manager ( Change Co-ord )": null,
+ "CAB Manager Login": null,
+ "z1D Char17": null,
+ "z1D Date03": null,
+ "z1D Date04": null,
+ "Author Group ID": null,
+ "z1D Char18": null,
+ "z1D Char19": null,
+ "z1D Char20": null,
+ "z1D Char21": null,
+ "z1D Char22": null,
+ "Product Cat Tier 1(2)": null,
+ "Product Cat Tier 2 (2)": null,
+ "Product Cat Tier 3 (2)": null,
+ "Authoring Group": null,
+ "Authoring Organization": null,
+ "Authoring Company": null,
+ "z1D Authoring Company": null,
+ "z1D Authoring Organization": null,
+ "z1D Authoring Group": null,
+ "z1D Char23": null,
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Char24": null,
+ "z1D Char25": null,
+ "Product Name (2)": null,
+ "Product Model/Version (2)": null,
+ "Manufacturer (2)": null,
+ "z1D IC Status": null,
+ "z1D Task Action": null,
+ "ASORG": "IT Support",
+ "ASCPY": "Calbro Services",
+ "ASGRP": "Service Desk",
+ "ASCHG": null,
+ "ASGRPID": "SGP000000000011",
+ "z1D Task Exist": null,
+ "z1D Task Status": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template/000000000000003"
+ }
+ ]
+ }
+ }
+ ],
+ "_links": {
+ "next": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template?offset=2&limit=2"
+ }
+ ],
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template?limit=2"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template_filter.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template_filter.json
new file mode 100644
index 000000000000..aed4cdf2f82c
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_work_order_template_filter.json
@@ -0,0 +1,264 @@
+{
+ "entries": [
+ {
+ "values": {
+ "Request ID": "000000000000009",
+ "Submitter": "Arthur Agent",
+ "Submit Date": "2023-06-12T12:29:07.000+0000",
+ "Assigned To": null,
+ "Last Modified By": "Arthur Agent",
+ "Last Modified Date": "2023-06-12T12:29:07.000+0000",
+ "Status": "Enabled",
+ "Template Name": "UNIX User Password Reset",
+ "Notifier Listening": "Not Listening",
+ "Assignee Groups": "1000000001;",
+ "GUID": "IDGCWH5RDMNSBARWFDYBRWFDYBB8NV",
+ "Record ID": "AGGCWH5RDMNSBARWFEA9RWFEA9B90F",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1G_DefaultCompany": null,
+ "z1G_Global_AST_ProductionDataSetID": null,
+ "z1G_UnrestrictedAccessMember": null,
+ "z1G_TMSDirtyStructureFlag": null,
+ "Notes": "UNIX User Password Reset",
+ "zTmpAuditAction": null,
+ "zTmpAuditOldStatus": null,
+ "zTmpAuditWorkspace": null,
+ "zTmpNonAuditable": null,
+ "zTmpTableLoadedMask": null,
+ "zTmpRequestID": null,
+ "Change in Sequence": null,
+ "z1D_Integer": null,
+ "z1D_ChildSequenceChange": null,
+ "z1D_SelectedTaskId": null,
+ "z1D_Task Event Code": null,
+ "z1D_TaskSubtype": null,
+ "z1D_Task instance Id": null,
+ "z1D_Task Id": null,
+ "z1D_Process Id": null,
+ "z1D_Task Event Info 1": null,
+ "z1D_Task Event Info 2": null,
+ "z1D_Task Event Info 3": null,
+ "z1D_Task Event Info 4": null,
+ "z1D_Task Event Info 5": null,
+ "z1D_Task Event Info 6": null,
+ "z1D_NextSequence": null,
+ "z1D_SelectedTaskGroupId": null,
+ "z1D_TypeSelector": null,
+ "z1D_ChildTaskTG": null,
+ "zTmpInstanceNum": null,
+ "zTmpPredecessorID": null,
+ "zTmpSuccessorID": null,
+ "zTmpFlowID": null,
+ "zTmpStart": null,
+ "zTmp_SelectOne": null,
+ "Name": "UNIX User Password Reset",
+ "Category": null,
+ "z1D_TaskGroupType": null,
+ "z1D_Child Type": null,
+ "Product Categorization Tier 1": null,
+ "Product Categorization Tier 2": null,
+ "Product Categorization Tier 3": null,
+ "Site Group": null,
+ "Region": null,
+ "Business Service": null,
+ "Product Name": null,
+ "Site": null,
+ "z1D_Dirty Flag": null,
+ "z1D_DirtyFlagAssociations": null,
+ "Topic": null,
+ "MappedTemplateInstanceId": "IDGCWH5RDMNSBARWFDYBRWFDYBB8NV",
+ "MappedTemplate_Name": "UNIX User Password Reset",
+ "MappedTemplatedDescription": "UNIX User Password Reset",
+ "Used_by_SRMS": null,
+ "Assignment_Accelerator_Assignee": null,
+ "Assignment_Accelerator_Manager": null,
+ "DataTags": null,
+ "z1D_StatusQuerySelection": null,
+ "z1D Current Database Status": null,
+ "z1D_TaskTableRefreshDone": null,
+ "z1D_DefaultCompanyAction": null,
+ "z1D_DefaultCompanyInteger": null,
+ "WO Type Field 10": null,
+ "WO Type Field 11": null,
+ "WO Type Field 12": null,
+ "WO Type Field 13": null,
+ "WO Type Field 14": null,
+ "WO Type Field 15": null,
+ "WO Type Field 16": null,
+ "WO Type Field 17": null,
+ "WO Type Field 18": null,
+ "WO Type Field 19": null,
+ "WO Type Field 20": null,
+ "WO Type Field 21": null,
+ "WO Type Field 22": null,
+ "WO Type Field 23": null,
+ "WO Type Field 24": null,
+ "WO Type Field 25": null,
+ "WO Type Field 26": null,
+ "WO Type Field 27": null,
+ "WO Type Field 28": null,
+ "WO Type Field 29": null,
+ "WO Type Field 30": null,
+ "WO Type Field 10 Label": ".",
+ "WO Type Field 11 Label": ".",
+ "WO Type Field 12 Label": ".",
+ "WO Type Field 13 Label": ".",
+ "WO Type Field 14 Label": ".",
+ "WO Type Field 15 Label": ".",
+ "WO Type Field 16 Label": ".",
+ "WO Type Field 17 Label": ".",
+ "WO Type Field 18 Label": ".",
+ "WO Type Field 19 Label": ".",
+ "WO Type Field 20 Label": ".",
+ "WO Type Field 21 Label": ".",
+ "WO Type Field 22 Label": ".",
+ "WO Type Field 23 Label": ".",
+ "WO Type Field 24 Label": ".",
+ "WO Type Field 25 Label": ".",
+ "WO Type Field 26 Label": ".",
+ "WO Type Field 27 Label": ".",
+ "WO Type Field 28 Label": ".",
+ "WO Type Field 29 Label": ".",
+ "WO Type Field 30 Label": ".",
+ "z1D_ExternalQual": null,
+ "z1D_remove_colcount": 0,
+ "z1D_ImportBypass": null,
+ "z1D_Int_ColCount": null,
+ "z1D_company": "Calbro Services",
+ "z1D_OldLocationCompanyGroupID": null,
+ "z1D_NewLocationCompanyGroupID": null,
+ "ASLOGID": "Arthur Agent",
+ "Parent_Job_GUID": null,
+ "z1D_Company_Get": null,
+ "z1G_WOTaskDisplayOptionSettings": "Yes",
+ "z1D_ConfirmGroup": null,
+ "z1D_FLagOpenWindow": null,
+ "z1D template": null,
+ "z1D ExtQualification": null,
+ "Mark Records for Delete": null,
+ "ObsoleteDataDelete": null,
+ "WO Type Field 48": null,
+ "WO Type Field 49": null,
+ "WO Type Field 50": null,
+ "WO Type Field 51": null,
+ "WO Type Field 48 Label": ".",
+ "WO Type Field 49 Label": ".",
+ "WO Type Field 50 Label": ".",
+ "WO Type Field 51 Label": ".",
+ "z1D ValidateInfo": null,
+ "z1d_Search_Template_Name": null,
+ "Automation Status": "Manual",
+ "z1D_FTS_ScanTime": -3600,
+ "zTmp_ExpCommonInstanceID": null,
+ "WO Type Field 1 Label": "Username",
+ "WO Type Field 2 Label": "Email ID",
+ "WO Type Field 3 Label": ".",
+ "WO Type Field 4 Label": ".",
+ "WO Type Field 5 Label": ".",
+ "WO Type Field 6 Label": ".",
+ "WO Type Field 7 Label": ".",
+ "WO Type Field 8 Label": ".",
+ "WO Type Field 9 Label": ".",
+ "WO Type Field 1 Value": null,
+ "WO Type Field 2 Value": null,
+ "WO Type Field 3 Value": null,
+ "WO Type Field 4 Value": null,
+ "WO Type Field 5 Value": null,
+ "WO Type Field 6 Value": null,
+ "WO Type Field 7 Value": null,
+ "WO Type Field 8 Value": null,
+ "WO Type Field 9 Value": null,
+ "z1D Char09": null,
+ "z1D_Integer_2": null,
+ "Application GUID": null,
+ "Property Value": null,
+ "CI_ReconId": null,
+ "Summary": "UNIX User Password Reset",
+ "Location Company": "Calbro Services",
+ "Support Organization": "IT Support",
+ "Support Group Name": "Service Desk",
+ "Chg Location Address": null,
+ "z1D Char01": null,
+ "Categorization Tier 1": null,
+ "Categorization Tier 2": null,
+ "Categorization Tier 3": null,
+ "z1D Char02": null,
+ "z1D Char03": null,
+ "z1D Char04": null,
+ "z1D Action": null,
+ "z1D Integer01": null,
+ "Support Group ID": "SGP000000000011",
+ "Company": "Calbro Services",
+ "z1D Char05": null,
+ "z1D Char06": null,
+ "Add Request For:": "Organization",
+ "z1D Char07": null,
+ "z1D Char08": null,
+ "z1D Char29": null,
+ "z1D Char10": null,
+ "z1D Char11": null,
+ "z1D Char12": null,
+ "z1D Char13": null,
+ "z1D Char14": null,
+ "z1D Lastcount": null,
+ "z1D Char15": null,
+ "Work Order Type": "General",
+ "Company3": "Calbro Services",
+ "z1D Char16": null,
+ "CAB Manager ( Change Co-ord )": "Arthur Agent",
+ "CAB Manager Login": "Arthur Agent",
+ "z1D Char17": null,
+ "z1D Date03": null,
+ "z1D Date04": null,
+ "Author Group ID": null,
+ "z1D Char18": null,
+ "z1D Char19": null,
+ "z1D Char20": null,
+ "z1D Char21": null,
+ "z1D Char22": null,
+ "Product Cat Tier 1(2)": null,
+ "Product Cat Tier 2 (2)": null,
+ "Product Cat Tier 3 (2)": null,
+ "Authoring Group": null,
+ "Authoring Organization": null,
+ "Authoring Company": null,
+ "z1D Authoring Company": null,
+ "z1D Authoring Organization": null,
+ "z1D Authoring Group": null,
+ "z1D Char23": null,
+ "z1D Permission Group ID": null,
+ "z1D Permission Group List": null,
+ "z1D Char24": null,
+ "z1D Char25": null,
+ "Product Name (2)": null,
+ "Product Model/Version (2)": null,
+ "Manufacturer (2)": null,
+ "z1D IC Status": null,
+ "z1D Task Action": null,
+ "ASORG": "IT Support",
+ "ASCPY": "Calbro Services",
+ "ASGRP": "Service Desk",
+ "ASCHG": "Arthur Agent",
+ "ASGRPID": "SGP000000000011",
+ "z1D Task Exist": null,
+ "z1D Task Status": null
+ },
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template/000000000000009"
+ }
+ ]
+ }
+ }
+ ],
+ "_links": {
+ "self": [
+ {
+ "href": "https://isvpartners-dev-restapi.onbmc.com/api/arsys/v1/entry/WOI:Template?limit=2&q=%27Template%20Name%27%20like%20%22%25UNIX%25%22"
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/BmcITSM/README.md b/Packs/BmcITSM/README.md
index 0a23d47f4dcd..48597c9155f6 100644
--- a/Packs/BmcITSM/README.md
+++ b/Packs/BmcITSM/README.md
@@ -6,4 +6,4 @@ Cortex XSOAR interfaces with BMC Helix ITSM to help streamline security-related
- Fetches BMC Helix ITSM tickets.
- Mirrors incoming BMC Helix ITSM tickets in Cortex XSOAR.
- Mirrors Cortex XSOAR tickets in BMC Helix ITSM.
- - Supports service request, incident, change request, task, problem investigation and known error ticket types.
+ - Supports service request, incident, change request, task, problem investigation, known error and work order ticket types.
diff --git a/Packs/BmcITSM/ReleaseNotes/1_0_21.md b/Packs/BmcITSM/ReleaseNotes/1_0_21.md
new file mode 100644
index 000000000000..cdbe3660a348
--- /dev/null
+++ b/Packs/BmcITSM/ReleaseNotes/1_0_21.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### BMC Helix ITSM
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/BmcITSM/ReleaseNotes/1_0_22.md b/Packs/BmcITSM/ReleaseNotes/1_0_22.md
new file mode 100644
index 000000000000..284ba37ae21b
--- /dev/null
+++ b/Packs/BmcITSM/ReleaseNotes/1_0_22.md
@@ -0,0 +1,29 @@
+
+#### Incident Fields
+
+- **BmcITSM Display ID**
+- **BmcITSM Request ID**
+- **BmcITSM Status Reason**
+- **BmcITSM Submitter**
+- **BmcITSM VIP Flag**
+
+
+#### Incident Types
+
+- New: **BMC Work Order**
+
+
+#### Integrations
+
+##### BMC Helix ITSM
+
+- Added the following commands:
+ - ***bmc-itsm-support-group-list***
+ - ***bmc-itsm-work-order-template-list***
+ - ***bmc-itsm-work-order-create***
+ - ***bmc-itsm-work-order-update***
+- Added support for work order in the following commands:
+ - ***bmc-itsm-ticket-list***
+ - ***bmc-itsm-ticket-delete***
+ - ***bmc-itsm-task-create***
+
diff --git a/Packs/BmcITSM/pack_metadata.json b/Packs/BmcITSM/pack_metadata.json
index 8d096e7c5795..75cb746144cf 100644
--- a/Packs/BmcITSM/pack_metadata.json
+++ b/Packs/BmcITSM/pack_metadata.json
@@ -1,8 +1,8 @@
{
"name": "BMC Helix ITSM",
- "description": "BMC Helix ITSM allows customers to manage service request, incident, change request, task, problem investigation and known error tickets.",
+ "description": "BMC Helix ITSM allows customers to manage service request, incident, change request, task, problem investigation, known error and work order tickets.",
"support": "xsoar",
- "currentVersion": "1.0.20",
+ "currentVersion": "1.0.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
index f56068fecc51..486bf1c44f18 100644
--- a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
+++ b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
@@ -57,7 +57,7 @@ script:
defaultValue: 3 days
description: Get events.
name: box-get-events
- dockerimage: demisto/auth-utils:1.0.0.87472
+ dockerimage: demisto/auth-utils:1.0.0.88531
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Box/Integrations/BoxV2/BoxV2.yml b/Packs/Box/Integrations/BoxV2/BoxV2.yml
index f8c7236d3f28..63568855f6eb 100644
--- a/Packs/Box/Integrations/BoxV2/BoxV2.yml
+++ b/Packs/Box/Integrations/BoxV2/BoxV2.yml
@@ -2496,7 +2496,7 @@ script:
- contextPath: Box.Folder.item_status
description: The status of the parent of the item.
type: String
- dockerimage: demisto/auth-utils:1.0.0.87472
+ dockerimage: demisto/auth-utils:1.0.0.88531
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Box/ReleaseNotes/3_1_43.md b/Packs/Box/ReleaseNotes/3_1_43.md
new file mode 100644
index 000000000000..08bc52942a03
--- /dev/null
+++ b/Packs/Box/ReleaseNotes/3_1_43.md
@@ -0,0 +1,5 @@
+#### Integrations
+##### Box Event Collector
+- Updated the Docker image to: *demisto/auth-utils:1.0.0.88531*.
+##### Box v2
+- Updated the Docker image to: *demisto/auth-utils:1.0.0.88531*.
diff --git a/Packs/Box/pack_metadata.json b/Packs/Box/pack_metadata.json
index eaae58d22a2a..09f60d373052 100644
--- a/Packs/Box/pack_metadata.json
+++ b/Packs/Box/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Box",
"description": "Manage Box users",
"support": "xsoar",
- "currentVersion": "3.1.42",
+ "currentVersion": "3.1.43",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5.yml b/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5.yml
index 505d98e17372..e289ad69027b 100644
--- a/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5.yml
+++ b/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5.yml
@@ -2174,9 +2174,28 @@ inputs:
description: 'The threshold for the severity value from which an automatic remediation takes place. Specify the severity number (default is Critical): 0 - Unknown, 0.5 - Informational. 1 - Low, 2 - Medium, 3 - High, 4 - Critical'
playbookInputQuery:
- key: internal_range
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: ''172.16.0.0/12,10.0.0.0/8,192.168.0.0/16'' (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: ''172.16.0.0/12,10.0.0.0/8,192.168.0.0/16'' (without quotes).'
playbookInputQuery:
- key: critical_users
value: {}
diff --git a/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5_README.md b/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5_README.md
index b2d784500546..6a58d56f5242 100644
--- a/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5_README.md
+++ b/Packs/BruteForce/Playbooks/Brute_Force_Investigation_-_Generic_6_5_README.md
@@ -20,11 +20,11 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Block Indicators - Generic v3
-* Account Enrichment - Generic v2.1
-* Calculate Severity - Critical Assets v2
* IP Enrichment - Generic v2
* Isolate Endpoint - Generic V2
+* Account Enrichment - Generic v2.1
+* Calculate Severity - Critical Assets v2
+* Block Indicators - Generic v3
### Integrations
@@ -36,12 +36,12 @@ This playbook does not use any integrations.
### Commands
+* closeInvestigation
* ad-disable-account
* send-mail
* setIncident
-* closeInvestigation
-* ad-enable-account
* ad-expire-password
+* ad-enable-account
## Playbook Inputs
@@ -54,7 +54,7 @@ This playbook does not use any integrations.
| traps_endpoint_id | Traps endpoint ID, used for endpoint isolation. | incident.agentid | Optional |
| logins_count_threshold | The threshold for number of logins, from which the investigation and remediation will start automatically without waiting for the user"s reply. Default is 10. | 10 | Optional |
| severity_threshold | The threshold for the severity value from which an automatic remediation takes place. Specify the severity number \(default is Critical\): 0 - Unknown, 0.5 - Informational. 1 - Low, 2 - Medium, 3 - High, 4 - Critical | 4 | Optional |
-| internal_range | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: '172.16.0.0/12,10.0.0.0/8,192.168.0.0/16' \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| internal_range | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: '172.16.0.0/12,10.0.0.0/8,192.168.0.0/16' \(without quotes\). | lists.PrivateIPs | Optional |
| critical_users | Critical users, separated by comma. | | Optional |
| critical_endpoints | Critical endpoints, separated by comma. | | Optional |
| critical_groups | Critical groups, separated by comma. | | Optional |
@@ -79,4 +79,4 @@ There are no outputs for this playbook.
---
-![Brute Force Investigation - Generic](../doc_files/Brute_Force_Investigation_-_Generic_6_5.png)
+![Brute Force Investigation - Generic](../doc_files/Brute_Force_Investigation_-_Generic.png)
diff --git a/Packs/BruteForce/ReleaseNotes/1_2_6.md b/Packs/BruteForce/ReleaseNotes/1_2_6.md
new file mode 100644
index 000000000000..e333eb32aaab
--- /dev/null
+++ b/Packs/BruteForce/ReleaseNotes/1_2_6.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Brute Force Investigation - Generic
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
diff --git a/Packs/BruteForce/pack_metadata.json b/Packs/BruteForce/pack_metadata.json
index cc711d9407ba..047c6a82a486 100644
--- a/Packs/BruteForce/pack_metadata.json
+++ b/Packs/BruteForce/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Brute Force",
"description": "This Content Pack helps you automate the repetitive tasks associated with Brute Force incidents. Custom incident views and layouts aid investigation.",
"support": "xsoar",
- "currentVersion": "1.2.5",
+ "currentVersion": "1.2.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/C2sec/Integrations/C2sec/C2sec.yml b/Packs/C2sec/Integrations/C2sec/C2sec.yml
index 3f71242db8b9..fa6c5eee2f75 100644
--- a/Packs/C2sec/Integrations/C2sec/C2sec.yml
+++ b/Packs/C2sec/Integrations/C2sec/C2sec.yml
@@ -199,6 +199,6 @@ script:
type: string
description: Query Data for specific component for companies in the portfolio
runonce: false
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/C2sec/ReleaseNotes/1_0_11.md b/Packs/C2sec/ReleaseNotes/1_0_11.md
new file mode 100644
index 000000000000..95d42ee2d45f
--- /dev/null
+++ b/Packs/C2sec/ReleaseNotes/1_0_11.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### C2sec irisk
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/C2sec/pack_metadata.json b/Packs/C2sec/pack_metadata.json
index d760b1599f8a..4696f06328fa 100644
--- a/Packs/C2sec/pack_metadata.json
+++ b/Packs/C2sec/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "C2sec irisk",
"description": "Understand Your Cyber Exposure as Easy as a Google Search",
"support": "xsoar",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CIRCL/Integrations/CIRCL/CIRCL.yml b/Packs/CIRCL/Integrations/CIRCL/CIRCL.yml
index 649fdbd4b2eb..ebd81ed8fcbe 100644
--- a/Packs/CIRCL/Integrations/CIRCL/CIRCL.yml
+++ b/Packs/CIRCL/Integrations/CIRCL/CIRCL.yml
@@ -116,7 +116,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
tests:
- CirclIntegrationTest
fromversion: 5.0.0
diff --git a/Packs/CIRCL/ReleaseNotes/1_0_22.md b/Packs/CIRCL/ReleaseNotes/1_0_22.md
new file mode 100644
index 000000000000..932797c6547d
--- /dev/null
+++ b/Packs/CIRCL/ReleaseNotes/1_0_22.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CIRCL
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CIRCL/pack_metadata.json b/Packs/CIRCL/pack_metadata.json
index 412ffa58cf16..836c143c69f5 100644
--- a/Packs/CIRCL/pack_metadata.json
+++ b/Packs/CIRCL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CIRCL",
"description": "The Computer Incident Response Center Luxembourg (CIRCL) is a government-driven initiative designed to provide a systematic response facility to computer security threats and incidents.\nThis pack includes:\n# CIRCL Passive DNS which is a database storing historical DNS records from various resources.\n# CIRCL Passive SSL is a database storing historical X.509 certificates seen per IP address. The Passive SSL historical data is indexed per IP address.\n# CIRCL CVE Search, interface to search publicly known information from security vulnerabilities in software and hardware along with their corresponding exposures.",
"support": "xsoar",
- "currentVersion": "1.0.21",
+ "currentVersion": "1.0.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CTF02/Playbooks/playbook-CTF_2_-_Classify_an_incident_RDP_Brute_force.yml b/Packs/CTF02/Playbooks/playbook-CTF_2_-_Classify_an_incident_RDP_Brute_force.yml
index 28f194352202..e32e512bb624 100644
--- a/Packs/CTF02/Playbooks/playbook-CTF_2_-_Classify_an_incident_RDP_Brute_force.yml
+++ b/Packs/CTF02/Playbooks/playbook-CTF_2_-_Classify_an_incident_RDP_Brute_force.yml
@@ -503,7 +503,7 @@ tasks:
label: ""
labelarg:
simple: |-
- Based on the Campaign's Report Description, what was the year when this campaign was first seen?
+ Based on the Campaign's Report Description, what was the year when a banking trojan malware associated with this campaign was first seen?
To answer - search for the report indicator type associated with that campaign
required: false
gridcolumns: []
diff --git a/Packs/CTF02/README.md b/Packs/CTF02/README.md
index b8047d48d20a..0c3ca99cb9bb 100644
--- a/Packs/CTF02/README.md
+++ b/Packs/CTF02/README.md
@@ -7,6 +7,7 @@ This pack was prepared with small challenges which enables you to get familiar w
To play this game, follow the instructions located within the "Prepare your CTF" playbook, located in the `Capture The Flag - 01` pack.
For more information, visit the following references:
+
- [Prepare your instance for Capture The Flag](https://xsoar.pan.dev/docs/reference/packs/capture-the-flag-preparation)
- [Introducing New XSOAR Capture the Flags!](https://www.paloaltonetworks.com/blog/security-operations/introducing-new-xsoar-capture-the-flags/)
diff --git a/Packs/CTF02/ReleaseNotes/1_0_3.md b/Packs/CTF02/ReleaseNotes/1_0_3.md
new file mode 100644
index 000000000000..bef9c6d7d6f8
--- /dev/null
+++ b/Packs/CTF02/ReleaseNotes/1_0_3.md
@@ -0,0 +1,13 @@
+
+#### Playbooks
+
+##### CTF 2 - Classify an incident - RDP Brute force
+
+Updated the question's description about the 'report' indicator type.
+
+#### Scripts
+
+##### CTF_2_BF
+
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
+
diff --git a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.py b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.py
index 5b0c29fea700..a0bc103f7462 100644
--- a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.py
+++ b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.py
@@ -99,14 +99,14 @@ def error_msg():
def main():
try:
args = demisto.args()
- # __Error handeling when there is an empty secret or question id__
+ # __Error handling when there is an empty secret or question id__
question_id = args.get("question_ID")
secret = args.get("secret", "").lower()
if not secret or not question_id:
raise DemistoException('Please specify Secret and Question ID to proceed with the challenge')
- # __Validate Quesion number 03__
+ # __Validate Question number 03__
match question_id:
case "03":
diff --git a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
index 5fa560dc3fc0..e08c96132c21 100644
--- a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
+++ b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
@@ -26,7 +26,7 @@ args:
scripttarget: 0
subtype: python3
runonce: false
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.87159
runas: DBotWeakRole
engineinfo: {}
fromversion: 8.2.0
diff --git a/Packs/CTF02/pack_metadata.json b/Packs/CTF02/pack_metadata.json
index 77f2ae22777d..8c1e616f4ddb 100644
--- a/Packs/CTF02/pack_metadata.json
+++ b/Packs/CTF02/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Capture The Flag - 02",
"description": "XSOAR's Capture the flag (CTF)",
"support": "xsoar",
- "currentVersion": "1.0.2",
+ "currentVersion": "1.0.3",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -15,7 +15,8 @@
"useCases": [],
"keywords": [],
"marketplaces": [
- "xsoar"
+ "xsoar",
+ "xsoar_saas"
],
"dependencies": {
"ctf01": {
diff --git a/Packs/CTIX/Integrations/CTIX/CTIX.py b/Packs/CTIX/Integrations/CTIX/CTIX.py
index 91e75a775bd6..8e5c4bdcc4ad 100644
--- a/Packs/CTIX/Integrations/CTIX/CTIX.py
+++ b/Packs/CTIX/Integrations/CTIX/CTIX.py
@@ -13,7 +13,7 @@
import requests
import urllib.parse
import urllib3
-from typing import Any, Dict
+from typing import Any
# Disable insecure warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -233,7 +233,7 @@ def test_module(client: Client):
demisto.results('ok')
-def ip_details_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def ip_details_command(client: Client, args: dict[str, Any]) -> List[CommandResults]:
"""
ip command: Returns IP details for a list of IPs
"""
@@ -304,7 +304,7 @@ def ip_details_command(client: Client, args: Dict[str, Any]) -> List[CommandResu
return ip_data_list
-def domain_details_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def domain_details_command(client: Client, args: dict[str, Any]) -> List[CommandResults]:
"""
domain command: Returns domain details for a list of domains
"""
@@ -374,7 +374,7 @@ def domain_details_command(client: Client, args: Dict[str, Any]) -> List[Command
return domain_data_list
-def url_details_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def url_details_command(client: Client, args: dict[str, Any]) -> List[CommandResults]:
"""
url command: Returns URL details for a list of URL
"""
@@ -443,7 +443,7 @@ def url_details_command(client: Client, args: Dict[str, Any]) -> List[CommandRes
return url_data_list
-def file_details_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def file_details_command(client: Client, args: dict[str, Any]) -> List[CommandResults]:
"""
file command: Returns FILE details for a list of FILE
"""
@@ -489,7 +489,7 @@ def file_details_command(client: Client, args: Dict[str, Any]) -> List[CommandRe
elif hash_type == "sha256":
file_standard_context.sha256 = file_key
elif hash_type == "sha512":
- file_standard_context.sha512 == file_key
+ file_standard_context.sha512 = file_key
file_data_list.append(CommandResults(
readable_output=tableToMarkdown('File Data', file_data, removeNull=True),
@@ -517,7 +517,7 @@ def file_details_command(client: Client, args: Dict[str, Any]) -> List[CommandRe
elif hash_type == "sha256":
file_standard_context.sha256 = file_key
elif hash_type == "sha512":
- file_standard_context.sha512 == file_key
+ file_standard_context.sha512 = file_key
file_data_list.append(CommandResults(
readable_output=f'No matches found for FILE {file_key}',
@@ -529,7 +529,7 @@ def file_details_command(client: Client, args: Dict[str, Any]) -> List[CommandRe
return file_data_list
-def create_intel_command(client: Client, args: Dict[str, Any]) -> Dict:
+def create_intel_command(client: Client, args: dict[str, Any]) -> dict:
"""
create_intel command: Creates Intel in CTIX
"""
diff --git a/Packs/CTIX/Integrations/CTIX/CTIX.yml b/Packs/CTIX/Integrations/CTIX/CTIX.yml
index 46d076e565d7..6261a9017a23 100644
--- a/Packs/CTIX/Integrations/CTIX/CTIX.yml
+++ b/Packs/CTIX/Integrations/CTIX/CTIX.yml
@@ -784,7 +784,7 @@ script:
- contextPath: CTIX.Intel.status
description: Status code returned from the api.
type: String
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.13.87159
subtype: python3
tests:
- No tests
diff --git a/Packs/CTIX/Integrations/CTIX/CTIX_test.py b/Packs/CTIX/Integrations/CTIX/CTIX_test.py
index 7ed91ccc1e48..dfbde8a5f807 100644
--- a/Packs/CTIX/Integrations/CTIX/CTIX_test.py
+++ b/Packs/CTIX/Integrations/CTIX/CTIX_test.py
@@ -1,4 +1,3 @@
-import io
import json
'''CONSTANTS'''
@@ -9,7 +8,7 @@
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
@@ -276,6 +275,6 @@ def test_create_intel(requests_mock):
}
response = create_intel_command(client, post_data)
- assert 'data', 'status' in response['CTIX']['Intel']['response']
+ assert all(k in response['CTIX']['Intel']['response'] for k in ('data', 'status'))
assert 'status' in response['CTIX']['Intel']
assert response['CTIX']['Intel']['response']['status'] == 201
diff --git a/Packs/CTIX/Integrations/CTIXv3/CTIXv3.py b/Packs/CTIX/Integrations/CTIXv3/CTIXv3.py
index dcb171eadb07..baaf517056d7 100644
--- a/Packs/CTIX/Integrations/CTIXv3/CTIXv3.py
+++ b/Packs/CTIX/Integrations/CTIXv3/CTIXv3.py
@@ -4,7 +4,8 @@
from http import HTTPStatus
import urllib3
import requests
-from typing import Any, Callable, Dict, cast
+from typing import Any, cast
+from collections.abc import Callable
import urllib.parse
import time
import json
@@ -140,12 +141,14 @@ def add_common_params(self, params: dict):
params["Signature"] = self.signature(expires)
return params
- def get_http_request(self, full_url: str, payload: dict = None, **kwargs):
+ def get_http_request(self, full_url: str, payload: dict = None,
+ fallback_full_url: str = None, **kwargs):
"""
GET HTTP Request
:param str full_url: URL to be called
:param dict payload: Request body, defaults to None
+ :param str fallback_full_url: URL to be called if the first 404s, defaults to None
:raises DemistoException: If Any error is found will be raised on XSOAR
:return dict: Response object
"""
@@ -168,16 +171,22 @@ def get_http_request(self, full_url: str, payload: dict = None, **kwargs):
return response
except requests.exceptions.HTTPError:
if status_code == HTTPStatus.NOT_FOUND:
- return_error("Your CTIX version does not support this command.")
+ if fallback_full_url:
+ # try again with the fallback url
+ return self.get_http_request(fallback_full_url, payload, **kwargs)
+ else:
+ return_error("Your CTIX version does not support this command.")
else:
return_error(f"Error: status-> {status_code!r}; Reason-> {resp.reason!r}]")
- def post_http_request(self, full_url: str, payload: dict, params: dict):
+ def post_http_request(self, full_url: str, payload: dict, params: dict,
+ fallback_full_url: str = None):
"""
POST HTTP Request
:param str full_url: URL to be called
:param dict payload: Request body, defaults to None
+ :param str fallback_full_url: URL to be called if the first 404s, defaults to None
:raises DemistoException: If Any error is found will be raised on XSOAR
:return dict: Response object
"""
@@ -199,7 +208,11 @@ def post_http_request(self, full_url: str, payload: dict, params: dict):
return response
except requests.exceptions.HTTPError:
if status_code == HTTPStatus.NOT_FOUND:
- return_error("Your CTIX version does not support this command.")
+ if fallback_full_url:
+ # try again with the fallback url
+ return self.post_http_request(fallback_full_url, payload, params)
+ else:
+ return_error("Your CTIX version does not support this command.")
else:
return_error(f"Error: status-> {status_code!r}; Reason-> {resp.reason!r}]")
@@ -321,10 +334,13 @@ def delete_tag(self, tag_id: str):
)
def whitelist_iocs(self, ioc_type, values, reason):
- url_suffix = "conversion/whitelist/"
+ url_suffix = "conversion/allowed_indicators/" # for CTIX >= 3.6
+ fallback_url_suffix = "conversion/whitelist/" # for CTIX < 3.6
client_url = self.base_url + url_suffix
+ fallback_client_url = self.base_url + fallback_url_suffix
payload = {"type": ioc_type, "values": values, "reason": reason}
- return self.post_http_request(client_url, payload, {})
+ return self.post_http_request(client_url, payload, {},
+ fallback_full_url=fallback_client_url)
def get_whitelist_iocs(self, page: int, page_size: int, q: str):
"""Paginated list of tags from ctix platform using page_number and page_size
@@ -337,12 +353,15 @@ def get_whitelist_iocs(self, page: int, page_size: int, q: str):
:type q: str
:param q: search query string for the list api
"""
- url_suffix = "conversion/whitelist/"
+ url_suffix = "conversion/allowed_indicators/" # for CTIX >= 3.6
+ fallback_url_suffix = "conversion/whitelist/" # for CTIX < 3.6
client_url = self.base_url + url_suffix
+ fallback_client_url = self.base_url + fallback_url_suffix
params = {"page": page, "page_size": page_size}
if q:
params["q"] = q # type: ignore
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, fallback_full_url=fallback_client_url,
+ **params)
def remove_whitelisted_ioc(self, whitelist_id: str):
"""Removes whitelisted ioc with given `whitelist_id`
@@ -381,7 +400,7 @@ def get_saved_searches(self, page: int, page_size: int):
url_suffix = "ingestion/saved-searches/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, None, **params)
def get_server_collections(self, page: int, page_size: int):
"""
@@ -394,9 +413,9 @@ def get_server_collections(self, page: int, page_size: int):
url_suffix = "publishing/collection/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, None, **params)
- def get_actions(self, page: int, page_size: int, params: Dict[str, Any]):
+ def get_actions(self, page: int, page_size: int, params: dict[str, Any]):
"""
Get Actions
@@ -682,7 +701,7 @@ def get_vulnerability_product_details(self, obj_id: str, page: int, page_size: i
url_suffix = f"ingestion/threat-data/vulnerability/{obj_id}/product-details/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, None, **params)
def get_vulnerability_cvss_score(self, obj_id: str, page: int, page_size: int):
"""
@@ -696,7 +715,7 @@ def get_vulnerability_cvss_score(self, obj_id: str, page: int, page_size: int):
url_suffix = f"ingestion/threat-data/vulnerability/{obj_id}/cvss-score/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, None, **params)
def get_vulnerability_source_description(self, obj_id: str, source_id: str, page: int, page_size: int):
"""
@@ -711,7 +730,7 @@ def get_vulnerability_source_description(self, obj_id: str, source_id: str, page
url_suffix = f"ingestion/threat-data/vulnerability/{obj_id}/source-description/"
client_url = self.base_url + url_suffix
params = {"source_id": source_id, "page": page, "page_size": page_size}
- return self.get_http_request(client_url, {}, **params)
+ return self.get_http_request(client_url, {}, None, **params)
""" HELPER FUNCTIONS """
@@ -805,7 +824,7 @@ def iter_dbot_score(
elif hash_type == "sha-256":
file_standard_context.sha256 = file_key
elif hash_type == "sha-512":
- file_standard_context.sha512 == file_key
+ file_standard_context.sha512 = file_key
final_data.append(
CommandResults(
@@ -925,7 +944,7 @@ def test_module(client: Client):
demisto.results("ok")
-def create_tag_command(client: Client, args: Dict[str, str]) -> CommandResults:
+def create_tag_command(client: Client, args: dict[str, str]) -> CommandResults:
"""
create_tag command: Creates a new tag in the CTIX platform
"""
@@ -950,7 +969,7 @@ def create_tag_command(client: Client, args: Dict[str, str]) -> CommandResults:
return results
-def get_tags_command(client: Client, args=Dict[str, Any]) -> List[CommandResults]:
+def get_tags_command(client: Client, args=dict[str, Any]) -> List[CommandResults]:
"""
get_tags commands: Returns paginated list of tags
"""
@@ -1005,7 +1024,7 @@ def delete_tag_command(client: Client, args: dict) -> CommandResults:
return results
-def whitelist_iocs_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def whitelist_iocs_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Whitelist IOCs command
@@ -1037,7 +1056,7 @@ def whitelist_iocs_command(client: Client, args: Dict[str, Any]) -> CommandResul
def get_whitelist_iocs_command(
- client: Client, args=Dict[str, Any]
+ client: Client, args=dict[str, Any]
) -> List[CommandResults]:
"""
get_tags commands: Returns paginated list of tags
@@ -1069,7 +1088,7 @@ def get_whitelist_iocs_command(
def remove_whitelisted_ioc_command(
- client: Client, args=Dict[str, Any]
+ client: Client, args=dict[str, Any]
) -> CommandResults:
"""
remove_whitelist_ioc: Deletes a whitelisted ioc with given id
@@ -1092,7 +1111,7 @@ def remove_whitelisted_ioc_command(
def get_threat_data_command(
- client: Client, args=Dict[str, Any]
+ client: Client, args=dict[str, Any]
) -> List[CommandResults]:
"""
get_threat_data: List thread data and allow query
@@ -1104,7 +1123,7 @@ def get_threat_data_command(
query = args.get("query", "type=indicator")
response = client.get_threat_data(page, page_size, query)
threat_data_list = response.get("data", {}).get("results", [])
- results = [data for data in threat_data_list]
+ results = list(threat_data_list)
results = no_result_found(results)
reliability = args.get("reliability")
@@ -1123,7 +1142,7 @@ def get_threat_data_command(
return result
-def get_saved_searches_command(client: Client, args=Dict[str, Any]) -> CommandResults:
+def get_saved_searches_command(client: Client, args=dict[str, Any]) -> CommandResults:
"""
get_saved_searches: List saved search data
"""
@@ -1133,7 +1152,7 @@ def get_saved_searches_command(client: Client, args=Dict[str, Any]) -> CommandRe
page_size = check_for_empty_variable(page_size, 10)
response = client.get_saved_searches(page, page_size)
data_list = response.get("data", {}).get("results", [])
- results = [data for data in data_list]
+ results = list(data_list)
results = no_result_found(results)
if isinstance(results, CommandResults):
return results
@@ -1149,7 +1168,7 @@ def get_saved_searches_command(client: Client, args=Dict[str, Any]) -> CommandRe
def get_server_collections_command(
- client: Client, args=Dict[str, Any]
+ client: Client, args=dict[str, Any]
) -> CommandResults:
"""
get_server_collections: List server collections
@@ -1160,7 +1179,7 @@ def get_server_collections_command(
page_size = check_for_empty_variable(page_size, 10)
response = client.get_server_collections(page, page_size)
data_list = response.get("data", {}).get("results", [])
- results = [data for data in data_list]
+ results = list(data_list)
results = no_result_found(results)
if isinstance(results, CommandResults):
return results
@@ -1177,7 +1196,7 @@ def get_server_collections_command(
return result
-def get_actions_command(client: Client, args=Dict[str, Any]) -> CommandResults:
+def get_actions_command(client: Client, args=dict[str, Any]) -> CommandResults:
"""
get_actions: List Actions
"""
@@ -1194,7 +1213,7 @@ def get_actions_command(client: Client, args=Dict[str, Any]) -> CommandResults:
params["object_type"] = object_type
response = client.get_actions(page, page_size, params)
data_list = response.get("data", {}).get("results", [])
- results = [data for data in data_list]
+ results = list(data_list)
results = no_result_found(results)
if isinstance(results, CommandResults):
return results
@@ -1210,7 +1229,7 @@ def get_actions_command(client: Client, args=Dict[str, Any]) -> CommandResults:
def add_indicator_as_false_positive_command(
- client: Client, args: Dict[str, str]
+ client: Client, args: dict[str, str]
) -> CommandResults:
"""
Add Indicator as False Positive Command
@@ -1240,7 +1259,7 @@ def add_indicator_as_false_positive_command(
return results
-def add_ioc_manual_review_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def add_ioc_manual_review_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Add IOC for Manual Review Command
@@ -1353,7 +1372,7 @@ def add_analyst_score_command(client: Client, args: dict) -> CommandResults:
return results
-def saved_result_set_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def saved_result_set_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Saved Result Set data Command
@@ -1388,7 +1407,7 @@ def saved_result_set_command(client: Client, args: Dict[str, Any]) -> CommandRes
def tag_indicator_updation_command(
- client: Client, args: Dict[str, Any], operation: str
+ client: Client, args: dict[str, Any], operation: str
) -> CommandResults:
"""
Tag Indicator Updation Command
@@ -1424,7 +1443,7 @@ def tag_indicator_updation_command(
return results
-def search_for_tag_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def search_for_tag_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Search for Tag Command
@@ -1453,7 +1472,7 @@ def search_for_tag_command(client: Client, args: Dict[str, Any]) -> CommandResul
return results
-def get_indicator_details_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_indicator_details_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Indicator Details Command
@@ -1484,7 +1503,7 @@ def get_indicator_details_command(client: Client, args: Dict[str, Any]) -> Comma
return results
-def get_indicator_tags_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_indicator_tags_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Indicator Tags Command
@@ -1516,7 +1535,7 @@ def get_indicator_tags_command(client: Client, args: Dict[str, Any]) -> CommandR
return results
-def get_indicator_relations_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_indicator_relations_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Indicator Relations Command
@@ -1547,7 +1566,7 @@ def get_indicator_relations_command(client: Client, args: Dict[str, Any]) -> Com
return results
-def get_indicator_observations_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_indicator_observations_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Indicator Observations Command
@@ -1584,7 +1603,7 @@ def get_indicator_observations_command(client: Client, args: Dict[str, Any]) ->
return results
-def get_conversion_feed_source_command(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_conversion_feed_source_command(client: Client, args: dict[str, Any]) -> CommandResults:
"""
Get Conversion Feed Source Command
@@ -1625,7 +1644,7 @@ def get_conversion_feed_source_command(client: Client, args: Dict[str, Any]) ->
def get_lookup_threat_data_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: dict[str, Any]
) -> List[CommandResults]:
"""
Get Lookup Threat Data Command
@@ -1661,7 +1680,7 @@ def get_lookup_threat_data_command(
def get_create_threat_data_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: dict[str, Any]
) -> List[CommandResults]:
"""
Get or Create Threat Data Command
@@ -1713,25 +1732,25 @@ def get_create_threat_data_command(
return results + created_after_lookup_results + invalid_values_results
-def domain(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def domain(client: Client, args: dict[str, Any]) -> List[CommandResults]:
args["object_names"] = args["domain"]
args["ioc_type"] = ["domain-name"]
return get_lookup_threat_data_command(client, args)
-def url(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def url(client: Client, args: dict[str, Any]) -> List[CommandResults]:
args["object_names"] = args["url"]
args["ioc_type"] = ["url"]
return get_lookup_threat_data_command(client, args)
-def ip(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def ip(client: Client, args: dict[str, Any]) -> List[CommandResults]:
args["object_names"] = args["ip"]
args["ioc_type"] = ["ipv4-addr", "ipv6-addr"]
return get_lookup_threat_data_command(client, args)
-def file(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def file(client: Client, args: dict[str, Any]) -> List[CommandResults]:
args["object_names"] = args["file"]
args["ioc_type"] = [
"MD5",
@@ -1745,7 +1764,7 @@ def file(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
return get_lookup_threat_data_command(client, args)
-def get_all_notes(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_all_notes(client: Client, args: dict[str, Any]) -> CommandResults:
page = args["page"]
page = check_for_empty_variable(page, 1)
page_size = args["page_size"]
@@ -1779,7 +1798,7 @@ def get_all_notes(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def get_note_details(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_note_details(client: Client, args: dict[str, Any]) -> CommandResults:
id = args["id"]
client_url = client.base_url + f"ingestion/notes/{id}/"
response = client.get_http_request(client_url)
@@ -1803,7 +1822,7 @@ def get_note_details(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def create_note(client: Client, args: Dict[str, Any]) -> CommandResults:
+def create_note(client: Client, args: dict[str, Any]) -> CommandResults:
text = args['text']
client_url = client.base_url + "ingestion/notes/"
object_id = args.get('object_id', None)
@@ -1848,7 +1867,7 @@ def create_note(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def update_note(client: Client, args: Dict[str, Any]) -> CommandResults:
+def update_note(client: Client, args: dict[str, Any]) -> CommandResults:
id = args['id']
text = args.get("text", None)
client_url = client.base_url + f"ingestion/notes/{id}/"
@@ -1899,7 +1918,7 @@ def update_note(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def delete_note(client: Client, args: Dict[str, Any]) -> CommandResults:
+def delete_note(client: Client, args: dict[str, Any]) -> CommandResults:
id = args['id']
client_url = client.base_url + f"ingestion/notes/{id}/"
response = client.delete_http_request(client_url)
@@ -1921,7 +1940,7 @@ def delete_note(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def make_request(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def make_request(client: Client, args: dict[str, Any]) -> List[CommandResults]:
type = args['type']
body = json.loads(args.get('body', "{}"))
params = json.loads(args.get('params', "{}"))
@@ -1968,7 +1987,7 @@ def make_request(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
]
-def cve_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
+def cve_command(client: Client, args: dict[str, Any]) -> List[CommandResults]:
page = 1
page_size = 15
params = {"page": page, "page_size": page_size}
@@ -1976,7 +1995,7 @@ def cve_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
extra_fields = argToList(args.get("extra_fields", []))
response = client.get_lookup_threat_data("vulnerability", [], cve, params)
threat_data_list = response.get("data", {}).get("results", [])
- results = [data for data in threat_data_list]
+ results = list(threat_data_list)
results = no_result_found(results)
if isinstance(results, CommandResults):
@@ -1988,7 +2007,7 @@ def cve_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
return final_results
-def _lookup_cve_result(client: Client, cve_detail: Dict[str, Any], page: int, page_size: int, extra_fields: List[str]):
+def _lookup_cve_result(client: Client, cve_detail: dict[str, Any], page: int, page_size: int, extra_fields: List[str]):
cve_uuid = str(cve_detail.get("id"))
created = str(datetime.fromtimestamp(cve_detail.get("created", 0)))
modified = str(datetime.fromtimestamp(cve_detail.get("modified", 0)))
@@ -2000,7 +2019,7 @@ def _lookup_cve_result(client: Client, cve_detail: Dict[str, Any], page: int, pa
response = client.get_vulnerability_product_details(cve_uuid, page, page_size)
product_details_list = response.get("data", {}).get("results", [])
- results = [data for data in product_details_list]
+ results = list(product_details_list)
cpe_list = ",\n".join(product.get("product") for product in results)
response = client.get_vulnerability_cvss_score(cve_uuid, page, page_size)
@@ -2022,7 +2041,7 @@ def _lookup_cve_result(client: Client, cve_detail: Dict[str, Any], page: int, pa
dbot_reputation_score = 1
elif 3 <= cvss_map_value < 7:
dbot_reputation_score = 2
- elif 7 <= cvss_map_value:
+ elif cvss_map_value >= 7:
dbot_reputation_score = 3
description = None
diff --git a/Packs/CTIX/Integrations/CTIXv3/CTIXv3.yml b/Packs/CTIX/Integrations/CTIXv3/CTIXv3.yml
index f01d3bdc1f9d..14f17be3b319 100644
--- a/Packs/CTIX/Integrations/CTIXv3/CTIXv3.yml
+++ b/Packs/CTIX/Integrations/CTIXv3/CTIXv3.yml
@@ -1859,7 +1859,7 @@ script:
- contextPath: DBotScore.Score
description: The actual score.
type: Number
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.13.87159
subtype: python3
tests:
- No tests (auto formatted)
diff --git a/Packs/CTIX/Integrations/CTIXv3/CTIXv3_test.py b/Packs/CTIX/Integrations/CTIXv3/CTIXv3_test.py
index e314c64aee0e..4e58d1454ac3 100644
--- a/Packs/CTIX/Integrations/CTIXv3/CTIXv3_test.py
+++ b/Packs/CTIX/Integrations/CTIXv3/CTIXv3_test.py
@@ -1,4 +1,3 @@
-import io
import json
from CTIXv3 import (
Client,
@@ -46,7 +45,7 @@
def util_load_json(path):
- with io.open(path, mode="r", encoding="utf-8") as f:
+ with open(path, encoding="utf-8") as f:
return json.loads(f.read())
@@ -182,6 +181,31 @@ def test_delete_tags_no_input(requests_mock):
def test_whitelist_iocs_command(requests_mock):
mock_response = util_load_json("test_data/whitelist_iocs.json")
+ requests_mock.post(f"{BASE_URL}conversion/allowed_indicators/", json=mock_response)
+
+ client = Client(
+ base_url=BASE_URL,
+ access_id=ACCESS_ID,
+ secret_key=SECRET_KEY,
+ verify=False,
+ proxies={},
+ )
+
+ args = {"type": "indicator", "values": "127.0.0.1, 127.0.0.2", "reason": "test"}
+
+ resp = whitelist_iocs_command(client, args)
+ response = resp.raw_response
+
+ assert response == mock_response["details"]
+ assert resp.outputs_prefix == "CTIX.AllowedIOC"
+
+ assert isinstance(response, dict)
+ assert len(response) == 3
+
+
+def test_whitelist_iocs_command_fallback(requests_mock):
+ mock_response = util_load_json("test_data/whitelist_iocs.json")
+ requests_mock.post(f"{BASE_URL}conversion/allowed_indicators/", status_code=404)
requests_mock.post(f"{BASE_URL}conversion/whitelist/", json=mock_response)
client = Client(
@@ -204,8 +228,9 @@ def test_whitelist_iocs_command(requests_mock):
assert len(response) == 3
-def test_get_whitelist_iocs_command(requests_mock):
+def test_get_whitelist_iocs_command_fallback(requests_mock):
mock_response = util_load_json("test_data/get_whitelist_iocs.json")
+ requests_mock.get(f"{BASE_URL}conversion/allowed_indicators/", status_code=404)
requests_mock.get(f"{BASE_URL}conversion/whitelist/", json=mock_response)
client = Client(
@@ -228,6 +253,30 @@ def test_get_whitelist_iocs_command(requests_mock):
assert len(response) == 11
+def test_get_whitelist_iocs_command(requests_mock):
+ mock_response = util_load_json("test_data/get_whitelist_iocs.json")
+ requests_mock.get(f"{BASE_URL}conversion/allowed_indicators/", json=mock_response)
+
+ client = Client(
+ base_url=BASE_URL,
+ access_id=ACCESS_ID,
+ secret_key=SECRET_KEY,
+ verify=False,
+ proxies={},
+ )
+
+ args = {"page": 1, "page_size": 1}
+
+ resp = get_whitelist_iocs_command(client, args)
+ response = resp[0].raw_response
+
+ assert response == mock_response["results"][0]
+ assert resp[0].outputs_prefix == "CTIX.IOC"
+
+ assert isinstance(response, dict)
+ assert len(response) == 11
+
+
def test_remove_whitelisted_ioc_command(requests_mock):
mock_response = util_load_json("test_data/remove_whitelist_ioc.json")
requests_mock.post(
diff --git a/Packs/CTIX/ReleaseNotes/2_2_16.md b/Packs/CTIX/ReleaseNotes/2_2_16.md
new file mode 100644
index 000000000000..d86824763420
--- /dev/null
+++ b/Packs/CTIX/ReleaseNotes/2_2_16.md
@@ -0,0 +1,6 @@
+#### Integrations
+##### Cyware Threat Intelligence eXchange
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
+##### CTIX v3
+- Added Compatibility with new Allowed Indicator Module.
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/CTIX/pack_metadata.json b/Packs/CTIX/pack_metadata.json
index 22dfc1024259..5cf59f1efac7 100644
--- a/Packs/CTIX/pack_metadata.json
+++ b/Packs/CTIX/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CTIX",
"description": "Cyware Threat Intelligence eXchange",
"support": "partner",
- "currentVersion": "2.2.15",
+ "currentVersion": "2.2.16",
"author": "Cyware Labs",
"url": "https://cyware.com/",
"email": "connector-dev@cyware.com",
diff --git a/Packs/Campaign/ReleaseNotes/3_4_4.md b/Packs/Campaign/ReleaseNotes/3_4_4.md
new file mode 100644
index 000000000000..f659d3befc99
--- /dev/null
+++ b/Packs/Campaign/ReleaseNotes/3_4_4.md
@@ -0,0 +1,12 @@
+
+#### Scripts
+
+##### ShowCampaignUniqueSenders
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### ShowNumberOfCampaignIncidents
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### ShowCampaignUniqueRecipients
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Campaign/Scripts/ShowCampaignUniqueRecipients/ShowCampaignUniqueRecipients.yml b/Packs/Campaign/Scripts/ShowCampaignUniqueRecipients/ShowCampaignUniqueRecipients.yml
index 6dd22be988fc..652e08b0c8ed 100644
--- a/Packs/Campaign/Scripts/ShowCampaignUniqueRecipients/ShowCampaignUniqueRecipients.yml
+++ b/Packs/Campaign/Scripts/ShowCampaignUniqueRecipients/ShowCampaignUniqueRecipients.yml
@@ -10,7 +10,7 @@ comment: Displays the number of unique recipients of an email campaign.
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Campaign/Scripts/ShowCampaignUniqueSenders/ShowCampaignUniqueSenders.yml b/Packs/Campaign/Scripts/ShowCampaignUniqueSenders/ShowCampaignUniqueSenders.yml
index f6f9864df48d..a447c60d67a7 100644
--- a/Packs/Campaign/Scripts/ShowCampaignUniqueSenders/ShowCampaignUniqueSenders.yml
+++ b/Packs/Campaign/Scripts/ShowCampaignUniqueSenders/ShowCampaignUniqueSenders.yml
@@ -11,7 +11,7 @@ enabled: true
scripttarget: 0
subtype: python3
fromversion: 6.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/Campaign/Scripts/ShowNumberOfCampaignIncidents/ShowNumberOfCampaignIncidents.yml b/Packs/Campaign/Scripts/ShowNumberOfCampaignIncidents/ShowNumberOfCampaignIncidents.yml
index 8dcd99f4ebf6..312af40b7e04 100644
--- a/Packs/Campaign/Scripts/ShowNumberOfCampaignIncidents/ShowNumberOfCampaignIncidents.yml
+++ b/Packs/Campaign/Scripts/ShowNumberOfCampaignIncidents/ShowNumberOfCampaignIncidents.yml
@@ -11,7 +11,7 @@ enabled: true
scripttarget: 0
subtype: python3
fromversion: 6.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/Campaign/pack_metadata.json b/Packs/Campaign/pack_metadata.json
index d04df1d315de..f77a831765ac 100644
--- a/Packs/Campaign/pack_metadata.json
+++ b/Packs/Campaign/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Phishing Campaign",
"description": "This pack can help you find related phishing, spam or other types of email incidents and characterize campaigns.",
"support": "xsoar",
- "currentVersion": "3.4.3",
+ "currentVersion": "3.4.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/CarbonBlackEndpointStandardEventCollector.yml b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/CarbonBlackEndpointStandardEventCollector.yml
index 8288abea6e6a..e5e31c0c27f6 100644
--- a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/CarbonBlackEndpointStandardEventCollector.yml
+++ b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/CarbonBlackEndpointStandardEventCollector.yml
@@ -77,7 +77,7 @@ script:
defaultValue: 2500
description: 'Fetch alerts and audit logs from Carbon Black Endpoint Standard.'
name: carbonblack-endpoint-standard-get-events
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/CarbonBlackDefense/ReleaseNotes/3_1_4.md b/Packs/CarbonBlackDefense/ReleaseNotes/3_1_4.md
new file mode 100644
index 000000000000..f33525216e02
--- /dev/null
+++ b/Packs/CarbonBlackDefense/ReleaseNotes/3_1_4.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Carbon Black Endpoint Standard Event Collector
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/CarbonBlackDefense/pack_metadata.json b/Packs/CarbonBlackDefense/pack_metadata.json
index 021fbb7c5f01..f16bde1881b6 100644
--- a/Packs/CarbonBlackDefense/pack_metadata.json
+++ b/Packs/CarbonBlackDefense/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Carbon Black Endpoint Standard",
"description": "Next-generation antivirus + EDR in one cloud-delivered platform that stops commodity malware, advanced malware, non-malware attacks and ransomware.",
"support": "xsoar",
- "currentVersion": "3.1.3",
+ "currentVersion": "3.1.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CarbonBlackProtect/TestPlaybooks/playbook-CbP-PlayBook.yml b/Packs/CarbonBlackProtect/TestPlaybooks/playbook-CbP-PlayBook.yml
index c3dc5b69ee09..411c709ec28b 100644
--- a/Packs/CarbonBlackProtect/TestPlaybooks/playbook-CbP-PlayBook.yml
+++ b/Packs/CarbonBlackProtect/TestPlaybooks/playbook-CbP-PlayBook.yml
@@ -1,4 +1,4 @@
-id: 67b0f25f-b061-4468-8613-43ab13147173
+id: CbP-PlayBook
version: -1
name: CbP-PlayBook
starttaskid: "0"
diff --git a/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.py b/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.py
index 6858814153a4..1d9dd9bcbf18 100644
--- a/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.py
+++ b/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.py
@@ -822,6 +822,7 @@ def fetch_incidents(client: Client, max_results: int, last_run: dict, first_fetc
incidents: List[Dict[str, Any]] = []
alerts = []
+ time_sort = 'created_time'
# multiple statuses are not supported by api. If multiple statuses provided, gets the incidents for each status.
# Otherwise will run without status.
@@ -834,13 +835,13 @@ def fetch_incidents(client: Client, max_results: int, last_run: dict, first_fetc
demisto.debug(f'{INTEGRATION_NAME} - Fetching incident from Server with status: {current_status}')
query_params['status'] = f'"{current_status}"'
# we create a new query containing params since we do not allow both query and params.
- res = client.get_alerts(query=_create_query_string(query_params), limit=max_results)
+ res = client.get_alerts(query=_create_query_string(query_params), limit=max_results, sort=time_sort)
alerts += res.get('results', [])
demisto.debug(f'{INTEGRATION_NAME} - fetched {len(alerts)} so far.')
else:
query = _add_to_current_query(query, query_params)
demisto.debug(f'{INTEGRATION_NAME} - Fetching incident from Server with status: {status}')
- res = client.get_alerts(query=query, limit=max_results)
+ res = client.get_alerts(query=query, limit=max_results, sort=time_sort)
alerts += res.get('results', [])
demisto.debug(f'{INTEGRATION_NAME} - Got total of {len(alerts)} alerts from CB server.')
diff --git a/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.yml b/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.yml
index 3aa1649395c2..3ef265c813a9 100644
--- a/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.yml
+++ b/Packs/Carbon_Black_Enterprise_Response/Integrations/CarbonBlackResponseV2/CarbonBlackResponseV2.yml
@@ -79,23 +79,23 @@ configuration:
section: Connect
advanced: true
required: false
-description: VMware Carbon Black EDR (formerly known as Carbon Black Response)
+description: VMware Carbon Black EDR (formerly known as Carbon Black Response).
display: VMware Carbon Black EDR v2
name: VMware Carbon Black EDR v2
script:
commands:
- arguments:
- - description: The name of the process
+ - description: The name of the process.
name: process_name
- - description: The CB Host group this sensor is assigned to
+ - description: The CB Host group this sensor is assigned to.
name: group
- - description: The hostname of the computer for this process
+ - description: The hostname of the computer for this process.
name: hostname
- - description: The parent process name
+ - description: The parent process name.
name: parent_name
- - description: The process path
+ - description: The process path.
name: process_path
- - description: The md5 of the binary image backing the process
+ - description: The md5 of the binary image backing the process.
name: md5
- description: Advanced query string. Accepts the same data as the search box on the Process Search page. For more information on the query syntax see https://developer.carbonblack.com/resources/query_overview.pdf. If not provided, at least one other search field must be provided.
name: query
@@ -128,7 +128,7 @@ script:
name: limit
- description: Start at this row, 0 by default.
name: start
- description: Process search
+ description: Process search.
name: cb-edr-processes-search
outputs:
- contextPath: CarbonBlackEDR.ProcessSearch.Terms
@@ -225,7 +225,7 @@ script:
description: The count of cross process events launched by this process.
type: Number
- contextPath: CarbonBlackEDR.ProcessSearch.Results.segment_id
- description: The process segment id (processes are identified by this segment id and their id)
+ description: The process segment id (processes are identified by this segment id and their id).
type: String
- contextPath: CarbonBlackEDR.ProcessSearch.Results.watchlists.segments_hit
description: Number of segment hits associated with the watchlist.
@@ -361,7 +361,7 @@ script:
description: The pid of the process's parent.
type: Number
- contextPath: CarbonBlackEDR.Process.process.segment_id
- description: The process segment id (processes are identified by this segment id and their id)
+ description: The process segment id (processes are identified by this segment id and their id).
type: Date
- contextPath: CarbonBlackEDR.Process.process.min_last_server_update
description: When the process was last updated in the server.
@@ -379,7 +379,7 @@ script:
description: The count of child processes launched by this process.
type: Number
- contextPath: CarbonBlackEDR.Process.process.unique_id
- description: An internal CB process id combining of the process id and segment id
+ description: An internal CB process id combining of the process id and segment id.
type: String
- contextPath: CarbonBlackEDR.Process.siblings.process_md5
description: The md5 of the binary image backing the sibling process.
@@ -394,7 +394,7 @@ script:
description: Internal CB process id of the sibling process's parent.
type: String
- contextPath: CarbonBlackEDR.Process.siblings.cmdline
- description: The command line of the sibling process
+ description: The command line of the sibling process.
type: String
- contextPath: CarbonBlackEDR.Process.siblings.id
description: The id of the process.
@@ -433,7 +433,7 @@ script:
description: The pid of the sibling process's parent.
type: Number
- contextPath: CarbonBlackEDR.Process.siblings.segment_id
- description: The sibling process segment id (processes are identified by this segment id and their id)
+ description: The sibling process segment id (processes are identified by this segment id and their id).
type: Date
- contextPath: CarbonBlackEDR.Process.siblings.host_type
description: The type of the host associated with the process.
@@ -445,7 +445,7 @@ script:
description: The type of the child process associated with the process.
type: String
- contextPath: CarbonBlackEDR.Process.siblings.unique_id
- description: An internal CB process id combining of the sibling process id and segment id
+ description: An internal CB process id combining of the sibling process id and segment id.
type: String
- contextPath: CarbonBlackEDR.Process.children.process_md5
description: The md5 of the binary image backing the children process.
@@ -460,7 +460,7 @@ script:
description: Internal CB process id of the child process's parent.
type: String
- contextPath: CarbonBlackEDR.Process.children.cmdline
- description: The command line of the child process
+ description: The command line of the child process.
type: String
- contextPath: CarbonBlackEDR.Process.children.id
description: The id of the process.
@@ -499,7 +499,7 @@ script:
description: The pid of the child process's parent.
type: Number
- contextPath: CarbonBlackEDR.Process.children.segment_id
- description: The child process segment id (processes are identified by this segment id and their id)
+ description: The child process segment id (processes are identified by this segment id and their id).
type: Date
- contextPath: CarbonBlackEDR.Process.children.host_type
description: The host type of the children process.
@@ -511,7 +511,7 @@ script:
description: The type of the host associated with the process.
type: String
- contextPath: CarbonBlackEDR.Process.children.unique_id
- description: An internal CB process id combining of the child process id and segment id
+ description: An internal CB process id combining of the child process id and segment id.
type: String
- arguments:
- description: The internal CB process id; this is the id field in search results.
@@ -547,7 +547,7 @@ script:
For Windows- A ZIP archive which includes a signed Windows EXE or MSI sensor installer and settings file
For Mac OS X- A ZIP archive which includes a signed OSX PKG sensor installer and settings file
- For Linux- A compressed tarball (tar.gz) archive which includes a Linux sensor installer and settings file
+ For Linux- A compressed tarball (tar.gz) archive which includes a Linux sensor installer and settings file.
name: os_type
predefined:
- windows_exe
@@ -562,17 +562,17 @@ script:
name: cb-edr-sensor-installer-download
- arguments:
- default: true
- description: The internal CB process id; this is the id field in search results
+ description: The internal CB process id; this is the id field in search results.
name: process_id
required: true
- description: |-
The process segment id. This is the segment_id field in search results.
- If this is set to 0, the API will merge all segments in results
+ If this is set to 0, the API will merge all segments in results.
name: segment_id
required: true
- description: |-
Return events starting with this offset.
- If not provided, offset will be 0 (returns events starting from the beginning)
+ If not provided, offset will be 0 (returns events starting from the beginning).
name: start
- description: How many events to fetch.
name: count
@@ -765,34 +765,34 @@ script:
type: String
- arguments:
- default: true
- description: The sensor ID to quarantine
+ description: The sensor ID to quarantine.
name: sensor_id
required: true
- description: Unquarantine the endpoint
+ description: Unquarantine the endpoint.
name: cb-edr-unquarantine-device
- arguments:
- default: true
- description: The sensor ID to quarantine
+ description: The sensor ID to quarantine.
name: sensor_id
required: true
- description: Isolate the endpoint from the network
+ description: Isolate the endpoint from the network.
name: cb-edr-quarantine-device
- arguments:
- - description: The sensor ID
+ - description: The sensor ID.
name: id
- - description: Returns the sensor registration(s) with specified IP address
+ - description: Returns the sensor registration(s) with specified IP address.
name: ip
predefined:
- ''
- - description: Retruns the sensor registration(s) in the specified sensor group id
+ - description: Returns the sensor registration(s) in the specified sensor group id.
name: group_id
- - description: ' only returns sensors that have been inactive for less than the specified number of days'
+ - description: 'Only returns sensors that have been inactive for less than the specified number of days.'
name: inactive_filter_days
- - description: Returns the sensor registration(s) with matching hostname
+ - description: Returns the sensor registration(s) with matching hostname.
name: hostname
- description: The maximum amount of sensors to be returned.
name: limit
- description: List the CarbonBlack sensors
+ description: List the CarbonBlack sensors.
name: cb-edr-sensors-list
outputs:
- contextPath: CarbonBlackEDR.Sensor.systemvolume_total_size
@@ -958,10 +958,10 @@ script:
name: cb-edr-watchlist-delete
- arguments:
- default: true
- description: The watchlist ID
+ description: The watchlist ID.
name: id
required: true
- - description: The raw Carbon Black query that this watchlist matches
+ - description: The raw Carbon Black query that this watchlist matches.
name: search_query
- description: A description of the update.
name: description
@@ -972,7 +972,7 @@ script:
name: cb-edr-watchlist-update
- arguments:
- default: true
- description: The watchlist ID
+ description: The watchlist ID.
name: id
required: true
- description: Action type specified for the watchlist. Options for action_type are syslog, email and alert.
@@ -994,25 +994,25 @@ script:
Updates a Watchlist action that is specified using ID.
name: cb-edr-watchlist-update-action
- arguments:
- - description: Name of the watchlist
+ - description: Name of the watchlist.
name: name
required: true
- - description: The raw Carbon Black query that this watchlist matches
+ - description: The raw Carbon Black query that this watchlist matches.
name: search_query
required: true
- description: A description of the update.
name: description
- description: the type of watchlist. Valid values are ‘modules’ and ‘events’ for binary and process watchlists, respectively. Deafult is 'events'.
name: index_type
- description: Creates a new Watchlist within EDR,
+ description: Creates a new Watchlist within EDR.
name: cb-edr-watchlist-create
outputs:
- contextPath: CarbonBlackEDR.Watchlist.id
- description: An ID for the new watchlist
+ description: An ID for the new watchlist.
type: Number
- arguments:
- default: true
- description: The watchlist ID
+ description: The watchlist ID.
name: id
- description: The maximum amount of watchlists to be returned.
name: limit
@@ -1064,26 +1064,26 @@ script:
description: The name of this watchlist.
type: String
- arguments:
- - description: The banned hash
+ - description: The banned hash.
name: md5
required: true
- - description: Text description of block list
+ - description: Text description of block list.
name: text
required: true
- - description: The last time the hash was blocked or prevented from being executed
+ - description: The last time the hash was blocked or prevented from being executed.
name: last_ban_time
- - description: Total number of blocks on the banned list
+ - description: Total number of blocks on the banned list.
name: ban_count
- - description: Last hostname to block this hash
+ - description: Last hostname to block this hash.
name: last_ban_host
- description: Prevent execution of a specified md5 hash
+ description: Prevent execution of a specified md5 hash.
name: cb-edr-binary-ban
- arguments:
- description: The maximum hashs of result to retrieve.
name: limit
required: true
description: |-
- Returns a list of banned hashes
+ Returns a list of banned hashes.
name: cb-edr-binary-bans-list
outputs:
- contextPath: CarbonBlackEDR.BinaryBan.username
@@ -1132,11 +1132,11 @@ script:
description: The last hostname to block this hash.
type: String
- arguments:
- - description: Alert unique identifier
+ - description: Alert unique identifier.
isArray: true
name: alert_ids
required: true
- - description: "The requested status to \n"
+ - description: "The requested status to set for the alert."
name: status
auto: PREDEFINED
predefined:
@@ -1157,7 +1157,7 @@ script:
Updating Alerts requires an API key with Global Administrator privileges.
name: cb-edr-alert-update
- arguments:
- - description: Alert status to filter by
+ - description: Alert status to filter by.
name: status
auto: PREDEFINED
predefined:
@@ -1165,13 +1165,13 @@ script:
- In Progress
- Resolved
- False Positive
- - description: Alert username to filter by
+ - description: Alert username to filter by.
name: username
- - description: Alert feedname to filter by
+ - description: Alert feedname to filter by.
name: feedname
- - description: Alert hostname to filter by
+ - description: Alert hostname to filter by.
name: hostname
- - description: Alert report name (watchlist_id) to filter by
+ - description: Alert report name (watchlist_id) to filter by.
name: report
- description: Advanced query string. Accepts the same data as the search box. For more information on the query syntax see https://developer.carbonblack.com/resources/query_overview.pdf. If not provided, at least one other search field must be provided.
name: query
@@ -1212,7 +1212,7 @@ script:
description: The id of the sensor.
type: Number
- contextPath: CarbonBlackEDR.Alert.Results.feed_name
- description: The name of the source feed
+ description: The name of the source feed.
type: String
- contextPath: CarbonBlackEDR.Alert.Results.created_time
description: The alert creation time.
@@ -1322,10 +1322,10 @@ script:
type: Number
- arguments:
- default: true
- description: The md5 of the binary
+ description: The md5 of the binary.
name: md5
required: true
- description: Returns the metadata for the binary with the provided md5
+ description: Returns the metadata for the binary with the provided md5.
name: cb-edr-binary-summary
outputs:
- contextPath: CarbonBlackEDR.BinaryMetadata.host_count
@@ -1459,7 +1459,7 @@ script:
type: Date
- arguments:
- default: true
- description: The md5 hash of the binary
+ description: The md5 hash of the binary.
name: md5
required: true
description: Download the binary with this md5 hash.
@@ -1467,9 +1467,9 @@ script:
- arguments:
- description: Gets the name of the product this file is distributed with.
name: product_name
- - description: The md5 hash of this binary
+ - description: The md5 hash of this binary.
name: md5
- - description: Digital signature status
+ - description: Digital signature status.
name: digital_signature
auto: PREDEFINED
predefined:
@@ -1488,11 +1488,11 @@ script:
- description: |-
Sensor group this sensor was
assigned to at the time of process
- execution
+ execution.
name: group
- description: |-
Hostname of the computer on
- which the process was executed
+ which the process was executed.
name: hostname
- description: Sort rows by this field and order. server_added_timestamp desc by default.
name: sort
@@ -1510,7 +1510,7 @@ script:
name: limit
- description: Start at this row, 0 by default.
name: start
- description: Binary search
+ description: Binary search.
name: cb-edr-binary-search
outputs:
- contextPath: CarbonBlackEDR.BinarySearch.terms
@@ -1654,15 +1654,15 @@ script:
description: If present, Private build from FileVersionInformation. For more information check https://docs.microsoft.com/en-us/dotnet/api/system.diagnostics.fileversioninfo?redirectedfrom=MSDN&view=net-5.0
type: String
- arguments:
- - description: The sensor ID
+ - description: The sensor ID.
name: id
- default: true
- description: Query sensors with specified IP address
+ description: Query sensors with specified IP address.
isArray: true
name: ip
- - description: Query sensors with matching hostname
+ - description: Query sensors with matching hostname.
name: hostname
- description: Display information about the given sensor
+ description: Display information about the given sensor.
name: endpoint
outputs:
- contextPath: Endpoint.Hostname
@@ -1728,7 +1728,7 @@ script:
- contextPath: Endpoint.Processor
description: The model of the processor.
type: String
- dockerimage: demisto/python3:3.10.13.72123
+ dockerimage: demisto/python3:3.10.13.89009
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_45.md b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_45.md
new file mode 100644
index 000000000000..a55197e6ddbe
--- /dev/null
+++ b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_45.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### CBFindIP
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CBLiveGetFile_V2
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_46.md b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_46.md
new file mode 100644
index 000000000000..90aecb02822d
--- /dev/null
+++ b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_46.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### VMware Carbon Black EDR v2
+
+- Fixed an issue where alerts were not retrieved by the **created_time** sort field.
+- Updated the Docker image to: *demisto/python3:3.10.13.89009*.
diff --git a/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_47.md b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_47.md
new file mode 100644
index 000000000000..1b06293d5b2f
--- /dev/null
+++ b/Packs/Carbon_Black_Enterprise_Response/ReleaseNotes/2_1_47.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### VMware Carbon Black EDR v2
+
+- Improved implementation of the documentation.
diff --git a/Packs/Carbon_Black_Enterprise_Response/Scripts/CBFindIP/CBFindIP.yml b/Packs/Carbon_Black_Enterprise_Response/Scripts/CBFindIP/CBFindIP.yml
index b138cca3409c..23af799d071a 100644
--- a/Packs/Carbon_Black_Enterprise_Response/Scripts/CBFindIP/CBFindIP.yml
+++ b/Packs/Carbon_Black_Enterprise_Response/Scripts/CBFindIP/CBFindIP.yml
@@ -48,4 +48,4 @@ tests:
dependson:
must: []
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
diff --git a/Packs/Carbon_Black_Enterprise_Response/Scripts/CBLiveGetFile_V2/CBLiveGetFile_V2.yml b/Packs/Carbon_Black_Enterprise_Response/Scripts/CBLiveGetFile_V2/CBLiveGetFile_V2.yml
index b2c78806a1f5..ee4e8b7308bb 100644
--- a/Packs/Carbon_Black_Enterprise_Response/Scripts/CBLiveGetFile_V2/CBLiveGetFile_V2.yml
+++ b/Packs/Carbon_Black_Enterprise_Response/Scripts/CBLiveGetFile_V2/CBLiveGetFile_V2.yml
@@ -105,7 +105,7 @@ tags:
- endpoint
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No Test
dependson:
diff --git a/Packs/Carbon_Black_Enterprise_Response/pack_metadata.json b/Packs/Carbon_Black_Enterprise_Response/pack_metadata.json
index b19a774dec75..6a26d61b5179 100644
--- a/Packs/Carbon_Black_Enterprise_Response/pack_metadata.json
+++ b/Packs/Carbon_Black_Enterprise_Response/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Carbon Black Enterprise Response",
"description": "Query and respond with Carbon Black endpoint detection and response.",
"support": "xsoar",
- "currentVersion": "2.1.44",
+ "currentVersion": "2.1.47",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Change_Management/ReleaseNotes/1_0_4.md b/Packs/Change_Management/ReleaseNotes/1_0_4.md
new file mode 100644
index 000000000000..635d62ec67dd
--- /dev/null
+++ b/Packs/Change_Management/ReleaseNotes/1_0_4.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### IncidentState
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Change_Management/Scripts/IncidentState/IncidentState.yml b/Packs/Change_Management/Scripts/IncidentState/IncidentState.yml
index eec247ad1f25..2df66816c8e6 100644
--- a/Packs/Change_Management/Scripts/IncidentState/IncidentState.yml
+++ b/Packs/Change_Management/Scripts/IncidentState/IncidentState.yml
@@ -6,7 +6,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: IncidentState
runas: DBotWeakRole
diff --git a/Packs/Change_Management/pack_metadata.json b/Packs/Change_Management/pack_metadata.json
index 8f4faa5fa1c0..5c6c4d3a6bae 100644
--- a/Packs/Change_Management/pack_metadata.json
+++ b/Packs/Change_Management/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Change Management",
"description": "If you use Pan-Os or Panorama as your enterprise firewall and Jira or ServiceNow as your enterprise ticketing system, this pack will assist you to perform a well coordinated and documented process.",
"support": "xsoar",
- "currentVersion": "1.0.3",
+ "currentVersion": "1.0.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Cherwell/ReleaseNotes/1_0_19.md b/Packs/Cherwell/ReleaseNotes/1_0_19.md
new file mode 100644
index 000000000000..1c90d783db39
--- /dev/null
+++ b/Packs/Cherwell/ReleaseNotes/1_0_19.md
@@ -0,0 +1,21 @@
+
+#### Scripts
+
+##### CherwellIncidentUnlinkTask
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CherwellIncidentOwnTask
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CherwellGetIncident
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CherwellCreateIncident
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CherwellQueryIncidents
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CherwellUpdateIncident
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/Cherwell/Scripts/CherwellCreateIncident/CherwellCreateIncident.yml b/Packs/Cherwell/Scripts/CherwellCreateIncident/CherwellCreateIncident.yml
index ece46308df41..916fd07fdf9b 100644
--- a/Packs/Cherwell/Scripts/CherwellCreateIncident/CherwellCreateIncident.yml
+++ b/Packs/Cherwell/Scripts/CherwellCreateIncident/CherwellCreateIncident.yml
@@ -39,7 +39,7 @@ type: python
dependson:
must:
- Cherwell|||cherwell-create-business-object
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
tests:
- Cherwell Example Scripts - test
diff --git a/Packs/Cherwell/Scripts/CherwellGetIncident/CherwellGetIncident.yml b/Packs/Cherwell/Scripts/CherwellGetIncident/CherwellGetIncident.yml
index dec5b58eae46..b0620a66019c 100644
--- a/Packs/Cherwell/Scripts/CherwellGetIncident/CherwellGetIncident.yml
+++ b/Packs/Cherwell/Scripts/CherwellGetIncident/CherwellGetIncident.yml
@@ -49,7 +49,7 @@ tags: [Cherwell]
dependson:
must:
- Cherwell|||cherwell-get-business-object
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
tests:
- Cherwell Example Scripts - test
diff --git a/Packs/Cherwell/Scripts/CherwellIncidentOwnTask/CherwellIncidentOwnTask.yml b/Packs/Cherwell/Scripts/CherwellIncidentOwnTask/CherwellIncidentOwnTask.yml
index 5e66777d23f9..1a3823fb7c3b 100644
--- a/Packs/Cherwell/Scripts/CherwellIncidentOwnTask/CherwellIncidentOwnTask.yml
+++ b/Packs/Cherwell/Scripts/CherwellIncidentOwnTask/CherwellIncidentOwnTask.yml
@@ -10,7 +10,7 @@ commonfields:
id: CherwellIncidentOwnTask
version: -1
name: CherwellIncidentOwnTask
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
script: '-'
timeout: '0'
diff --git a/Packs/Cherwell/Scripts/CherwellIncidentUnlinkTask/CherwellIncidentUnlinkTask.yml b/Packs/Cherwell/Scripts/CherwellIncidentUnlinkTask/CherwellIncidentUnlinkTask.yml
index 661191dc7dde..38d01554af2e 100644
--- a/Packs/Cherwell/Scripts/CherwellIncidentUnlinkTask/CherwellIncidentUnlinkTask.yml
+++ b/Packs/Cherwell/Scripts/CherwellIncidentUnlinkTask/CherwellIncidentUnlinkTask.yml
@@ -13,7 +13,7 @@ name: CherwellIncidentUnlinkTask
script: '-'
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
tags: [Cherwell]
dependson:
diff --git a/Packs/Cherwell/Scripts/CherwellQueryIncidents/CherwellQueryIncidents.yml b/Packs/Cherwell/Scripts/CherwellQueryIncidents/CherwellQueryIncidents.yml
index 8809f0fa2605..da8b76a71abd 100644
--- a/Packs/Cherwell/Scripts/CherwellQueryIncidents/CherwellQueryIncidents.yml
+++ b/Packs/Cherwell/Scripts/CherwellQueryIncidents/CherwellQueryIncidents.yml
@@ -42,7 +42,7 @@ outputs:
script: '-'
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
tests:
- Cherwell Example Scripts - test
diff --git a/Packs/Cherwell/Scripts/CherwellUpdateIncident/CherwellUpdateIncident.yml b/Packs/Cherwell/Scripts/CherwellUpdateIncident/CherwellUpdateIncident.yml
index 3ac9cadf5021..aca4fa261e2d 100644
--- a/Packs/Cherwell/Scripts/CherwellUpdateIncident/CherwellUpdateIncident.yml
+++ b/Packs/Cherwell/Scripts/CherwellUpdateIncident/CherwellUpdateIncident.yml
@@ -44,7 +44,7 @@ outputs:
script: '-'
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
subtype: python3
tests:
- Cherwell Example Scripts - test
diff --git a/Packs/Cherwell/pack_metadata.json b/Packs/Cherwell/pack_metadata.json
index f6828b4688a6..e8f3c92977b8 100644
--- a/Packs/Cherwell/pack_metadata.json
+++ b/Packs/Cherwell/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cherwell",
"description": "Cloud-based IT service management solution",
"support": "xsoar",
- "currentVersion": "1.0.18",
+ "currentVersion": "1.0.19",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.py b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.py
index 9abc59ddb2eb..8eae698d72b9 100644
--- a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.py
+++ b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.py
@@ -1,10 +1,11 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
import enum
import http
from collections.abc import Callable
from typing import Any, TypeVar
-import demistomock as demisto # noqa: F401
-from CommonServerPython import * # noqa: F401
BASE_URL = 'https://api.umbrella.com'
@@ -83,17 +84,39 @@ def login(self) -> None:
Log in to the API using the API key and API secret.
The access token is stored in the headers of the request.
"""
- response = self._http_request(
- method='POST',
- url_suffix=Client.AUTH_SUFFIX,
- auth=(self.api_key, self.api_secret),
- )
+ access_token = self.get_access_token()
+ self._headers['Authorization'] = f'Bearer {access_token}'
- try:
- access_token = response['access_token']
- self._headers['Authorization'] = f'Bearer {access_token}'
- except Exception as e:
- raise DemistoException(f'Failed logging in: {response}') from e
+ def get_access_token(self):
+ """
+ Get an access token that was previously created if it is still valid, else, generate a new access token from
+ the API key and secret.
+ """
+ # Check if there is an existing valid access token
+ integration_context = get_integration_context()
+ if integration_context.get('access_token') and integration_context.get('expiry_time') > date_to_timestamp(datetime.now()):
+ return integration_context.get('access_token')
+ else:
+ try:
+ res = self._http_request(
+ method='POST',
+ url_suffix=Client.AUTH_SUFFIX,
+ headers={'Content-Type': 'application/x-www-form-urlencoded'},
+ auth=(self.api_key, self.api_secret),
+ data={'grant_type': 'client_credentials'}
+ )
+ if res.get('access_token'):
+ expiry_time = date_to_timestamp(datetime.now(), date_format='%Y-%m-%dT%H:%M:%S')
+ expiry_time += res.get('expires_in', 0) * 1000 - 10
+ context = {
+ 'access_token': res.get('access_token'),
+ 'expiry_time': expiry_time
+ }
+ set_integration_context(context)
+ return res.get('access_token')
+ except Exception as e:
+ return_error(f'Error occurred while creating an access token. Please check the instance configuration.'
+ f'\n\n{e.args[0]}')
def _get_destination_payload(
self,
diff --git a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.yml b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.yml
index f3b7588ab93e..41c154ea74ad 100644
--- a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.yml
+++ b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2.yml
@@ -29,27 +29,18 @@ script:
- name: destination_list_id
description: The ID of the destination list. Destination lists can be fetched with the `umbrella-destination-lists-list` command.
required: true
- isArray: false
- name: destination_ids
description: Comma-separated list of destination IDs to be retrieved from a list of destinations.
- required: false
isArray: true
- name: destinations
description: Comma-separated list of destinations to retrieve, a destination may be a domain, URL, or IP address.
- required: false
isArray: true
- name: page
description: Page number of paginated results. Minimum 1; Default 1.
- required: false
- isArray: false
- name: page_size
description: The number of items per page. Minimum 1; Maximum 100; Default 50.
- required: false
- isArray: false
- name: limit
description: The number of items per page. Minimum 1.
- required: false
- isArray: false
defaultValue: '50'
outputs:
- type: String
@@ -73,15 +64,11 @@ script:
- name: destination_list_id
description: The ID of the destination list. Destination lists can be fetched with the `umbrella-destination-lists-list` command.
required: true
- isArray: false
- name: destinations
description: Comma-separated list of destinations. A destination may be a URL, IPv4, CIDR or fully qualified domain name.
required: true
- isArray: false
- name: comment
description: A comment about all the inserted destinations.
- required: false
- isArray: false
defaultValue: Added from XSOAR
outputs:
- type: Number
@@ -126,7 +113,6 @@ script:
- name: destination_list_id
description: The ID of the destination list. Destination lists can be fetched with the `umbrella-destination-lists-list` command.
required: true
- isArray: false
- name: destination_ids
description: Comma-separated list of destination IDs. Destinations can be fetched with the `umbrella-destination-list` command.
required: true
@@ -173,20 +159,12 @@ script:
arguments:
- name: destination_list_id
description: The ID of the destination list to retrieve.
- required: false
- isArray: false
- name: page
description: Page number of paginated results. Minimum 1; Default 1.
- required: false
- isArray: false
- name: page_size
description: The number of items per page. Minimum 1; Maximum 100; Default 50.
- required: false
- isArray: false
- name: limit
description: The maximum number of records to retrieve. Minimum 1.
- required: false
- isArray: false
defaultValue: '50'
outputs:
- type: Number
@@ -242,8 +220,6 @@ script:
arguments:
- name: bundle_type
description: The type of the Umbrella policy associated with the destination list. If the field is not specified, the default value is 'DNS'.
- required: false
- isArray: false
auto: PREDEFINED
predefined:
- 'DNS'
@@ -251,7 +227,6 @@ script:
- name: access
description: 'The type of access for the destination list. Valid values are "allow" or "block". Accepted types for destination list with the access "allow" are: DOMAIN, IPv4 and CIDR. Accepted types for destination list with the access "block" are: URL and DOMAIN.'
required: true
- isArray: false
auto: PREDEFINED
predefined:
- 'allow'
@@ -259,7 +234,6 @@ script:
- name: is_global
description: Specifies whether the destination list is a global destination list. There is only one default destination list of type 'allow' or 'block' for an organization.
required: true
- isArray: false
auto: PREDEFINED
predefined:
- 'True'
@@ -267,15 +241,11 @@ script:
- name: name
description: The name of the destination list.
required: true
- isArray: false
- name: destinations
description: Comma-separated list of destinations. A destination may be a URL, IPv4, CIDR or fully qualified domain name.
- required: false
isArray: true
- name: destinations_comment
description: A comment about all the inserted destinations.
- required: false
- isArray: false
defaultValue: Added from XSOAR
outputs:
- type: Number
@@ -320,11 +290,9 @@ script:
- name: destination_list_id
description: The ID of the destination list. Destination lists can be fetched with the `umbrella-destination-lists-list` command.
required: true
- isArray: false
- name: name
description: The name of the destination list.
required: true
- isArray: false
outputs:
- type: Number
contextPath: Umbrella.DestinationLists.id
@@ -368,7 +336,6 @@ script:
- name: destination_list_id
description: The ID of the destination list. Destination lists can be fetched with the `umbrella-destination-lists-list` command.
required: true
- isArray: false
- arguments:
- description: Organization ID.
name: orgId
@@ -483,7 +450,7 @@ script:
script: ''
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.13.87159
isfetch: false
fromversion: 6.9.0
tests:
diff --git a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2_test.py b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2_test.py
index fb8439a00df7..1e8bce64ee26 100644
--- a/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2_test.py
+++ b/Packs/Cisco-umbrella-cloud-security/Integrations/CiscoUmbrellaCloudSecurityv2/CiscoUmbrellaCloudSecurityv2_test.py
@@ -59,7 +59,7 @@ def test_list_destinations_command(requests_mock, mock_client):
- A destination list ID
When:
- - list_destinations_command
+ - Running the umbrella-destinations-list command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -111,7 +111,7 @@ def test_list_destinations_command_fetch_destinations(requests_mock, mock_client
- A destination list ID
When:
- - list_destinations_command
+ - Running the umbrella-destinations-list command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -168,7 +168,7 @@ def test_add_destinations_command(requests_mock, mock_client):
- A destination list ID and destinations
When:
- - add_destinations_command
+ - Running the umbrella-destination-add command.
Then:
- Ensure that the CommandResults raw_response is correct.
@@ -206,7 +206,7 @@ def test_delete_destination_command(requests_mock, mock_client):
- A destination list ID and destination IDs
When:
- - delete_destination_command
+ - Running the umbrella-destination-delete command.
Then:
- Ensure that the CommandResults readable_output is correct.
@@ -243,7 +243,7 @@ def test_list_destination_lists_command(requests_mock, mock_client):
- A destination list ID
When:
- - list_destination_lists_command
+ - Running the umbrella-destination-lists-list command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -280,7 +280,7 @@ def test_list_destination_lists_command_list_request(requests_mock, mock_client)
- Nothing
When:
- - list_destination_lists_command
+ - Running the umbrella-destination-lists-list command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -314,7 +314,7 @@ def test_create_destination_list_command(requests_mock, mock_client):
for a new destination list
When:
- - create_destination_list_command
+ - Running the umbrella-destination-list-create command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -356,7 +356,7 @@ def test_update_destination_list_command(requests_mock, mock_client):
- A destination list ID and a new name
When:
- - update_destination_list_command
+ - Running the umbrella-destination-list-update command.
Then:
- Ensure that the CommandResults outputs_prefix is correct.
@@ -418,3 +418,28 @@ def test_delete_destination_list_command(requests_mock, mock_client):
assert command_results.readable_output == expected_readable_output
assert command_results.raw_response == response
+
+
+def test_get_access_token(requests_mock, mock_client):
+ """
+ Scenario:
+ - Test the flow of getting an access token
+ When:
+ - Running the get_access_token method.
+ Then:
+ - Ensure that an access token is returned.
+ """
+
+ response = {
+ "token_type": "bearer",
+ "access_token": "Pichu",
+ "expires_in": 3600
+ }
+ requests_mock.post(
+ url=f'{CiscoUmbrellaCloudSecurityv2.BASE_URL}/auth/v2/token',
+ json=response
+ )
+
+ access_token = CiscoUmbrellaCloudSecurityv2.Client.get_access_token(mock_client)
+
+ assert access_token == response.get('access_token')
diff --git a/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_8.md b/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_8.md
new file mode 100644
index 000000000000..bebf1a6fb2fd
--- /dev/null
+++ b/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_8.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Cisco Umbrella Cloud Security v2
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_9.md b/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_9.md
new file mode 100644
index 000000000000..d43362467191
--- /dev/null
+++ b/Packs/Cisco-umbrella-cloud-security/ReleaseNotes/2_0_9.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cisco Umbrella Cloud Security v2
+
+- Improved implementation of retrieving the *Access Token* in the authentication process.
diff --git a/Packs/Cisco-umbrella-cloud-security/pack_metadata.json b/Packs/Cisco-umbrella-cloud-security/pack_metadata.json
index 353597663e1f..ed744f5f0bb9 100644
--- a/Packs/Cisco-umbrella-cloud-security/pack_metadata.json
+++ b/Packs/Cisco-umbrella-cloud-security/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco Umbrella cloud security",
"description": "Basic integration with Cisco Umbrella that allows you to add domains to destination lists (e.g. global block / allow)",
"support": "xsoar",
- "currentVersion": "2.0.7",
+ "currentVersion": "2.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -18,4 +18,4 @@
"marketplacev2"
],
"certification": "certified"
-}
\ No newline at end of file
+}
diff --git a/Packs/CiscoSMA/Integrations/CiscoSMA/CiscoSMA.yml b/Packs/CiscoSMA/Integrations/CiscoSMA/CiscoSMA.yml
index 8e661385a1d3..1818b52d5c84 100644
--- a/Packs/CiscoSMA/Integrations/CiscoSMA/CiscoSMA.yml
+++ b/Packs/CiscoSMA/Integrations/CiscoSMA/CiscoSMA.yml
@@ -100,7 +100,7 @@ script:
script: ''
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
isfetch: true
commands:
- name: cisco-sma-spam-quarantine-message-search
diff --git a/Packs/CiscoSMA/ReleaseNotes/1_1_29.md b/Packs/CiscoSMA/ReleaseNotes/1_1_29.md
new file mode 100644
index 000000000000..14aaff8a97da
--- /dev/null
+++ b/Packs/CiscoSMA/ReleaseNotes/1_1_29.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Cisco Security Management Appliance
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/CiscoSMA/pack_metadata.json b/Packs/CiscoSMA/pack_metadata.json
index 4ec0eadd69ae..5e0580b749ba 100644
--- a/Packs/CiscoSMA/pack_metadata.json
+++ b/Packs/CiscoSMA/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CiscoSMA",
"description": "The Security Management Appliance (SMA) is used to centralize services from Email Security Appliances (ESAs) and Web Security Appliances (WSAs).",
"support": "xsoar",
- "currentVersion": "1.1.28",
+ "currentVersion": "1.1.29",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CiscoStealthwatch/Integrations/CiscoStealthwatch/CiscoStealthwatch.yml b/Packs/CiscoStealthwatch/Integrations/CiscoStealthwatch/CiscoStealthwatch.yml
index 4f5fe10c7317..e267bd47dd11 100644
--- a/Packs/CiscoStealthwatch/Integrations/CiscoStealthwatch/CiscoStealthwatch.yml
+++ b/Packs/CiscoStealthwatch/Integrations/CiscoStealthwatch/CiscoStealthwatch.yml
@@ -283,7 +283,7 @@ script:
- contextPath: CiscoStealthwatch.SecurityEventResults.hitCount
description: The hit count of the security event.
type: str
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/CiscoStealthwatch/ReleaseNotes/1_0_31.md b/Packs/CiscoStealthwatch/ReleaseNotes/1_0_31.md
new file mode 100644
index 000000000000..d0f69b8790ec
--- /dev/null
+++ b/Packs/CiscoStealthwatch/ReleaseNotes/1_0_31.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Cisco Secure Network Analytics (Stealthwatch)
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/CiscoStealthwatch/pack_metadata.json b/Packs/CiscoStealthwatch/pack_metadata.json
index 9699c9a26dd7..8ebd7c724660 100644
--- a/Packs/CiscoStealthwatch/pack_metadata.json
+++ b/Packs/CiscoStealthwatch/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco Secure Network Analytics (Stealthwatch)",
"description": "Cisco Secure Network Analytics (Stealthwatch) provides scalable visibility and security analytics.",
"support": "xsoar",
- "currentVersion": "1.0.30",
+ "currentVersion": "1.0.31",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response.yml
index 7556d269572e..95c6dd3e2608 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response.yml
@@ -1740,9 +1740,28 @@ inputs:
description: The alert ID.
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas.
+ description: A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation.
playbookInputQuery:
- key: ResolveIP
value:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response_README.md
index 58d642935b70..212a333737d9 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cloud_Token_Theft_Response_README.md
@@ -25,24 +25,6 @@ The **Cloud Token Theft Response Playbook** provides a structured and comprehens
- Handles false positives identified during the investigation
- Handles true positives by initiating appropriate response actions
-### Supported Alerts
-
-| Alert Name | CSP |
-|----------------------------------------------------|-------|
-| Suspicious usage of AWS Lambda’s token | AWS |
-| Suspicious usage of AWS Lambda’s role | AWS |
-| Suspicious usage of EC2 token | AWS |
-| Remote usage of an AWS service token | AWS |
-| Remote usage of an AWS EKS token | AWS |
-| Suspicious usage of an AWS EKS token | AWS |
-| Suspicious usage of an AWS ECS token | AWS |
-| Remote usage of an AWS ECS token | AWS |
-| Suspicious usage of AWS service token | AWS |
-| Remote usage of an App engine Service Account token | GCP |
-| Suspicious usage of App engine Service Account token| GCP |
-| Remote usage of VM Service Account token | GCP |
-| Suspicious usage of VM Service Account toke | GCP |
-
---
## Dependencies
@@ -51,15 +33,15 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Cloud Threat Hunting - Persistence
-* IP Enrichment - Generic v2
* Handle False Positive Alerts
-* Cloud Response - Generic
-* Cloud Credentials Rotation - Generic
* Cloud Token Theft - Set Verdict
+* Cloud Threat Hunting - Persistence
+* Cloud Enrichment - Generic
+* Cloud Credentials Rotation - Generic
* Entity Enrichment - Generic v3
+* IP Enrichment - Generic v2
* TIM - Indicator Relationships Analysis
-* Cloud Enrichment - Generic
+* Cloud Response - Generic
### Integrations
@@ -82,7 +64,7 @@ This playbook does not use any integrations.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| alert_id | The alert ID. | alert.investigationId | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. | lists.PrivateIPs | Optional |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | True | Optional |
| earlyContainment | Whether to execute early containment. This action allows you to respond rapidly but have higher probability for false positives. | False | Optional |
| VPNIPList | This input can process to types of data: 1. A comma separated list of IP addresses assigned by the VPN provider. \(using a XSIAM list or an hardcoded array\) 2. A comma separated list of CIDRs. 3. A link to an IP addresses list which will be processed and extract the IP dynamically with each execution. | | Optional |
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining.yml
index 2590aa974df9..6464cbb81293 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining.yml
@@ -1164,7 +1164,26 @@ inputs:
description: Determines whether to convert the IP address to a hostname using a DNS query (True/ False).
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: "A list of internal IP ranges to check IP addresses against. \nFor IP Enrichment - Generic v2 playbook."
playbookInputQuery:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining_README.md
index c081607fdf5a..fa8eff07f567 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Cryptomining_README.md
@@ -18,10 +18,10 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Cloud Credentials Rotation - Generic
* Cortex XDR - XCloud Cryptojacking - Set Verdict
-* Cloud Response - Generic
* Cortex XDR - Cloud Enrichment
+* Cloud Credentials Rotation - Generic
+* Cloud Response - Generic
### Integrations
@@ -33,12 +33,12 @@ This playbook does not use any integrations.
### Commands
-* closeInvestigation
+* send-mail
* setIncident
+* xdr-get-cloud-original-alerts
* xdr-update-incident
* xdr-get-incident-extra-data
-* xdr-get-cloud-original-alerts
-* send-mail
+* closeInvestigation
## Playbook Inputs
@@ -52,12 +52,12 @@ This playbook does not use any integrations.
| alert_id | The alert ID. | | Optional |
| cloudProvider | The cloud service provider involved. | PaloAltoNetworksXDR.OriginalAlert.event.cloud_provider | Optional |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | lists.PrivateIPs | Optional |
| autoBlockIndicators | Whether to block the indicators automatically. | False | Optional |
| autoAccessKeyRemediation | Whether to execute the user remediation flow automatically. | False | Optional |
| autoResourceRemediation | Whether to execute the resource remediation flow automatically. | False | Optional |
| autoUserRemediation | Whether to execute the user remediation flow automatically. | False | Optional |
-| credentialsRemediationType | The response playbook provides the following remediation actions using AWS, MSGraph Users, GCP and GSuite Admin:
Reset: By entering "Reset" in the input, the playbook will execute password reset. Supports: AWS, MSGraph Users, GCP and GSuite Admin.
Revoke: By entering "Revoke" in the input, the GCP will revoke the access key, GSuite Admin will revoke the access token and the MSGraph Users will revoke the session. Supports: GCP, GSuite Admin and MSGraph Users.
Deactivate - By entering "Deactivate" in the input, the playbook will execute access key deactivation. Supports: AWS.
ALL: By entering "ALL" in the input, the playbook will execute the all remediation actions provided for each CSP. | | Optional |
+| credentialsRemediationType | The response playbook provides the following remediation actions using AWS, MSGraph Users, GCP and GSuite Admin:
Reset: By entering "Reset" in the input, the playbook will execute password reset. Supports: AWS, MSGraph Users, GCP and GSuite Admin.
Revoke: By entering "Revoke" in the input, the GCP will revoke the access key, GSuite Admin will revoke the access token and the MSGraph Users will revoke the session. Supports: GCP, GSuite Admin and MSGraph Users.
Deactivate - By entering "Deactivate" in the input, the playbook will execute access key deactivation. Supports: AWS.
ALL: By entering "ALL" in the input, the playbook will execute the all remediation actions provided for each CSP. | Reset | Optional |
| AWS-accessKeyRemediationType | Choose the remediation type for the user's access key.
AWS available types: Disable - for disabling the user's access key. Delete - for the user's access key deletion. | Disable | Optional |
| AWS-resourceRemediationType | Choose the remediation type for the instances created.
AWS available types: Stop - for stopping the instances. Terminate - for terminating the instances. | Stop | Optional |
| AWS-userRemediationType | Choose the remediation type for the user involved.
AWS available types: Delete - for the user deletion. Revoke - for revoking the user's credentials. | Revoke | Optional |
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment.yml
index e0588b7a6c39..5f6bcaaf6450 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment.yml
@@ -953,7 +953,26 @@ inputs:
description: Determines whether to convert the IP address to a hostname using a DNS query (True/ False).
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: A list of internal IP ranges to check IP addresses against. \nFor IP Enrichment - Generic v2 playbook.
playbookInputQuery:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment_README.md
index 3a2831e24551..9bee8c115a83 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Cloud_Enrichment_README.md
@@ -24,15 +24,15 @@ This playbook does not use any integrations.
### Scripts
-* IsInCidrRanges
* Set
+* IsInCidrRanges
* CopyContextToField
* If-Then-Else
### Commands
-* setIncident
* ip
+* setIncident
## Playbook Inputs
@@ -41,7 +41,7 @@ This playbook does not use any integrations.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. \\nFor IP Enrichment - Generic v2 playbook. | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. \\nFor IP Enrichment - Generic v2 playbook. | lists.PrivateIPs | Optional |
## Playbook Outputs
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response.yml
index 1256db08f796..e634c3c90125 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response.yml
@@ -2284,7 +2284,26 @@ inputs:
description: The alert ID.
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation.
playbookInputQuery:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response_README.md
index 5c28d38d2217..7eda49d760bf 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_XCloud_Token_Theft_Response_README.md
@@ -25,24 +25,6 @@ The **Cloud Token Theft Response Playbook** provides a structured and comprehens
- Handles false positives identified during the investigation.
- Handles true positives by initiating appropriate response actions.
-### Supported Alerts
-
-| Alert Name | CSP |
-|----------------------------------------------------|-------|
-| Suspicious usage of AWS Lambda’s token | AWS |
-| Suspicious usage of AWS Lambda’s role | AWS |
-| Suspicious usage of EC2 token | AWS |
-| Remote usage of an AWS service token | AWS |
-| Remote usage of an AWS EKS token | AWS |
-| Suspicious usage of an AWS EKS token | AWS |
-| Suspicious usage of an AWS ECS token | AWS |
-| Remote usage of an AWS ECS token | AWS |
-| Suspicious usage of AWS service token | AWS |
-| Remote usage of an App engine Service Account token | GCP |
-| Suspicious usage of App engine Service Account token| GCP |
-| Remote usage of VM Service Account token | GCP |
-| Suspicious usage of VM Service Account toke | GCP |
-
---
## Dependencies
@@ -51,14 +33,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* IP Enrichment - Generic v2
+* Entity Enrichment - Generic v3
+* Cortex XDR - XCloud Token Theft - Set Verdict
* Cloud Enrichment - Generic
-* Cloud Threat Hunting - Persistence
* Cloud Credentials Rotation - Generic
-* Cloud Response - Generic
+* Cloud Threat Hunting - Persistence
* TIM - Indicator Relationships Analysis
-* Cortex XDR - XCloud Token Theft - Set Verdict
-* IP Enrichment - Generic v2
-* Entity Enrichment - Generic v3
+* Cloud Response - Generic
### Integrations
@@ -70,9 +52,9 @@ This playbook does not use any integrations.
### Commands
-* setIncident
* xdr-update-incident
* xdr-get-cloud-original-alerts
+* setIncident
* closeInvestigation
## Playbook Inputs
@@ -82,7 +64,7 @@ This playbook does not use any integrations.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| alert_id | The alert ID. | alert.investigationId | Optional |
-| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. | | Optional |
+| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. | lists.PrivateIPs | Optional |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | False | Optional |
| earlyContainment | Whether to execute early containment. This action allows you to respond rapidly but have higher probability for false positives. | True | Optional |
| VPNIPList | This input can process two types of data: 1. A comma-separated list of internal IPs assigned by the VPN provider using a XSIAM list or an hardcoded array. 2. A link to an IP list which will be processed and extract the IP dynamically which each execution.
For CIDRs, use the InternalRange input. | | Optional |
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment.yml
index 4319d7f69000..8b3f1be04b11 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment.yml
@@ -490,7 +490,26 @@ inputs:
description: Determines whether to convert the IP address to a hostname using a DNS query (True/ False).
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: "A list of internal IP ranges to check IP addresses against. \nFor IP Enrichment - Generic v2 playbook."
playbookInputQuery:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment_README.md
index 6515b4d665b9..67aa70387d86 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Alert_Enrichment_README.md
@@ -14,32 +14,38 @@ The playbook collects or enriches the following data:
-ASN
## Dependencies
+
This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Account Enrichment - Generic v2.1
+
* IP Enrichment - Generic v2
+* Account Enrichment - Generic v2.1
### Integrations
-* CoreIOCs
-* CortexCoreIR
+
+This playbook does not use any integrations.
### Scripts
-* IsInCidrRanges
+
* Set
+* IsInCidrRanges
### Commands
+
* ip
## Playbook Inputs
+
---
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | lists.PrivateIPs | Optional |
## Playbook Outputs
+
---
| **Path** | **Description** | **Type** |
@@ -58,5 +64,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| ActiveDirectory.Users | The ActiveDirectory Users object. | unknown |
## Playbook Image
+
---
-![XCloud Alert Enrichment](../doc_files/XCloud_Alert_Enrichment.png)
\ No newline at end of file
+
+![XCloud Alert Enrichment](../doc_files/XCloud_Alert_Enrichment.png)
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining.yml
index 8b70d87f46cd..6973ed35c5cf 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining.yml
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining.yml
@@ -1231,7 +1231,26 @@ inputs:
description: Determines whether to convert the IP address to a hostname using a DNS query (True/ False).
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: "A list of internal IP ranges to check IP addresses against. \nFor IP Enrichment - Generic v2 playbook."
playbookInputQuery:
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining_README.md
index 3bb22bd33043..a6a15f0f7269 100644
--- a/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining_README.md
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-XCloud_Cryptomining_README.md
@@ -29,12 +29,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* XCloud Alert Enrichment
+* Cloud Credentials Rotation - Generic
+* Ticket Management - Generic
* Cloud Response - Generic
* XCloud Cryptojacking - Set Verdict
-* Ticket Management - Generic
-* Cloud Credentials Rotation - Generic
* Handle False Positive Alerts
-* XCloud Alert Enrichment
### Integrations
@@ -42,14 +42,14 @@ This playbook does not use any integrations.
### Scripts
-* IncreaseAlertSeverity
+* IncreaseIncidentSeverity
* LoadJSON
### Commands
* closeInvestigation
-* core-get-cloud-original-alerts
* send-mail
+* core-get-cloud-original-alerts
* setParentIncidentFields
## Playbook Inputs
@@ -65,12 +65,12 @@ This playbook does not use any integrations.
| cloudProvider | The cloud service provider involved. | alert.cloudprovider | Optional |
| alert_id | The alert ID. | | Optional |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. For IP Enrichment - Generic v2 playbook. | lists.PrivateIPs | Optional |
| autoAccessKeyRemediation | Whether to execute the user remediation flow automatically. | False | Optional |
| autoBlockIndicators | Whether to block the indicators automatically. | False | Optional |
| autoResourceRemediation | Whether to execute the resource remediation flow automatically. | False | Optional |
| autoUserRemediation | Whether to execute the user remediation flow automatically. | False | Optional |
-| credentialsRemediationType | The response playbook provides the following remediation actions using AWS, MSGraph Users, GCP and GSuite Admin:
Reset: By entering "Reset" in the input, the playbook will execute password reset. Supports: AWS, MSGraph Users, GCP and GSuite Admin.
Revoke: By entering "Revoke" in the input, the GCP will revoke the access key, GSuite Admin will revoke the access token and the MSGraph Users will revoke the session. Supports: GCP, GSuite Admin and MSGraph Users.
Deactivate - By entering "Deactivate" in the input, the playbook will execute access key deactivation. Supports: AWS.
ALL: By entering "ALL" in the input, the playbook will execute the all remediation actions provided for each CSP. | | Optional |
+| credentialsRemediationType | The response playbook provides the following remediation actions using AWS, MSGraph Users, GCP and GSuite Admin:
Reset: By entering "Reset" in the input, the playbook will execute password reset. Supports: AWS, MSGraph Users, GCP and GSuite Admin.
Revoke: By entering "Revoke" in the input, the GCP will revoke the access key, GSuite Admin will revoke the access token and the MSGraph Users will revoke the session. Supports: GCP, GSuite Admin and MSGraph Users.
Deactivate - By entering "Deactivate" in the input, the playbook will execute access key deactivation. Supports: AWS.
ALL: By entering "ALL" in the input, the playbook will execute the all remediation actions provided for each CSP. | Reset | Optional |
| AWS-accessKeyRemediationType | Choose the remediation type for the user's access key.
AWS available types: Disable - for disabling the user's access key. Delete - for the user's access key deletion. | Disable | Optional |
| AWS-resourceRemediationType | Choose the remediation type for the instances created.
AWS available types: Stop - for stopping the instances. Terminate - for terminating the instances. | Stop | Optional |
| AWS-userRemediationType | Choose the remediation type for the user involved.
AWS available types: Delete - for the user deletion. Revoke - for revoking the user's credentials. | Revoke | Optional |
diff --git a/Packs/CloudIncidentResponse/ReleaseNotes/1_0_15.md b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_15.md
new file mode 100644
index 000000000000..08c50952ab59
--- /dev/null
+++ b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_15.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### XCloudProviderWidget
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### displayCloudIndicators
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CloudIncidentResponse/ReleaseNotes/1_0_16.md b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_16.md
new file mode 100644
index 000000000000..fb6113e396f2
--- /dev/null
+++ b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_16.md
@@ -0,0 +1,26 @@
+
+#### Playbooks
+
+##### Cortex XDR - XCloud Token Theft Response
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Cortex XDR - XCloud Cryptojacking
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Cortex XDR - Cloud Enrichment
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### XCloud Cryptojacking
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### XCloud Alert Enrichment
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Cloud Token Theft Response
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
diff --git a/Packs/CloudIncidentResponse/Scripts/XCloudProviderWidget/XCloudProviderWidget.yml b/Packs/CloudIncidentResponse/Scripts/XCloudProviderWidget/XCloudProviderWidget.yml
index a107ede8c6ce..7c690d8a7bdd 100644
--- a/Packs/CloudIncidentResponse/Scripts/XCloudProviderWidget/XCloudProviderWidget.yml
+++ b/Packs/CloudIncidentResponse/Scripts/XCloudProviderWidget/XCloudProviderWidget.yml
@@ -10,7 +10,7 @@ comment: 'This script returns an HTML result of the cloud providers in the incid
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.8.0
tests:
diff --git a/Packs/CloudIncidentResponse/Scripts/displayCloudIndicators/displayCloudIndicators.yml b/Packs/CloudIncidentResponse/Scripts/displayCloudIndicators/displayCloudIndicators.yml
index 4f8cd5ac2e01..2e513f4e2344 100644
--- a/Packs/CloudIncidentResponse/Scripts/displayCloudIndicators/displayCloudIndicators.yml
+++ b/Packs/CloudIncidentResponse/Scripts/displayCloudIndicators/displayCloudIndicators.yml
@@ -10,7 +10,7 @@ comment: Display the Cloud indicators found in a dynamic-section
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.8.0
tests:
diff --git a/Packs/CloudIncidentResponse/pack_metadata.json b/Packs/CloudIncidentResponse/pack_metadata.json
index 84dba991c9ee..158bf363e2a6 100644
--- a/Packs/CloudIncidentResponse/pack_metadata.json
+++ b/Packs/CloudIncidentResponse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cloud Incident Response",
"description": "This content Pack helps you automate collection, investigation, and remediation of incidents related to cloud infrastructure activities in AWS, Azure, and GCP.",
"support": "xsoar",
- "currentVersion": "1.0.14",
+ "currentVersion": "1.0.16",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.py b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.py
new file mode 100644
index 000000000000..98d014f64e90
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.py
@@ -0,0 +1,351 @@
+import demistomock as demisto
+from CommonServerPython import *
+import hashlib
+
+NUM_OF_RETRIES = 3
+BACKOFF_FACTOR = 1.0
+API_VERSION = 'v2'
+MAX_EVENTS_PER_TYPE = 50000
+PAGE_SIZE = 10000
+MAX_AUDIT_API_COUNT = 10000
+ALERT_TIME_FIELD = 'latestTimestampUsecs'
+AUDIT_LOGS_TIME_FIELD = 'timestampUsecs'
+
+
+class EventType:
+ """
+ Class used to as Enum for the Cohesity event type
+ """
+ alert = 'Alert'
+ audit_log = 'Audit Log'
+
+
+class Client(BaseClient):
+ """
+ Client class to interact with Cohesity Helios.
+ """
+
+ def get_alerts(self, fetch_start_timestamp: int, fetch_end_timestamp: int) -> dict:
+ request_params = {
+ 'startDateUsecs': fetch_start_timestamp,
+ 'endDateUsecs': fetch_end_timestamp
+ }
+ res = self._http_request( # type: ignore
+ method='GET',
+ url_suffix='/mcm/alerts',
+ params=request_params,
+ retries=NUM_OF_RETRIES,
+ backoff_factor=BACKOFF_FACTOR,
+ resp_type='response'
+ )
+ # In case there are no results the API returns 'null' which parsed into None object by response.json().
+ try:
+ json_res = res.json() or {}
+ except ValueError as exception:
+ raise DemistoException(f'Failed to parse response into json object. Response: {res.content}', exception, res)
+
+ return json_res
+
+ def get_audit_logs(self, start_time: int, end_time: int) -> dict:
+ request_params = {
+ 'startTimeUsecs': start_time,
+ 'endTimeUsecs': end_time,
+ 'count': MAX_AUDIT_API_COUNT
+ }
+
+ return self._http_request(
+ method='GET',
+ url_suffix='/mcm/audit-logs',
+ params=request_params,
+ retries=NUM_OF_RETRIES,
+ backoff_factor=BACKOFF_FACTOR,
+ )
+
+
+''' HELPER FUNCTIONS '''
+
+
+def adjust_and_dedup_elements(new_elements: list[dict], existing_element_ids: list[str], time_field_name: str) -> list[dict]:
+ """
+ Filter out all the elements from the new_elements list that their ID does not appear in the existing_element_ids list.
+ The new elements also get their '_time' field set.
+ Args:
+ new_elements (list[dict]): The list of elements to filter.
+ existing_element_ids (list[str]): The list of IDs to remove.
+ time_field_name (str): The name of the field containing the time info.
+
+ Return:
+ list[dict]: a subset of new_elements that their ID did not appear in existing_element_ids.
+ """
+ filtered_list: list = []
+ for element in new_elements:
+ if element.get('id') not in existing_element_ids:
+ try:
+ element['_time'] = timestamp_to_datestring(element.get(time_field_name) / 1000) # type: ignore[operator]
+
+ except TypeError as e:
+ # modeling rule will default on ingestion time if _time is missing
+ demisto.error(f'Could not parse _time field, for event {element}: {e}')
+
+ filtered_list.append(element)
+ return filtered_list
+
+
+def get_earliest_event_ids_with_the_same_time(events: list, time_field: str) -> list[str]:
+ """
+ Upon receiving a descending list of events, returns the ID(s) of the events that were the earliest.
+ Args:
+ events (list): DESCENDING list of alerts.
+ time_field (str): The field name in the event to use for comparison.
+ Return:
+ list[str]: list of element IDs that are the earliest.
+ """
+ earliest_event_fetched_ids = []
+ if events:
+ # Last event is necessarily the earliest as the list is descending
+ earliest_event_fetched_timestamp = arg_to_number(events[-1].get(time_field))
+ for event in reversed(events):
+ # Once we find an event which is newer than the earliest event we know we have saved all the events that had the same
+ # timestamp.
+ if arg_to_number(event.get(time_field)) > earliest_event_fetched_timestamp: # type: ignore[operator]
+ break
+ # Audit logs don't have an ID, so we create it from the other fields
+ event_id = event.get('id') if event.get('id') else hash_fields_to_create_id(event)
+ earliest_event_fetched_ids.append(event_id)
+
+ return earliest_event_fetched_ids
+
+
+def hash_fields_to_create_id(event: dict) -> str:
+ """
+ create a hash value for a given event based on its properties. This is used to create an ID to the Audit Log events that has
+ no IDs.
+ Args:
+ event (dict): Audit Log event
+
+ Return:
+ str: hash created from the event properties
+ """
+ string = ''
+ for val in event.values():
+ string += str(val)
+ _id = hashlib.sha256(str.encode(string)).hexdigest()
+ return _id
+
+
+def fetch_events_per_type(client: Client, event_type: str, fetch_start_timestamp: int, fetch_end_timestamp: int) -> list[dict]:
+ """
+ Given the event type to pull and the relevant start and end time, call the relevant function to pull the given event type.
+ Args:
+ client (Client): the client to use for the API calls.
+ event_type (EventType): the event type we want to pull.
+ fetch_start_timestamp (int): The start time to pull events from.
+ fetch_end_timestamp (int): The end time to pull events to.
+
+ Return:
+ List: the list of pulled events (Audit Logs | Alerts)
+ """
+ demisto.debug(f'Pulling {event_type}s with {fetch_start_timestamp=}, {fetch_end_timestamp=}')
+ if event_type == EventType.alert:
+ event_pulling_function = client.get_alerts
+ data_field = 'alertsList'
+
+ elif event_type == EventType.audit_log:
+ event_pulling_function = client.get_audit_logs # type: ignore[assignment]
+ data_field = 'auditLogs'
+ else:
+ raise DemistoException(f'Event Type: {event_type} is not supported by the integration')
+
+ try:
+ res = event_pulling_function(fetch_start_timestamp, fetch_end_timestamp)
+ events = res.get(data_field, [])
+ if event_type == EventType.alert:
+ # The API returns alerts with no specific order in each response, but we prefer DESCENDING order since the API will
+ # always return the LATEST PAGE_SIZE alerts that matched the query and not the EARLIEST PAGE_SIZE alerts.
+ events.sort(key=lambda alert: alert.get(ALERT_TIME_FIELD), reverse=True)
+
+ except DemistoException as e:
+ if 'Unauthorized' in e.message:
+ raise DemistoException('Unauthorized - API key is invalid')
+ raise e
+
+ return events
+
+
+def fetch_events_loop(client: Client, event_type: str, cache: dict, max_fetch: int) -> tuple[list, dict, bool]:
+ """
+ This is the main loop to retrieve events, it is called twice - once for each event type (Audit Log, Alert).
+ For each API response we check for duplicates and add the '_time' field.
+ There are 3 different cases the code is able to handle:
+ # 1: API returns fewer events than page_size on the first request - no need for additional iteration, all we need is to
+ save last event timestamp with an increase of 1 micro sec to be used in the next fetch-events-execution.
+ # 2: There are more than page_size events but there are less than max_fetch events - In this case as the events are
+ returned in descending order in both APIs ,we first make sure to save the newest event timestamp to be used in the
+ next fetch-events-execution. Then we perform similar calls where each time we update the fetch_end_timestamp is set
+ the EARLIEST timestamp received in the previous response.
+ # 3: There are more than max_fetch events - This means that we will have to iterate backwards from the original end time
+ in multiple fetch executions. Thus, in addition to what we do in case 2 we also save in cache the earliest timestamp
+ we fetched to continue in the next execution.
+ """
+ demisto.debug(f'Starting fetch_events for event_type={event_type}s')
+ time_field_name = ALERT_TIME_FIELD if event_type == EventType.alert else AUDIT_LOGS_TIME_FIELD
+
+ ids_for_dedup = cache.get('ids_for_dedup', [])
+ fetch_start_timestamp = cache.get('next_start_timestamp') or \
+ int(arg_to_datetime('1 min').timestamp() * 1000000) # type: ignore[union-attr]
+ fetch_end_timestamp = cache.get('next_end_timestamp') or \
+ int(arg_to_datetime('Now').timestamp() * 1000000) # type: ignore[union-attr]
+
+ # The latest_event_fetched_timestamp acts like a pointer to the newest event we ever fetched.
+ latest_fetched_event_timestamp = cache.get('latest_event_fetched_timestamp')
+ aggregated_events: list = []
+ temp_events: list = []
+ while len(aggregated_events) < max_fetch:
+ temp_events = fetch_events_per_type(client, event_type, fetch_start_timestamp, fetch_end_timestamp)
+ demisto.debug(f'Number of events before de-duping {len(temp_events)}:\n{temp_events}')
+ deduped_events = adjust_and_dedup_elements(temp_events, ids_for_dedup, time_field_name)
+ demisto.debug(f'Number of events after de-duping {len(deduped_events)}:{deduped_events}')
+ if not deduped_events:
+ break
+ aggregated_events.extend(deduped_events)
+ # The fetch_end_timestamp variable is used as the next end timestamp and acts like an index marking the earliest alert
+ # pulled so far (alerts are returned in descending order) in cases where we need to perform several calls in the same
+ # round. In cases where we have not reached the user limit we will pass it on to the next fetch-events execution.
+ fetch_end_timestamp = deduped_events[-1].get(time_field_name)
+ ids_for_dedup = get_earliest_event_ids_with_the_same_time(deduped_events, time_field_name)
+ demisto.debug(f'Saved {len(ids_for_dedup)} alert IDs for de-duplication in next iteration')
+ # This means we know there are no more events to pull using the current fetch_start_timestamp and fetch_end_timestamp.
+ if len(temp_events) < PAGE_SIZE:
+ demisto.debug(f'Received {len(temp_events)} events, which is less than {PAGE_SIZE=}')
+ break
+
+ # We only update latest_fetched_event_timestamp if it is empty, otherwise it means we are still fetching past events.
+ if not latest_fetched_event_timestamp:
+ latest_fetched_event_timestamp = aggregated_events[0].get(time_field_name) + 1 if aggregated_events else \
+ fetch_end_timestamp # type: ignore[operator]
+ demisto.debug(f'latest_fetched_event_timestamp is empty, setting it to \'{latest_fetched_event_timestamp}\'')
+
+ in_progress_pagination: bool = False
+ # In case the last events list has less than PAGE_SIZE events we know there are no more events to pull using the current
+ # fetch_start_timestamp and fetch_end_timestamp, so next round is basically a new search. In that case we will:
+ # 1. Update the next_start_timestamp to latest_event_fetched_timestamp
+ # 2. Update ids_for_dedup with the latest_fetched_events_ids
+ # 3. clear the next_end_timestamp, latest_fetched_event_timestamp and latest_fetched_events_ids
+ if len(temp_events) < PAGE_SIZE:
+ next_start_timestamp = latest_fetched_event_timestamp
+ next_end_timestamp = ''
+ latest_fetched_event_timestamp = ''
+ ids_for_dedup = []
+ demisto.debug(f'Last events list has {len(temp_events)} events, which is less than {PAGE_SIZE=}, setting '
+ f'{next_start_timestamp=}')
+
+ # If we exited the loop and the last list of pulled events is not smaller than PAGE_SIZE (the first if condition)
+ # it must mean that len(events) == PAGE_SIZE, and that we have reached the user limit, implying we are missing more events to
+ # pull from the original window. In that case we will keep the next_start_timestamp as is (fetch_start_timestamp) and will
+ # update the next_end_timestamp to fetch_end_timestamp (the oldest alert fetched). We will also save the list of
+ # ids_for_dedup to be used in the next round
+ else:
+ in_progress_pagination = True
+ next_start_timestamp = fetch_start_timestamp
+ next_end_timestamp = fetch_end_timestamp # type: ignore[assignment]
+ demisto.debug(f'Last events list has {len(temp_events)} events. The aggregated events list has {len(aggregated_events)} '
+ f'events which should equal to {max_fetch=}. This means we are missing more events.')
+
+ new_cache = {
+ 'ids_for_dedup': ids_for_dedup,
+ 'next_start_timestamp': next_start_timestamp,
+ 'next_end_timestamp': next_end_timestamp,
+ 'latest_event_fetched_timestamp': latest_fetched_event_timestamp,
+ }
+
+ demisto.debug(f'Returning {len(aggregated_events)=} events, and a new {new_cache=}')
+ return aggregated_events, new_cache, in_progress_pagination
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def test_module_command(client, max_fetch):
+ fetch_events_command(client, {}, max_fetch)
+ return 'ok'
+
+
+def fetch_events_command(client: Client, last_run: dict, max_fetch: int):
+ audit_logs, audit_cache, in_progress_pagination_audit_log = fetch_events_loop(client, EventType.audit_log,
+ last_run.get('audit_cache', {}), max_fetch)
+ last_run['audit_cache'] = audit_cache
+ alerts, alerts_cache, in_progress_pagination_alert = fetch_events_loop(client, EventType.alert,
+ last_run.get('alert_cache', {}), max_fetch)
+ last_run['alert_cache'] = alerts_cache
+ if in_progress_pagination_audit_log or in_progress_pagination_alert:
+ last_run["nextTrigger"] = '0'
+
+ return alerts + audit_logs, last_run
+
+
+def get_events_command(client: Client, args: dict):
+ start_time = int(arg_to_datetime(args.get('start_time')).timestamp() * 1000000) # type: ignore[union-attr]
+ end_time = int(arg_to_datetime(args.get('end_time'), 'now').timestamp() * 1000000) # type: ignore[union-attr]
+ raw_audit_logs = client.get_audit_logs(start_time, end_time)
+ raw_alerts = client.get_alerts(start_time, end_time)
+ events = raw_audit_logs.get('auditLogs', []) + raw_alerts.get('alertsList', [])
+ if argToBoolean(args.get('should_push_events')):
+ send_events_to_xsiam(events=events, vendor='cohesity', product='helios')
+ return CommandResults(readable_output=tableToMarkdown('Events returned from Cohesity Helios', t=events),
+ raw_response=raw_audit_logs.get('auditLogs', []) + raw_alerts.get('alertsList', []))
+
+
+def main() -> None:
+ """main function, parses params and runs command functions
+
+ """
+
+ params = demisto.params()
+ # Get API key for authentication.
+ api_key = params.get('api_key', {}).get('password')
+
+ # Get helios service API url.
+ base_url = urljoin(params.get('url'), API_VERSION)
+ max_fetch: int = min(arg_to_number(params.get('max_fetch', MAX_EVENTS_PER_TYPE)),
+ MAX_EVENTS_PER_TYPE) # type: ignore[assignment, type-var]
+ verify_certificate = not params.get('insecure', False)
+ proxy = params.get('proxy', False)
+
+ command = demisto.command()
+ demisto.debug(f'Command being called is {command}')
+ try:
+ # Prepare client and set authentication headers.
+ headers: dict = {
+ 'apikey': api_key,
+ }
+ client = Client(
+ base_url=base_url,
+ verify=verify_certificate,
+ headers=headers,
+ proxy=proxy)
+
+ if command == 'test-module':
+ # This is the call made when pressing the integration Test button.
+ result = test_module_command(client, max_fetch)
+ return_results(result)
+ if command == 'cohesity-helios-get-events':
+ args = demisto.args()
+ return_results(get_events_command(client, args))
+
+ elif command == 'fetch-events':
+ last_run = demisto.getLastRun()
+ demisto.debug(f'{last_run=}')
+ events, new_last_run = fetch_events_command(client, last_run, max_fetch)
+ send_events_to_xsiam(events=events, vendor='cohesity', product='helios')
+ demisto.setLastRun(new_last_run)
+ demisto.debug(f'{new_last_run=}')
+
+ # Log exceptions and return errors
+ except Exception as e:
+ return_error(f'Failed to execute {command} command.\nError:\n{str(e)}')
+
+
+''' ENTRY POINT '''
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.yml b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.yml
new file mode 100644
index 000000000000..3be204b695d0
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector.yml
@@ -0,0 +1,89 @@
+category: Analytics & SIEM
+sectionOrder:
+- Connect
+- Collect
+commonfields:
+ id: Cohesity Helios Event Collector
+ version: -1
+configuration:
+- defaultvalue: https://helios.cohesity.com
+ display: Server URL (e.g. https://helios.cohesity.com)
+ name: url
+ required: true
+ type: 0
+ section: Connect
+- displaypassword: API Key
+ additionalinfo: The API Key to use for connection
+ name: api_key
+ required: false
+ section: Connect
+ hiddenusername: true
+ type: 9
+- display: The maximum number of events per type. Default is 50000.
+ additionalinfo: The collector pulls both Audit Logs and Alerts. This parameter sets the maximum fetch number limit for each type.
+ name: max_fetch
+ required: false
+ advanced: false
+ section: Collect
+ defaultvalue: 50000
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ advanced: true
+ required: false
+ type: 8
+ section: Connect
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ advanced: true
+ type: 8
+ section: Connect
+description: This is the Cohesity Helios Event Collector integration for XSIAM.
+display: Cohesity Helios Event Collector
+name: Cohesity Helios Event Collector
+supportlevelheader: xsoar
+marketplaces:
+- marketplacev2
+fromversion: '8.2.0'
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ defaultValue: 'false'
+ description: If true, the command will create events, otherwise it only displays them.
+ isArray: false
+ name: should_push_events
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ - default: false
+ description: Maximum results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ name: start_time
+ description: Specifies the start time of the alerts to be returned.
+ isArray: false
+ required: true
+ - default: false
+ name: end_time
+ description: Specifies the end time of the alerts to be returned.
+ isArray: false
+ required: true
+ defaultValue: 'Now'
+ deprecated: false
+ description: Gets events from Cohesity Helios.
+ execution: false
+ name: cohesity-helios-get-events
+ dockerimage: demisto/python3:3.10.13.89009
+ isfetchevents: true
+ runonce: false
+ script: '-'
+ subtype: python3
+ type: python
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_description.md b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_description.md
new file mode 100644
index 000000000000..2b64f401cd65
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_description.md
@@ -0,0 +1,11 @@
+## Cohesity Helios Help
+
+### Minimum Permission for Helios API user to generate APIKey
+
+To run the workflow, you need to pass the Helios API Key. The user that creates this APIKey must have the following privileges.
+
+* *Viewer Role*: This role enables the user to log in to Cohesity Helios and [create the APIKey.](https://developer.cohesity.com/docs/helios-getting-started)
+
+* *Manage Protection Groups and Manage Recovery*: This enables the user to get a clean snapshot and recover the VM to the latest known safe state.
+
+To know more about Cohesity Roles, go to [Cohesity Product Documentation](https://docs.cohesity.com/6_5_1/Web/UserGuide/Content/Dashboard/Admin/RoleManage.htm?tocpath=Administration%7CAccess%20Management%7CRoles%7C_____0#ManageRoles).
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_image.png b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_image.png
new file mode 100644
index 000000000000..883935fd916b
Binary files /dev/null and b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_image.png differ
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_test.py b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_test.py
new file mode 100644
index 000000000000..918c09c743ae
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/CohesityHeliosEventCollector_test.py
@@ -0,0 +1,321 @@
+import json
+
+import pytest
+
+from CommonServerPython import arg_to_datetime
+
+
+def test_dedup_elements():
+ from CohesityHeliosEventCollector import adjust_and_dedup_elements, ALERT_TIME_FIELD, AUDIT_LOGS_TIME_FIELD
+ """
+ Case 1:
+ Given a list of 3 elements where all IDs appear in the ID list.
+ We expect the result list to have no elements at all and that the final list length was not changed.
+ """
+ new_elements = [{'id': '1', 'latestTimestampUsecs': 1704096000000000},
+ {'id': '2', 'latestTimestampUsecs': 1704182400000000},
+ {'id': '3', 'latestTimestampUsecs': 1704268800000000}]
+ existing_element_ids = ['1', '2', '3']
+ deduped_elements = adjust_and_dedup_elements(new_elements=new_elements, existing_element_ids=existing_element_ids,
+ time_field_name='')
+ assert deduped_elements == []
+ assert len(new_elements) == 3
+
+ """
+ Case 2:
+ Given a list of 2 elements where all elements appear in the existing ID list
+ We expect the result list to have no elements at all and that the final list length was not changed.
+ """
+ new_elements = [{'id': '2', 'latestTimestampUsecs': 1704182400000000},
+ {'id': '3', 'latestTimestampUsecs': 1704268800000000}]
+ existing_element_ids = ['1', '2', '3']
+ deduped_elements = adjust_and_dedup_elements(new_elements=new_elements, existing_element_ids=existing_element_ids,
+ time_field_name='')
+ assert deduped_elements == []
+ assert len(new_elements) == 2
+
+ """
+ Case 3:
+ Given a list of 3 elements where the first element appear in the existing ID list.
+ We expect the result list to have the other two elements and that the final list length was not changed.
+ """
+ new_elements = [{'id': '1', 'latestTimestampUsecs': 1704096000000000},
+ {'id': '2', 'latestTimestampUsecs': 1704182400000000},
+ {'id': '3', 'latestTimestampUsecs': 1704268800000000}]
+ existing_element_ids = ['1']
+ deduped_elements = adjust_and_dedup_elements(new_elements=new_elements, existing_element_ids=existing_element_ids,
+ time_field_name=ALERT_TIME_FIELD)
+ assert deduped_elements == [{'id': '2', 'latestTimestampUsecs': 1704182400000000, '_time': '2024-01-02T08:00:00.000Z'},
+ {'id': '3', 'latestTimestampUsecs': 1704268800000000, '_time': '2024-01-03T08:00:00.000Z'}]
+ assert len(new_elements) == 3
+
+ """
+ Case 4:
+ Given a list of 3 elements while the existing ID list is empty.
+ We expect the result list to have all elements and that the final list length was not changed.
+ """
+ new_elements = [{'id': '1', 'timestampUsecs': 1704096000000000}, {'id': '2', 'timestampUsecs': 1704182400000000},
+ {'id': '3', 'timestampUsecs': 1704268800000000}]
+ existing_element_ids = []
+ deduped_elements = adjust_and_dedup_elements(new_elements=new_elements, existing_element_ids=existing_element_ids,
+ time_field_name=AUDIT_LOGS_TIME_FIELD)
+ assert deduped_elements == [{'id': '1', 'timestampUsecs': 1704096000000000, '_time': '2024-01-01T08:00:00.000Z'},
+ {'id': '2', 'timestampUsecs': 1704182400000000, '_time': '2024-01-02T08:00:00.000Z'},
+ {'id': '3', 'timestampUsecs': 1704268800000000, '_time': '2024-01-03T08:00:00.000Z'}]
+ assert len(new_elements) == 3
+
+ """
+ Case 5:
+ Given an empty list elements.
+ We expect the result list to have no elements at all and that the final list length was not changed.
+ """
+ new_elements = []
+ existing_element_ids = ['1', '2', '3']
+ deduped_elements = adjust_and_dedup_elements(new_elements=new_elements, existing_element_ids=existing_element_ids,
+ time_field_name='')
+ assert deduped_elements == []
+ assert len(new_elements) == 0
+
+
+def test_get_earliest_event_ids_with_the_same_time():
+ from CohesityHeliosEventCollector import get_earliest_event_ids_with_the_same_time, ALERT_TIME_FIELD, AUDIT_LOGS_TIME_FIELD
+
+ time_field = ALERT_TIME_FIELD
+ """
+ Case 1:
+ Given a list of Alert events where there is only one event that has the earliest timestamp
+ Ensure only the ID of the earliest Alert is returned
+ """
+ events = [
+ {'latestTimestampUsecs': '3', 'id': 'c'},
+ {'latestTimestampUsecs': '2', 'id': 'b'},
+ {'latestTimestampUsecs': '1', 'id': 'a'}
+ ]
+ earliest_event_fetched_ids = get_earliest_event_ids_with_the_same_time(events=events, time_field=time_field)
+ assert earliest_event_fetched_ids == ['a']
+
+ """
+ Case 2:
+ Given a list of Alert events where there are two "earliest" events
+ Ensure the ID of the TWO earliest Alerts is returned
+ """
+ events = [
+ {'latestTimestampUsecs': '3', 'id': 'd'},
+ {'latestTimestampUsecs': '2', 'id': 'c'},
+ {'latestTimestampUsecs': '1', 'id': 'b'},
+ {'latestTimestampUsecs': '1', 'id': 'a'}
+ ]
+ earliest_event_fetched_ids = get_earliest_event_ids_with_the_same_time(events=events, time_field=time_field)
+ assert earliest_event_fetched_ids == ['a', 'b']
+
+ time_field = AUDIT_LOGS_TIME_FIELD
+ """
+ Case 3:
+ Given a list of Audit Log events where there is only one event that has the earliest timestamp
+ Ensure only the ID of the earliest event is returned
+ """
+ events = [
+ {'timestampUsecs': '3', 'id': 'c'},
+ {'timestampUsecs': '2', 'id': 'b'},
+ {'timestampUsecs': '1', 'id': 'a'}
+ ]
+ earliest_event_fetched_ids = get_earliest_event_ids_with_the_same_time(events=events, time_field=time_field)
+ assert earliest_event_fetched_ids == ['a']
+
+ """
+ Case 4:
+ Given a list of Audit Log events where there are two "earliest" events
+ Ensure the ID of the TWO earliest Audit logs is returned
+ """
+ events = [
+ {'timestampUsecs': '3', 'id': 'd'},
+ {'timestampUsecs': '2', 'id': 'c'},
+ {'timestampUsecs': '1', 'id': 'b'},
+ {'timestampUsecs': '1', 'id': 'a'}
+ ]
+ earliest_event_fetched_ids = get_earliest_event_ids_with_the_same_time(events=events, time_field=time_field)
+ assert earliest_event_fetched_ids == ['a', 'b']
+
+
+def test_hash_fields_to_create_id():
+ """
+ Given dummy audit log event with the relevant fields
+ Ensure the id is created correctly
+ """
+ from CohesityHeliosEventCollector import hash_fields_to_create_id
+ event = {
+ 'details': 'dummy_details',
+ 'username': 'dummy_username',
+ 'domain': 'dummy_domain',
+ 'sourceType': 'dummy_sourceType',
+ 'entityName': 'dummy_entityName',
+ 'entityType': 'dummy_entityType',
+ 'action': 'dummy_action',
+ 'timestampUsecs': 'dummy_timestampUsecs',
+ 'ip': 'dummy_ip',
+ 'isImpersonation': 'dummy_isImpersonation',
+ 'tenantId': 'dummy_tenantId',
+ 'originalTenantId': 'dummy_originalTenantId',
+ 'serviceContext': 'dummy_serviceContext'
+ }
+ _id = hash_fields_to_create_id(event)
+ assert _id == '8bb89cb674035796b755e9e1db5022dc750e904f520eb290d18e134b12656bf2'
+
+
+class TestFetchEventsCommand:
+ """
+ Class to test the different Fetch events flow.
+ Fetch events has test 3 case:
+ 1: There are fewer events than page_size on first request
+ 2: There are more than page_size events but there are less than max_fetch events
+ 3: There are more than max_fetch events
+ """
+ base_url = 'https://test.com'
+ audit_logs_endpoint = 'mcm/audit-logs'
+ alerts_endpoint = 'mcm/alerts'
+ mock_time = '2024-01-01 10:00:00'
+ mock_fixed_time_unix = int(arg_to_datetime(mock_time).timestamp() * 1000000)
+
+ @staticmethod
+ def load_response(event_type) -> dict:
+ from CohesityHeliosEventCollector import EventType
+ filename = 'test_data/CohesityHeliosEventCollector-AuditLogList.json' if event_type == EventType.audit_log else \
+ 'test_data/CohesityHeliosEventCollector-AlertList.json'
+ with open(filename) as f:
+ return json.loads(f.read())
+
+ @pytest.fixture()
+ def audit_logs_mock_res(self):
+ import CohesityHeliosEventCollector
+ return self.load_response(CohesityHeliosEventCollector.EventType.audit_log)
+
+ @pytest.fixture()
+ def alerts_mock_res(self):
+ import CohesityHeliosEventCollector
+ return self.load_response(CohesityHeliosEventCollector.EventType.alert)
+
+ def test_fetch_events_command_case_1(self, requests_mock, mocker, audit_logs_mock_res, alerts_mock_res):
+ """
+ Case 1 is when where are fewer events (4) than page_size (10,000) on the first request.
+
+ We expect:
+ - Each event type API call to be called once
+ - To have only 4 events returned
+ - Audit logs next start time for the next fetch to be set to the latest pulled event timestamp plus 1
+ (170691857331523)
+ - No list of ids_for_dedup and no latest_event_fetched_timestamp for audit logs
+ - Alerts next start time for the next fetch to be set to the latest pulled event timestamp plus 1 (1708175775539274)
+ - No list of ids_for_dedup and no latest_event_fetched_timestamp for alerts
+ """
+ from CohesityHeliosEventCollector import Client, fetch_events_command
+
+ # mockers
+ mocker.patch("CohesityHeliosEventCollector.arg_to_datetime", return_value=arg_to_datetime(self.mock_time))
+ audit_logs_call = requests_mock.get(f'{self.base_url}/{self.audit_logs_endpoint}', json=audit_logs_mock_res[0])
+ alerts_call = requests_mock.get(f'{self.base_url}/{self.alerts_endpoint}', json=alerts_mock_res[0])
+
+ client = Client(base_url=self.base_url)
+ events, last_run = fetch_events_command(client=client, last_run={}, max_fetch=1000)
+
+ assert audit_logs_call.call_count == alerts_call.call_count == 1
+ assert len(events) == 4
+ assert last_run['audit_cache']['next_start_timestamp'] == 170691857331523
+ assert not last_run['audit_cache']['ids_for_dedup']
+ assert not last_run['audit_cache']['latest_event_fetched_timestamp']
+ assert last_run['alert_cache']['next_start_timestamp'] == 1708175775539274
+ assert not last_run['alert_cache']['ids_for_dedup']
+ assert not last_run['alert_cache']['latest_event_fetched_timestamp']
+
+ def test_fetch_events_command_case_2(self, requests_mock, mocker, audit_logs_mock_res, alerts_mock_res):
+ """
+ Case 2 is when there are more events (3) from each type than the page_size (2), but there are not more than max_fetch
+ (1000).
+
+ We expect:
+ - Each event type API call to be called twice
+ - That the endtimeusecs in the 2nd API call for audit logs will be the same as the time of the earliest event fetched
+ timestamp
+ - That the enddateusecs in the 2nd API call for alerts will be the same as the time of the earliest event fetched
+ timestamp
+ - To have 6 events returned
+ - Audit logs next start time for the next fetch to be set to the latest pulled event timestamp plus 1
+ (170691857331523)
+ - No list of ids_for_dedup and no latest_event_fetched_timestamp for audit logs
+ - Alerts next start time for the next fetch to be set to the latest pulled event timestamp plus 1 (1708175775539274)
+ - No list of ids_for_dedup and no latest_event_fetched_timestamp for alerts
+ """
+ import CohesityHeliosEventCollector
+ from CohesityHeliosEventCollector import Client, fetch_events_command
+
+ # mockers
+ mocker.patch.object(CohesityHeliosEventCollector, 'PAGE_SIZE', 2)
+ mocker.patch("CohesityHeliosEventCollector.arg_to_datetime", return_value=arg_to_datetime(self.mock_time))
+ audit_logs_call = requests_mock.get(f'{self.base_url}/{self.audit_logs_endpoint}',
+ [{'json': audit_logs_mock_res[0]}, {'json': audit_logs_mock_res[2]}])
+ alerts_call = requests_mock.get(f'{self.base_url}/{self.alerts_endpoint}',
+ [{'json': alerts_mock_res[0]}, {'json': alerts_mock_res[2]}])
+
+ audit_logs_expected_end_time = audit_logs_mock_res[0].get('auditLogs')[1].get('timestampUsecs')
+ alerts_expected_end_time = alerts_mock_res[0].get('alertsList')[1].get('latestTimestampUsecs')
+ client = Client(base_url=self.base_url)
+ events, last_run = fetch_events_command(client=client, last_run={}, max_fetch=1000)
+
+ assert audit_logs_call.call_count == alerts_call.call_count == 2
+ assert audit_logs_call.request_history[1].qs['endtimeusecs'][0] == str(audit_logs_expected_end_time)
+ assert alerts_call.request_history[1].qs['enddateusecs'][0] == str(alerts_expected_end_time)
+ assert len(events) == 6
+ assert last_run['audit_cache']['next_start_timestamp'] == 170691857331523
+ assert not last_run['audit_cache']['ids_for_dedup']
+ assert not last_run['audit_cache']['latest_event_fetched_timestamp']
+ assert last_run['alert_cache']['next_start_timestamp'] == 1708175775539274
+ assert not last_run['alert_cache']['ids_for_dedup']
+ assert not last_run['alert_cache']['latest_event_fetched_timestamp']
+
+ def test_fetch_events_command_case_3(self, requests_mock, mocker, audit_logs_mock_res, alerts_mock_res):
+ """
+ Case 3 is when there are more events than max_fetch events.
+
+ We expect:
+ - Each event type API call to be called twice
+ - That the endtimeusecs in the 2nd API call for audit logs will be the same as the time of the earliest event fetched
+ timestamp
+ - That the enddateusecs in the 2nd API call for alerts will be the same as the time of the earliest event fetched
+ timestamp
+ - To have 8 events returned
+ - Audit logs next start time for the next fetch to be set to the same initial start time
+ - ids_for_dedup in the audit_log cache has the ID of the earliest audit log event
+ - latest_event_fetched_timestamp in the audit_log cache holds the latest audit log event timestamp plus 1 sec
+ - Alerts next start time for the next fetch to be set to the same initial start time
+ - No list of ids_for_dedup and no latest_event_fetched_timestamp for alerts
+ - ids_for_dedup in the alerts cache has the ID of the earliest alert event
+ - latest_event_fetched_timestamp in the alerts cache holds the latest alert event timestamp plus 1 sec
+ """
+ import CohesityHeliosEventCollector
+ from CohesityHeliosEventCollector import Client, fetch_events_command
+
+ # mockers
+ mocker.patch.object(CohesityHeliosEventCollector, 'PAGE_SIZE', 2)
+ mocker.patch("CohesityHeliosEventCollector.arg_to_datetime", return_value=arg_to_datetime(self.mock_time))
+ audit_logs_call = requests_mock.get(f'{self.base_url}/{self.audit_logs_endpoint}',
+ [{'json': audit_logs_mock_res[0]},
+ {'json': audit_logs_mock_res[1]}])
+ alerts_call = requests_mock.get(f'{self.base_url}/{self.alerts_endpoint}',
+ [{'json': alerts_mock_res[0]},
+ {'json': alerts_mock_res[1]}])
+
+ audit_logs_expected_end_time = audit_logs_mock_res[0].get('auditLogs')[1].get('timestampUsecs')
+ alerts_expected_end_time = alerts_mock_res[0].get('alertsList')[1].get('latestTimestampUsecs')
+
+ client = Client(base_url=self.base_url)
+ events, last_run = fetch_events_command(client=client, last_run={}, max_fetch=3)
+
+ assert audit_logs_call.call_count == alerts_call.call_count == 2
+ assert audit_logs_call.request_history[1].qs['endtimeusecs'][0] == str(audit_logs_expected_end_time)
+ assert alerts_call.request_history[1].qs['enddateusecs'][0] == str(alerts_expected_end_time)
+ assert len(events) == 8
+ assert last_run['audit_cache']['next_start_timestamp'] == self.mock_fixed_time_unix
+ assert last_run['audit_cache']['ids_for_dedup'] == ['94f359b611f0272505c36002ed4dbcaff9496d0f16460287f1ed05af0f7257a1']
+ assert last_run['audit_cache']['latest_event_fetched_timestamp'] == 170691857331523
+ assert last_run['alert_cache']['next_start_timestamp'] == self.mock_fixed_time_unix
+ assert last_run['alert_cache']['ids_for_dedup'] == ['66770']
+ assert last_run['alert_cache']['latest_event_fetched_timestamp'] == 1708175775539274
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md
new file mode 100644
index 000000000000..5b92340a0dbc
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md
@@ -0,0 +1,44 @@
+This is the Cohesity Helios Event Collector integration for XSIAM.
+
+## Configure Cohesity Helios Event Collector on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Cohesity Helios Event Collector.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Server URL (e.g. https://helios.cohesity.com) | | True |
+ | API Key | The API Key to use for connection | False |
+ | The maximum number of events per type. Default is 50000. | The collector pulls both Audit Logs and Alerts. This parameter sets the maximum fetch number limit for each type. | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### cohesity-helios-get-events
+
+***
+Gets events from Cohesity Helios.
+
+#### Base Command
+
+`cohesity-helios-get-events`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| should_push_events | If true, the command will create events, otherwise it only displays them. Possible values are: true, false. Default is false. | Required |
+| limit | Maximum results to return. | Optional |
+| start_time | Specifies the start time of the alerts to be returned. | Required |
+| end_time | Specifies the end time of the alerts to be returned. Default is Now. | Required |
+
+#### Context Output
+
+There is no context output for this command.
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/command_examples b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/command_examples
new file mode 100644
index 000000000000..b1c4ddbbbe03
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/command_examples
@@ -0,0 +1,3 @@
+cohesity-helios-get-events start_time=1706191757331522 end_time=1709108194331522
+fetch-incidents
+test-module
\ No newline at end of file
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AlertList.json b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AlertList.json
new file mode 100644
index 000000000000..e67d323a6f85
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AlertList.json
@@ -0,0 +1,258 @@
+[
+ {
+ "alertsList": [
+ {
+ "id": "308054",
+ "alertCode": "CE00411001",
+ "firstTimestampUsecs": 1708175775539273,
+ "latestTimestampUsecs": 1708175775539273,
+ "alertType": 11001,
+ "dedupTimestamps": [
+ 1708175775539273
+ ],
+ "dedupCount": 1,
+ "clusterName": "cohesity-cluster-2",
+ "clusterId": 878948577151123,
+ "alertTypeBucket": "kSoftware",
+ "alertCategory": "kNodeHealth",
+ "severity": "kCritical",
+ "alertState": "kOpen",
+ "alertDocument": {
+ "alertName": "FrequentProcessRestarts",
+ "alertDescription": "Process bifrost_broker_exec is restarting frequently",
+ "alertCause": "process bifrost_broker_exec is restarting frequently on the cluster.",
+ "alertHelpText": "Please refer to KB for details/resolution."
+ },
+ "vaults": null,
+ "propertyList": [
+ {
+ "key": "num_restarts",
+ "value": "22"
+ },
+ {
+ "key": "process_name",
+ "value": "bifrost_broker_exec"
+ },
+ {
+ "key": "reason_string",
+ "value": "process bifrost_broker_exec is restarting frequently on the cluster."
+ },
+ {
+ "key": "cluster_name",
+ "value": "cohesity-cluster-2"
+ },
+ {
+ "key": "duration_seconds",
+ "value": "3600"
+ },
+ {
+ "key": "node_id",
+ "value": "3812890615711891"
+ }
+ ]
+ },
+ {
+ "id": "306054",
+ "alertCode": "CE00411001",
+ "firstTimestampUsecs": 1707996205135856,
+ "latestTimestampUsecs": 1707996205135856,
+ "alertType": 11001,
+ "dedupTimestamps": [
+ 1707996205135856
+ ],
+ "dedupCount": 1,
+ "clusterName": "cohesity-cluster-2",
+ "clusterId": 878948577151123,
+ "alertTypeBucket": "kSoftware",
+ "alertCategory": "kNodeHealth",
+ "severity": "kCritical",
+ "alertState": "kOpen",
+ "alertDocument": {
+ "alertName": "FrequentProcessRestarts",
+ "alertDescription": "Process bifrost_broker_exec is restarting frequently",
+ "alertCause": "process bifrost_broker_exec is restarting frequently on the cluster.",
+ "alertHelpText": "Please refer to KB for details/resolution."
+ },
+ "vaults": null,
+ "propertyList": [
+ {
+ "key": "num_restarts",
+ "value": "22"
+ },
+ {
+ "key": "process_name",
+ "value": "bifrost_broker_exec"
+ },
+ {
+ "key": "reason_string",
+ "value": "process bifrost_broker_exec is restarting frequently on the cluster."
+ },
+ {
+ "key": "cluster_name",
+ "value": "cohesity-cluster-2"
+ },
+ {
+ "key": "duration_seconds",
+ "value": "3600"
+ },
+ {
+ "key": "node_id",
+ "value": "3812890615711891"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "alertsList": [
+ {
+ "id": "66777",
+ "alertCode": "CE00411001",
+ "firstTimestampUsecs": 1707986205135856,
+ "latestTimestampUsecs": 1707986205135856,
+ "alertType": 11001,
+ "dedupTimestamps": [
+ 1708072432714915,
+ 1708071663181477
+ ],
+ "dedupCount": 53,
+ "clusterName": "cohesity-cluster-1",
+ "clusterId": 3543552943385798,
+ "alertTypeBucket": "kSoftware",
+ "alertCategory": "kNodeHealth",
+ "severity": "kCritical",
+ "alertState": "kOpen",
+ "alertDocument": {
+ "alertName": "FrequentProcessRestarts",
+ "alertDescription": "Process storage_proxy_exec has restarted 36 times on node 3810370756996294 in last 3600 seconds.",
+ "alertCause": "process storage_proxy_exec is restarting frequently on the cluster.",
+ "alertHelpText": "Please refer to KB for details/resolution."
+ },
+ "vaults": null,
+ "propertyList": [
+ {
+ "key": "node_id",
+ "value": "3810370756996294"
+ },
+ {
+ "key": "num_restarts",
+ "value": "36"
+ },
+ {
+ "key": "process_name",
+ "value": "storage_proxy_exec"
+ },
+ {
+ "key": "reason_string",
+ "value": "process storage_proxy_exec is restarting frequently on the cluster."
+ },
+ {
+ "key": "cluster_name",
+ "value": "cohesity-cluster-1"
+ },
+ {
+ "key": "duration_seconds",
+ "value": "3600"
+ }
+ ]
+ },
+ {
+ "id": "66770",
+ "alertCode": "CE00411001",
+ "firstTimestampUsecs": 1707984205135856,
+ "latestTimestampUsecs": 1707984205135856,
+ "alertType": 11001,
+ "dedupTimestamps": [
+ 1708072432714915,
+ 1708071663181477,
+ 1708071438750893
+ ],
+ "dedupCount": 53,
+ "clusterName": "cohesity-cluster-1",
+ "clusterId": 3543552943385798,
+ "alertTypeBucket": "kSoftware",
+ "alertCategory": "kNodeHealth",
+ "severity": "kCritical",
+ "alertState": "kOpen",
+ "alertDocument": {
+ "alertName": "FrequentProcessRestarts",
+ "alertDescription": "Process storage_proxy_exec has restarted 36 times on node 3810370756996294 in last 3600 seconds.",
+ "alertCause": "process storage_proxy_exec is restarting frequently on the cluster.",
+ "alertHelpText": "Please refer to KB for details/resolution."
+ },
+ "vaults": null,
+ "propertyList": [
+ {
+ "key": "node_id",
+ "value": "3810370756996294"
+ },
+ {
+ "key": "num_restarts",
+ "value": "36"
+ },
+ {
+ "key": "process_name",
+ "value": "storage_proxy_exec"
+ },
+ {
+ "key": "reason_string",
+ "value": "process storage_proxy_exec is restarting frequently on the cluster."
+ },
+ {
+ "key": "cluster_name",
+ "value": "cohesity-cluster-1"
+ },
+ {
+ "key": "duration_seconds",
+ "value": "3600"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "alertsList": [
+ {
+ "id": "158138",
+ "alertCode": "CE00301008",
+ "firstTimestampUsecs": 1707814673947198,
+ "latestTimestampUsecs": 1707814673947198,
+ "alertType": 1008,
+ "dedupTimestamps": [
+ ],
+ "dedupCount": 2405,
+ "clusterName": "cohesity-cluster-2",
+ "clusterId": 878948577151123,
+ "alertTypeBucket": "kHardware",
+ "alertCategory": "kCluster",
+ "severity": "kWarning",
+ "alertState": "kOpen",
+ "alertDocument": {
+ "alertName": "TimeService",
+ "alertDescription": "Node 3812890615711891 with ip 1.1.1.1 cannot sync time.",
+ "alertCause": "The difference between the node's time and the server 1.1.1.1's time is 35 seconds. Resyncing failed 3 times.A new NTP server might be required.",
+ "alertHelpText": "Verify NTP server."
+ },
+ "vaults": null,
+ "propertyList": [
+ {
+ "key": "node_id",
+ "value": "3812890615711891"
+ },
+ {
+ "key": "node_ip",
+ "value": "1.1.1.1"
+ },
+ {
+ "key": "reason_string",
+ "value": "The difference between the node's time and the server 1.1.1.1's time is 35 seconds. Resyncing failed 3 times.A new NTP server might be required."
+ },
+ {
+ "key": "cluster_name",
+ "value": "cohesity-cluster-2"
+ }
+ ]
+ }
+ ]
+ }
+]
\ No newline at end of file
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AuditLogList.json b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AuditLogList.json
new file mode 100644
index 000000000000..7e55e5853382
--- /dev/null
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/test_data/CohesityHeliosEventCollector-AuditLogList.json
@@ -0,0 +1,94 @@
+[
+ {
+ "auditLogs": [
+ {
+ "details": "test@test.com@helios logged in from \"1.2.3.4\" with role \"Super Admin\".",
+ "username": "test@test.com",
+ "domain": "helios",
+ "sourceType": "helios",
+ "entityName": "test@test.com",
+ "entityType": "User",
+ "action": "Login",
+ "timestampUsecs": 170691857331522,
+ "ip": "1.2.3.4",
+ "isImpersonation": false,
+ "tenantId": "1234/",
+ "originalTenantId": "",
+ "serviceContext": "Mcm"
+ },
+ {
+ "details": "test@test.com@helios logged in from \"1.2.3.4\" with role \"Super Admin\".",
+ "username": "test@test.com",
+ "domain": "helios",
+ "sourceType": "helios",
+ "entityName": "test@test.com",
+ "entityType": "User",
+ "action": "Login",
+ "timestampUsecs": 1706191757331522,
+ "ip": "1.2.3.4",
+ "isImpersonation": false,
+ "tenantId": "1234/",
+ "originalTenantId": "",
+ "serviceContext": "Mcm"
+ }
+ ],
+ "count": 2
+ },
+ {
+ "auditLogs": [
+ {
+ "details": "test@test.com@helios created api key \"test\" with id \"1234\".",
+ "username": "test@test.com",
+ "domain": "helios",
+ "sourceType": "helios",
+ "entityName": "ferrum-labs-integration",
+ "entityType": "ApiKey",
+ "action": "Create",
+ "timestampUsecs": 1706038602184624,
+ "ip": "1.2.3.4",
+ "isImpersonation": false,
+ "tenantId": "1234/",
+ "originalTenantId": "",
+ "newRecord": "{\"id\":\"491f5e75-dd50-4ed7-54e9-8faa6a6dc27c\",\"name\":\"test\"}",
+ "serviceContext": "Mcm"
+ },
+ {
+ "details": "test@test.com@helios created api key \"test\" with id \"1234\".",
+ "username": "test@test.com",
+ "domain": "helios",
+ "sourceType": "helios",
+ "entityName": "ferrum-labs-integration",
+ "entityType": "ApiKey",
+ "action": "Create",
+ "timestampUsecs": 1706038601184624,
+ "ip": "1.2.3.4",
+ "isImpersonation": false,
+ "tenantId": "1234/",
+ "originalTenantId": "",
+ "newRecord": "{\"id\":\"491f5e75-dd50-4ed7-54e9-8faa6a6dc27c\",\"name\":\"test\"}",
+ "serviceContext": "Mcm"
+ }
+ ],
+ "count": 2
+ },
+ {
+ "auditLogs": [
+ {
+ "details": "test@test.com@helios logged in from \"1.2.3.4\" with role \"Super Admin\".",
+ "username": "test@test.com",
+ "domain": "helios",
+ "sourceType": "helios",
+ "entityName": "test@test.com",
+ "entityType": "User",
+ "action": "Login",
+ "timestampUsecs": 1706038519477685,
+ "ip": "1.2.3.4",
+ "isImpersonation": false,
+ "tenantId": "1234/",
+ "originalTenantId": "",
+ "serviceContext": "Mcm"
+ }
+ ],
+ "count": 1
+ }
+]
\ No newline at end of file
diff --git a/Packs/CohesityHelios/ReleaseNotes/1_1_0.md b/Packs/CohesityHelios/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..00f5c769e0ca
--- /dev/null
+++ b/Packs/CohesityHelios/ReleaseNotes/1_1_0.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### New: Cohesity Helios Event Collector
+
+- New: This is the Cohesity Helios Event Collector integration for XSIAM. (Available from Cortex XSIAM 2.1).
diff --git a/Packs/CohesityHelios/pack_metadata.json b/Packs/CohesityHelios/pack_metadata.json
index 1be583119542..959f1f216794 100644
--- a/Packs/CohesityHelios/pack_metadata.json
+++ b/Packs/CohesityHelios/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cohesity Helios",
"description": "This integration interacts with Cohesity Helios and performs actions based on alerts raised.",
"support": "partner",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.1.0",
"author": "Cohesity Inc.",
"url": "https://www.cohesity.com",
"email": "support@cohesity.com",
diff --git a/Packs/CommonPlaybooks/.pack-ignore b/Packs/CommonPlaybooks/.pack-ignore
index db2a4b3acd5e..81461256b5d1 100644
--- a/Packs/CommonPlaybooks/.pack-ignore
+++ b/Packs/CommonPlaybooks/.pack-ignore
@@ -22,6 +22,7 @@ NonFoundHashes
opswat
filescan
Stringify
+QR
[file:playbook-File_Enrichment_-_File_reputation.yml]
ignore=BA101
diff --git a/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3.yml b/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3.yml
index 93d10b61604d..73c476a063d9 100644
--- a/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3.yml
+++ b/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3.yml
@@ -353,17 +353,29 @@ tasks:
isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: 7f313517-52cb-4653-8c77-08819d4910f3
+ taskid: 3543872e-d47e-47a3-8456-39ff9ccfcabd
type: playbook
task:
- id: 7f313517-52cb-4653-8c77-08819d4910f3
+ id: 3543872e-d47e-47a3-8456-39ff9ccfcabd
version: -1
name: Account Enrichment - Generic v2.1
playbookName: Account Enrichment - Generic v2.1
type: playbook
iscommand: false
brand: ""
- description: ''
+ description: |-
+ Enrich accounts using one or more integrations.
+ Supported integrations:
+ - Active Directory
+ - Microsoft Graph User
+ - SailPoint IdentityNow
+ - SailPoint IdentityIQ
+ - PingOne
+ - Okta
+ - AWS IAM
+ - Cortex XDR (account enrichment and reputation)
+
+ Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations). For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
nexttasks:
'#none#':
- "26"
@@ -374,6 +386,11 @@ tasks:
root: inputs.Username
transformers:
- operator: uniq
+ Domain:
+ complex:
+ root: inputs.AccountDomain
+ transformers:
+ - operator: uniq
separatecontext: true
loop:
iscommand: false
@@ -507,9 +524,28 @@ inputs:
description: The IP addresses to enrich
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: MD5
value:
@@ -624,6 +660,13 @@ inputs:
required: false
description: Whether to execute the reputation command on the indicator.
playbookInputQuery:
+- key: AccountDomain
+ value: {}
+ required: false
+ description: |-
+ Optional - This input is needed for the IAM-get-user command (used in the Account Enrichment - IAM playbook). Please provide the domain name that the user is related to.
+ Example: @xsoar.com
+ playbookInputQuery:
outputs:
- contextPath: IP
description: The IP object.
diff --git a/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3_README.md b/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3_README.md
index 00160c4e49d6..a4a058627057 100644
--- a/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/Entity_Enrichment_-_Generic_v3_README.md
@@ -6,14 +6,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* CVE Enrichment - Generic v2
-* URL Enrichment - Generic v2
-* Domain Enrichment - Generic v2
+* IP Enrichment - Generic v2
* Account Enrichment - Generic v2.1
* Email Address Enrichment - Generic v2.1
-* IP Enrichment - Generic v2
+* Domain Enrichment - Generic v2
* Endpoint Enrichment - Generic v2.1
* File Enrichment - Generic v2
+* URL Enrichment - Generic v2
+* CVE Enrichment - Generic v2
### Integrations
@@ -34,7 +34,7 @@ This playbook does not use any commands.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| IP | The IP addresses to enrich | IP.Address | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| MD5 | File MD5 to enrich | File.MD5 | Optional |
| SHA256 | File SHA256 to enrich | File.SHA256 | Optional |
| SHA1 | File SHA1 to enrich | File.SHA1 | Optional |
@@ -48,6 +48,7 @@ This playbook does not use any commands.
| CVE | CVE ID to enrich. | CVE.ID | Optional |
| URLSSLVerification | Whether to verify SSL certificates for URLs. Can be True or False. | False | Optional |
| UseReputationCommand | Whether to execute the reputation command on the indicator. | False | Optional |
+| AccountDomain | Optional - This input is needed for the IAM-get-user command \(used in the Account Enrichment - IAM playbook\). Please provide the domain name that the user is related to. Example: @xsoar.com | | Optional |
## Playbook Outputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2.yml
index 361ad912427c..81569f63ae41 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2.yml
@@ -15,7 +15,7 @@ description: |-
- AWS IAM
- Cortex XDR (account enrichment and reputation)
- Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations. For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
+ Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations). For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2_README.md
index e397a9e42ff6..f1c3d11849a0 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Account_Enrichment_-_Generic_v2.2_README.md
@@ -9,7 +9,7 @@ Supported integrations:
- AWS IAM
- Cortex XDR (account enrichment and reputation)
-Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations. For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
+Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations). For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
## Dependencies
@@ -25,22 +25,22 @@ This playbook does not use any integrations.
### Scripts
-* Set
* IsIntegrationAvailable
* SetAndHandleEmpty
+* Set
### Commands
* iam-get-user
* identityiq-search-identities
-* msgraph-user-get-manager
-* okta-get-user
+* ad-get-user
* xdr-list-risky-users
* aws-iam-get-user
-* msgraph-user-get
* identitynow-get-accounts
-* ad-get-user
* pingone-get-user
+* msgraph-user-get-manager
+* msgraph-user-get
+* okta-get-user
## Playbook Inputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
index 68a4fb2f39a7..d1111f41fe32 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
@@ -74,7 +74,7 @@ tasks:
{
"position": {
"x": -1070,
- "y": 1260
+ "y": 1470
}
}
note: false
@@ -971,7 +971,7 @@ tasks:
'#default#':
- "2"
"yes":
- - "48"
+ - "76"
scriptarguments:
brandname:
simple: Active Directory Query v2
@@ -1551,15 +1551,14 @@ tasks:
scriptarguments:
username:
complex:
- root: Blocklist
- accessor: Final
+ root: UserAD
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 1060,
- "y": 920
+ "x": 1050,
+ "y": 1290
}
}
note: false
@@ -1914,7 +1913,7 @@ tasks:
'#default#':
- "2"
"yes":
- - "69"
+ - "78"
scriptarguments:
brandname:
simple: Microsoft Graph User
@@ -2129,15 +2128,14 @@ tasks:
scriptarguments:
user:
complex:
- root: Blocklist
- accessor: Final
+ root: UserMSGraph
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 1480,
- "y": 920
+ "y": 1290
}
}
note: false
@@ -2363,7 +2361,176 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
-system: true
+ "76":
+ id: "76"
+ taskid: da02de44-c739-4db1-85bc-3c053f8aa183
+ type: regular
+ task:
+ id: da02de44-c739-4db1-85bc-3c053f8aa183
+ version: -1
+ name: Active Directory - Get User
+ description: Retrieves detailed information about a user account. The user can be specified by name, email address, or as an Active Directory Distinguished Name (DN). If no filter is specified, all users are returned.
+ script: '|||ad-get-user'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "77"
+ scriptarguments:
+ extend-context:
+ simple: UserAD=attributes.sAMAccountName
+ ignore-outputs:
+ simple: "true"
+ username:
+ complex:
+ root: Blocklist
+ accessor: Final
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1050,
+ "y": 920
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "77":
+ id: "77"
+ taskid: 419df6f0-2416-4a48-8124-eb64ce5da93a
+ type: condition
+ task:
+ id: 419df6f0-2416-4a48-8124-eb64ce5da93a
+ version: -1
+ name: Does the username exist?
+ description: Verify that the user exists.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "2"
+ "yes":
+ - "48"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: UserAD
+ iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1050,
+ "y": 1090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "78":
+ id: "78"
+ taskid: e56d753f-ac0c-417d-83b3-1eec253d740b
+ type: regular
+ task:
+ id: e56d753f-ac0c-417d-83b3-1eec253d740b
+ version: -1
+ name: Microsoft Graph User - Get User
+ description: |-
+ Retrieves the properties and relationships of a user object. For more information, visit: https://docs.microsoft.com/en-us/graph/api/user-update?view=graph-rest-1.0.
+ Permissions: - User.Read (Delegated) - User.Read.All (Application).
+ script: '|||msgraph-user-get'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "79"
+ scriptarguments:
+ extend-context:
+ simple: UserMSGraph=id
+ ignore-outputs:
+ simple: "true"
+ user:
+ complex:
+ root: Blocklist
+ accessor: Final
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 920
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "79":
+ id: "79"
+ taskid: 6a4ebfcb-e506-4333-81b5-8889065e2fbe
+ type: condition
+ task:
+ id: 6a4ebfcb-e506-4333-81b5-8889065e2fbe
+ version: -1
+ name: Does the username exist?
+ description: Verify that the user exists.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "2"
+ "yes":
+ - "69"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: UserMSGraph
+ iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 1090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
@@ -2384,7 +2551,6 @@ view: |-
"32_33_yes": 0.24,
"36_2_#default#": 0.1,
"37_2_#default#": 0.12,
- "37_48_yes": 0.42,
"43_44_yes": 0.49,
"52_39_#default#": 0.2,
"53_2_#default#": 0.1,
@@ -2396,7 +2562,6 @@ view: |-
"59_2_#default#": 0.1,
"59_33_yes": 0.14,
"63_2_#default#": 0.1,
- "63_69_yes": 0.44,
"65_2_#default#": 0.1,
"65_33_yes": 0.1,
"67_2_#default#": 0.1,
@@ -2410,7 +2575,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 2775,
+ "height": 2985,
"width": 8080,
"x": -5370,
"y": -1450
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2_README.md
index 46ea516871b4..2cb649faf5cb 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2_README.md
@@ -17,7 +17,7 @@ Supported integrations for this playbook:
* Prisma Cloud IAM
* Zoom IAM
* Atlassian IAM
-* GitHub IAM
+* GitHub IAM.
## Dependencies
@@ -33,19 +33,21 @@ This playbook does not use any sub-playbooks.
### Scripts
-* SetAndHandleEmpty
* IsIntegrationAvailable
+* SetAndHandleEmpty
### Commands
+* iam-disable-user
* gsuite-user-update
* identityiq-disable-account
-* pan-os-register-user-tag
* pingone-deactivate-user
-* identityiq-get-accounts
* msgraph-user-account-disable
+* pan-os-register-user-tag
+* ad-get-user
+* msgraph-user-get
* ad-disable-account
-* iam-disable-user
+* identityiq-get-accounts
## Playbook Inputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2.yml
index 6486cf641196..a6a1c7347433 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2.yml
@@ -31,8 +31,8 @@ tasks:
view: |-
{
"position": {
- "x": 750,
- "y": -230
+ "x": 960,
+ "y": -220
}
}
note: false
@@ -58,8 +58,8 @@ tasks:
view: |-
{
"position": {
- "x": 750,
- "y": 680
+ "x": 960,
+ "y": 710
}
}
note: false
@@ -109,65 +109,6 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "4":
- id: "4"
- taskid: 0f8c5d9f-3217-4e43-8956-560aabf63ea5
- type: condition
- task:
- id: 0f8c5d9f-3217-4e43-8956-560aabf63ea5
- version: -1
- name: Is Cisco Email Security Available?
- description: Returns 'yes' if integration brand is available. Otherwise returns 'no'
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "2"
- "yes":
- - "8"
- separatecontext: false
- conditions:
- - label: "yes"
- condition:
- - - operator: isExists
- left:
- value:
- complex:
- root: modules
- filters:
- - - operator: isEqualString
- left:
- value:
- simple: modules.brand
- iscontext: true
- right:
- value:
- simple: CiscoEmailSecurity
- - - operator: isEqualString
- left:
- value:
- simple: modules.state
- iscontext: true
- right:
- value:
- simple: active
- accessor: brand
- iscontext: true
- view: |-
- {
- "position": {
- "x": -70,
- "y": 260
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"5":
id: "5"
taskid: 2c065c72-23f4-41d6-8391-027a43640107
@@ -244,47 +185,6 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "8":
- id: "8"
- taskid: e16965eb-49c2-4d54-87ca-6861f04b7170
- type: regular
- task:
- id: e16965eb-49c2-4d54-87ca-6861f04b7170
- version: -1
- name: Cisco Email Security (Deprecated)
- description: Performs actions on list entries. Supports add, edit, and append.
- script: '|||cisco-email-security-list-entry-add'
- type: regular
- iscommand: true
- brand: ""
- nexttasks:
- '#none#':
- - "2"
- scriptarguments:
- action:
- simple: append
- list_type:
- simple: blocklist
- sender_addresses:
- complex:
- root: inputs.EmailToBlock
- view_by:
- simple: sender
- separatecontext: false
- view: |-
- {
- "position": {
- "x": -70,
- "y": 490
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: true
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"9":
id: "9"
taskid: 3615eb04-9998-4341-86ab-6c3494a4c876
@@ -425,36 +325,6 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "13":
- id: "13"
- taskid: 042406d1-8d98-483a-86a3-fba433057d27
- type: title
- task:
- id: 042406d1-8d98-483a-86a3-fba433057d27
- version: -1
- name: Cisco Email Security
- type: title
- iscommand: false
- brand: ""
- description: ''
- nexttasks:
- '#none#':
- - "4"
- separatecontext: false
- view: |-
- {
- "position": {
- "x": -70,
- "y": 130
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"14":
id: "14"
taskid: 45e2cbf1-08bc-4085-8d3e-a0fe2fd4553e
@@ -635,7 +505,6 @@ tasks:
'#default#':
- "2"
"yes":
- - "13"
- "15"
- "12"
- "11"
@@ -654,8 +523,8 @@ tasks:
view: |-
{
"position": {
- "x": 750,
- "y": -90
+ "x": 960,
+ "y": -80
}
}
note: false
@@ -670,21 +539,19 @@ view: |-
{
"linkLabelsPosition": {
"16_17_yes": 0.62,
- "16_2_#default#": 0.32,
- "18_2_#default#": 0.67,
- "4_2_#default#": 0.21,
- "4_8_yes": 0.62,
- "5_2_#default#": 0.27,
+ "16_2_#default#": 0.24,
+ "18_2_#default#": 0.12,
+ "5_2_#default#": 0.16,
"5_9_yes": 0.62,
"6_10_yes": 0.62,
"6_2_#default#": 0.18
},
"paper": {
"dimensions": {
- "height": 975,
- "width": 2020,
- "x": -70,
- "y": -230
+ "height": 995,
+ "width": 1620,
+ "x": 330,
+ "y": -220
}
}
}
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2_README.md
index 231d8211d682..f6130f803ef9 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Email_-_Generic_v2_README.md
@@ -25,10 +25,9 @@ This playbook does not use any integrations.
### Commands
-* mimecast-create-policy
-* cisco-email-security-list-entry-add
* cisco-sma-list-entry-append
* fireeye-ex-update-blockedlist
+* mimecast-create-policy
## Playbook Inputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
index ff678d669453..20577bc8d831 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
@@ -3648,9 +3648,28 @@ inputs:
description: "Possible values: True/False. Default: True.\nWhether to provide user verification for blocking those IPs. \n\nFalse - No prompt will be displayed to the user.\nTrue - The server will ask the user for blocking verification and will display the blocking list."
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use the default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: SiteName
value: {}
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3_README.md
index d63bbbbccbe1..df279b16c809 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3_README.md
@@ -28,12 +28,13 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* Prisma SASE - Block IP
+* Cisco FirePower- Append network group object
* PAN-OS - Block IP - Static Address Group
+* Checkpoint - Block IP - Custom Block Rule
* PAN-OS - Block IP - Custom Block Rule
-* PAN-OS DAG Configuration
* Sophos Firewall - Block IP
-* Cisco FirePower- Append network group object
-* Checkpoint - Block IP - Custom Block Rule
+* PAN-OS DAG Configuration
### Integrations
@@ -48,19 +49,19 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Commands
-* fortigate-ban-ip
-* cisco-asa-create-rule
-* aria-block-dest-subnet
-* threatx-blacklist-ip
-* createNewIndicator
-* enrichIndicators
* zscaler-blacklist-ip
-* threatx-block-ip
-* setIndicators
-* sw-block-domain-or-ip
+* aria-block-dest-subnet
* akamai-add-elements-to-network-list
+* threatx-block-ip
+* threatx-blacklist-ip
+* fortigate-ban-ip
* sigsci-blacklist-add-ip
+* sw-block-domain-or-ip
+* appendIndicatorField
+* enrichIndicators
+* createNewIndicator
* f5-silverline-ip-object-add
+* cisco-asa-create-rule
## Playbook Inputs
@@ -76,13 +77,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| Tag | Insert a tag name with which indicators will get tagged. This tag can be used later in the External Dynamic Lists integration by using the tag for filtering IPs in the indicator query. | | Optional |
| DAG | This input determines whether Palo Alto Networks Panorama or Firewall Dynamic Address Groups are used. Determine the Dynamic Address Group tag for IPs list handling. | | Optional |
| UserVerification | Possible values: True/False. Default: True. Whether to provide user verification for blocking those IPs.
False - No prompt will be displayed to the user. True - The server will ask the user for blocking verification and will display the blocking list. | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| SiteName | Signal Sciences WAF - Enter the site name for the integration to be applied. The site name can be found in your instance console. | | Optional |
| AkamaiNetworkListID | Akamai's WAF network list ID, which is mandatory to be mentioned for the integration. The chosen IPs will be added to this ID. | | Optional |
| InputEnrichment | Possible values: True/False . Default: False Enrich the input IP address/es with reputation commands. | False | Optional |
| RuleName | The rule name/description that will be presented on the created rule in certain integrations \(if there is a need\). The supported integrations: PAN-OS, CheckPoint.
Default input- "XSOAR - Block IP playbook - $\{incident.id\}" | XSOAR - Block IP playbook - ${incident.id} | Optional |
| RuleDirection | Determine if a newly created rule should be with the network direction of outbound or inbound blocked traffic. Possible values: inbound or outbound Default: outbound | outbound | Optional |
| DAGName | This input determines whether Palo Alto Networks Panorama or Firewall Dynamic Address Groups are used. Determine the Dynamic Address Group name for IPs list handling. | | Optional |
+| Folder | For Prisma SASE usage - Specify the scope for a newly created security rule to be applied. Remember, this input will only be used when there is no input to the CategoryName. Default: Shared | Shared | Optional |
## Playbook Outputs
@@ -101,6 +103,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| SigSciences.Corp.Site.Blacklist.ID | Signal Sciences created rule ID. | unknown |
| SigSciences.Corp.Site.Blacklist.Source | Signal Sciences blocked address in a dedicated rule. | unknown |
| SigSciences.Corp.Site.Blacklist.CreatedBy | Signal Sciences - the blocking rule's creator name. | unknown |
+| PrismaSase | The root context key for Prisma SASE integration output. | unknown |
+| PrismaSase.AddressGroup | The Prisma Access Address group object. | unknown |
+| PrismaSase.SecurityRule | Created security rule. | unknown |
+| PrismaSase.SecurityRule.profile_setting | The Security rule group object in the rule. | unknown |
+| PrismaSase.CandidateConfig | Configuration job object. | unknown |
+| PrismaSase.Address | Created address object. | unknown |
## Playbook Image
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3.yml
index 9266ea4cc81e..717bfb5e1440 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3.yml
@@ -1630,9 +1630,28 @@ inputs:
description: "Possible values: True/False. Default: True.\nWhether to provide user verification for blocking those IPs. \n\nFalse - No prompt will be displayed to the user.\nTrue - The server will ask the user for blocking verification and will display the blocking list."
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use the default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: SiteName
value: {}
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3_README.md
index 441792d98fb9..2d310574bd70 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Indicators_-_Generic_v3_README.md
@@ -13,12 +13,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Block File - Generic v2
-* Block Email - Generic v2
* Block Domain - Generic v2
-* Block Account - Generic v2
-* Block IP - Generic v3
* Block URL - Generic v2
+* Block Email - Generic v2
+* Block IP - Generic v3
+* Block File - Generic v2
+* Block Account - Generic v2
### Integrations
@@ -59,7 +59,7 @@ This playbook does not use any integrations.
| Tag | Insert a tag name with which indicators will get tagged. This tag can be used later in the External Dynamic Lists integration by using the tag for filtering IPs in the indicator query. | Blocked Indicator In Systems | Optional |
| DAG | This input determines whether Palo Alto Networks Panorama or Firewall Dynamic Address Groups are used. Specify the Dynamic Address Group tag name for IPs list handling. | | Optional |
| UserVerification | Possible values: True/False. Default: True. Whether to provide user verification for blocking those IPs.
False - No prompt will be displayed to the user. True - The server will ask the user for blocking verification and will display the blocking list. | True | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| SiteName | Signal Sciences WAF - Enter the site name for the integration to be applied. The site name can be found in your instance console. | | Optional |
| AkamaiNetworkListID | Akamai's WAF network list ID, which is mandatory to be mentioned for the integration. The chosen IPs will be added to this ID. | | Optional |
| CiscoFWSource | Cisco ASA \(firewall\) value for the rule's source object in the created blocking rule. Can be the value of an IPv4, an address block, or the name of a network object. | | Optional |
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
index b408b1a2874c..f7e487cbcbc0 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
@@ -1138,9 +1138,7 @@ tasks:
- "41"
scriptarguments:
brandname:
- simple: Netcraft
- results:
- - brandInstances
+ simple: Netcraft V2
separatecontext: false
view: |-
{
@@ -1165,11 +1163,13 @@ tasks:
id: 0e06c863-2829-4749-8d1a-6daeb34d89c6
version: -1
name: Block URL with Netcraft
- description: Reports an attack to Netcraft.
- script: Netcraft|||netcraft-report-attack
+ description: |
+ Report a new attack or authorize an existing attack in the Takedown Service.
+ If a takedown for the attack already exists in the Netcraft system it will be authorized, otherwise, a new takedown will be added and authorized.
+ script: '|||netcraft-attack-report'
type: regular
iscommand: true
- brand: Netcraft
+ brand: ""
nexttasks:
'#none#':
- "2"
@@ -1414,6 +1414,6 @@ inputs:
playbookInputQuery: null
outputs: []
tests:
-- Send Investigation Summary Reports - Test
+- No tests (auto formatted)
fromversion: 6.5.0
system: true
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2_README.md
index f5f3fb5f18ca..2c26a193b535 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2_README.md
@@ -14,27 +14,27 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* PAN-OS - Block URL - Custom URL Category
* Sophos Firewall - Block URL
+* Prisma SASE - Block URL
* Checkpoint - Block URL
+* PAN-OS - Block URL - Custom URL Category
### Integrations
-* Netcraft
* Forcepoint
* Zscaler
### Scripts
-* SetAndHandleEmpty
* IsIntegrationAvailable
+* SetAndHandleEmpty
### Commands
* fp-add-address-to-category
-* netcraft-report-attack
-* appendIndicatorField
* zscaler-blacklist-url
+* netcraft-attack-report
+* appendIndicatorField
## Playbook Inputs
@@ -52,6 +52,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| EDLServerIP | EDL Server IP Address | | Optional |
| device-group | Device group for the Custom URL Category \(Panorama instances\). | | Optional |
| Tag | Insert a tag name with which indicators will get tagged. This tag can be used later in the External Dynamic Lists integration by using the tag for filtering IPs in the indicator query. | | Optional |
+| Folder | For Prisma SASE usage - Specify the scope for a newly created security rule to be applied. Remember, this input will only be used when there is no input to the CategoryName. Default: Shared | Shared | Optional |
## Playbook Outputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis.yml
index 3a89a4e49192..f508851cfa53 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis.yml
@@ -3,7 +3,7 @@ version: -1
contentitemexportablefields:
contentitemfields: {}
name: Command-Line Analysis
-description: "This playbook takes a command line from the alert and performs the following actions:\n- Checks for base64 string and decodes if exists\n- Extracts and enriches indicators from the command line\n- Checks specific arguments for malicious usage \n\nAt the end of the playbook, it sets a possible verdict for the command line, based on the finding:\n1. Indicators found in the command line\n2. Found AMSI techniques\n3. Found suspicious parameters\n4. Usage of malicious tools\n5. Indication of network activity\n6. Indication of suspicious LOLBIN execution\n\nNote: In case you are wishing to run this playbook with a list of command lines, set this playbook to be running in a loop. To do so, navigate to the 'Loop' and check \"For Each Input\"."
+description: "This playbook takes a command line from the alert and performs the following actions:\n- Checks for base64 string and decodes if exists\n- Extracts and enriches indicators from the command line\n- Checks specific arguments for malicious usage \n\nAt the end of the playbook, it sets a possible verdict for the command line, based on the finding:\n1. Indicators found in the command line\n2. Found AMSI techniques\n3. Found suspicious parameters\n4. Usage of malicious tools\n5. Indication of network activity\n6. Indication of suspicious LOLBIN execution\n\nNote: To run this playbook with a list of command lines, set this playbook to run in a loop. To do so, navigate to 'Loop' and check \"For Each Input\"."
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis_README.md
index 45fc4fc90fd1..193c46783963 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Command-Line_Analysis_README.md
@@ -11,7 +11,7 @@ At the end of the playbook, it sets a possible verdict for the command line, bas
5. Indication of network activity
6. Indication of suspicious LOLBIN execution
-Note: In case you are wishing to run this playbook with a list of command lines, set this playbook to be running in a loop. To do so, navigate to the 'Loop' and check "For Each Input".
+Note: To run this playbook with a list of command lines, set this playbook to run in a loop. To do so, navigate to 'Loop' and check "For Each Input".
## Dependencies
@@ -27,10 +27,10 @@ This playbook does not use any integrations.
### Scripts
-* Base64Decode
* Set
* MatchRegexV2
* DeleteContext
+* Base64Decode
### Commands
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes.yml
index d17009150973..efde48091457 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes.yml
@@ -35,6 +35,10 @@ tasks:
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"2":
id: "2"
taskid: d0c581d2-789c-4a12-8940-37f17a47b4f6
@@ -51,13 +55,17 @@ tasks:
view: |-
{
"position": {
- "x": 520,
- "y": 970
+ "x": 530,
+ "y": 950
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"4":
id: "4"
taskid: b797fdc7-1704-442a-8605-ee06bfb0bf54
@@ -74,7 +82,7 @@ tasks:
'#default#':
- "2"
"yes":
- - "18"
+ - "44"
separatecontext: false
conditions:
- label: "yes"
@@ -88,13 +96,17 @@ tasks:
view: |-
{
"position": {
- "x": 970,
- "y": 240
+ "x": 1080,
+ "y": 200
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"9":
id: "9"
taskid: f7142eb2-9483-4546-8d07-a828d636d0ad
@@ -111,7 +123,7 @@ tasks:
'#default#':
- "2"
"yes":
- - "16"
+ - "46"
separatecontext: false
conditions:
- label: "yes"
@@ -125,13 +137,17 @@ tasks:
view: |-
{
"position": {
- "x": -250,
- "y": 240
+ "x": -40,
+ "y": 190
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"10":
id: "10"
taskid: eff5d9b7-ea36-4310-8650-5e26fa38209e
@@ -148,7 +164,7 @@ tasks:
'#default#':
- "2"
"yes":
- - "15"
+ - "45"
separatecontext: false
conditions:
- label: "yes"
@@ -159,16 +175,23 @@ tasks:
complex:
root: inputs.SHA1
iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
view: |-
{
"position": {
"x": 520,
- "y": 230
+ "y": 190
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"15":
id: "15"
taskid: 65ff1c51-6bc1-4171-8bde-eb6ecb862f4e
@@ -186,32 +209,26 @@ tasks:
'#none#':
- "2"
scriptarguments:
- confidenceThreshold: {}
file:
complex:
root: inputs.SHA1
- include_inactive: {}
- long: {}
- md5: {}
- owners: {}
- ratingThreshold: {}
- retries: {}
- sha256: {}
- threshold: {}
- wait: {}
reputationcalc: 2
continueonerror: true
separatecontext: false
view: |-
{
"position": {
- "x": 310,
- "y": 410
+ "x": 300,
+ "y": 750
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"16":
id: "16"
taskid: 9e3c8b51-dbc6-464a-8af7-f5d43b663bd4
@@ -233,28 +250,23 @@ tasks:
file:
complex:
root: inputs.MD5
- include_inactive: {}
- long: {}
- md5: {}
- owners: {}
- ratingThreshold: {}
- retries: {}
- sha256: {}
- threshold: {}
- wait: {}
reputationcalc: 2
continueonerror: true
separatecontext: false
view: |-
{
"position": {
- "x": -490,
- "y": 410
+ "x": -550,
+ "y": 750
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"18":
id: "18"
taskid: 29ef7703-de75-4ff3-844c-a852e333bdbe
@@ -276,28 +288,23 @@ tasks:
file:
complex:
root: inputs.SHA256
- include_inactive: {}
- long: {}
- md5: {}
- owners: {}
- ratingThreshold: {}
- retries: {}
- sha256: {}
- threshold: {}
- wait: {}
reputationcalc: 2
continueonerror: true
separatecontext: false
view: |-
{
"position": {
- "x": 1230,
- "y": 410
+ "x": 1180,
+ "y": 750
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"35":
id: "35"
taskid: fdfd1cd9-3095-455b-8481-072b140ee5de
@@ -317,13 +324,17 @@ tasks:
view: |-
{
"position": {
- "x": -250,
- "y": 115
+ "x": -40,
+ "y": 60
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"36":
id: "36"
taskid: a75bc19c-d514-44ab-885d-fb6664dd0b29
@@ -344,12 +355,16 @@ tasks:
{
"position": {
"x": 520,
- "y": 115
+ "y": 60
}
}
note: false
timertriggers: []
ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"37":
id: "37"
taskid: a2d56e80-d468-46a0-891d-3792a6bf0bd9
@@ -369,25 +384,425 @@ tasks:
view: |-
{
"position": {
- "x": 970,
- "y": 110
+ "x": 1080,
+ "y": 60
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "39":
+ id: "39"
+ taskid: 336d7e69-334b-490c-81c4-4b48788a56be
+ type: condition
+ task:
+ id: 336d7e69-334b-490c-81c4-4b48788a56be
+ version: -1
+ name: Have the hashes been retrieved?
+ type: condition
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ '#default#':
+ - "18"
+ "yes":
+ - "40"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: foundIndicators.value
+ iscontext: true
+ right:
+ value: {}
+ - - operator: stringHasLength
+ left:
+ value:
+ complex:
+ root: foundIndicators
+ accessor: value
+ iscontext: true
+ right:
+ value:
+ simple: "64"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1350,
+ "y": 540
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "40":
+ id: "40"
+ taskid: 4cbe3c2a-dc3d-46fc-8b6a-b589accfdba3
+ type: regular
+ task:
+ id: 4cbe3c2a-dc3d-46fc-8b6a-b589accfdba3
+ version: -1
+ name: Enrich indicators
+ description: commands.local.cmd.enrich.indicators
+ script: Builtin|||enrichIndicators
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ indicatorsValues:
+ complex:
+ root: foundIndicators
+ accessor: value
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1600,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "44":
+ id: "44"
+ taskid: b27dc11a-1b9f-4056-8288-776eaa57bd63
+ type: regular
+ task:
+ id: b27dc11a-1b9f-4056-8288-776eaa57bd63
+ version: -1
+ name: Search indicators
+ description: |-
+ Searches Cortex XSOAR indicators.
+
+ Searches for Cortex XSOAR indicators and returns the id, indicator_type, value, and score/verdict.
+
+ You can add additional fields from the indicators using the add_field_to_context argument.
+ scriptName: SearchIndicator
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "39"
+ scriptarguments:
+ query:
+ simple: value:${inputs.SHA256}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1350,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "45":
+ id: "45"
+ taskid: 1b49a035-86f4-4e1d-81ac-add593725d88
+ type: regular
+ task:
+ id: 1b49a035-86f4-4e1d-81ac-add593725d88
+ version: -1
+ name: Search indicators
+ description: |-
+ Searches Cortex XSOAR indicators.
+
+ Searches for Cortex XSOAR Indicators and returns the id, indicator_type, value, and score/verdict.
+
+ You can add additional fields from the indicators using the add_field_to_context argument.
+ scriptName: SearchIndicator
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "47"
+ scriptarguments:
+ query:
+ simple: value:${inputs.SHA1}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 520,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "46":
+ id: "46"
+ taskid: f9974076-074f-4bf4-82a5-0c804fb7875b
+ type: regular
+ task:
+ id: f9974076-074f-4bf4-82a5-0c804fb7875b
+ version: -1
+ name: Search indicators
+ description: |-
+ Searches Cortex XSOAR indicators.
+
+ Searches for Cortex XSOAR Indicators and returns the id, indicator_type, value, and score/verdict.
+
+ You can add additional fields from the indicators using the add_field_to_context argument.
+ scriptName: SearchIndicator
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "48"
+ scriptarguments:
+ query:
+ simple: value:${inputs.MD5}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -330,
+ "y": 370
}
}
note: false
timertriggers: []
ignoreworker: false
-system: true
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "47":
+ id: "47"
+ taskid: 0161e3d5-ca7c-4e0e-86d4-f767f1c7b64c
+ type: condition
+ task:
+ id: 0161e3d5-ca7c-4e0e-86d4-f767f1c7b64c
+ version: -1
+ name: Have the hashes been retrieved?
+ type: condition
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ '#default#':
+ - "15"
+ "yes":
+ - "49"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: foundIndicators.value
+ iscontext: true
+ right:
+ value: {}
+ - - operator: stringHasLength
+ left:
+ value:
+ complex:
+ root: foundIndicators
+ accessor: value
+ iscontext: true
+ right:
+ value:
+ simple: "40"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 520,
+ "y": 540
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "48":
+ id: "48"
+ taskid: 9fb9988f-d614-4a30-8eb9-09c8a05ad461
+ type: condition
+ task:
+ id: 9fb9988f-d614-4a30-8eb9-09c8a05ad461
+ version: -1
+ name: Have the hashes been retrieved?
+ type: condition
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ '#default#':
+ - "16"
+ "yes":
+ - "50"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: foundIndicators.value
+ iscontext: true
+ right:
+ value: {}
+ - - operator: stringHasLength
+ left:
+ value:
+ complex:
+ root: foundIndicators
+ accessor: value
+ iscontext: true
+ right:
+ value:
+ simple: "32"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -330,
+ "y": 540
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "49":
+ id: "49"
+ taskid: 620476dc-83d3-40fe-8d2f-8f978fcdfa23
+ type: regular
+ task:
+ id: 620476dc-83d3-40fe-8d2f-8f978fcdfa23
+ version: -1
+ name: Enrich indicators
+ description: commands.local.cmd.enrich.indicators
+ script: Builtin|||enrichIndicators
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ indicatorsValues:
+ complex:
+ root: foundIndicators
+ accessor: value
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 750,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "50":
+ id: "50"
+ taskid: e4213b2e-29ba-40d3-8ae3-6deec5436549
+ type: regular
+ task:
+ id: e4213b2e-29ba-40d3-8ae3-6deec5436549
+ version: -1
+ name: Enrich indicators
+ description: commands.local.cmd.enrich.indicators
+ script: Builtin|||enrichIndicators
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ indicatorsValues:
+ complex:
+ root: foundIndicators
+ accessor: value
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -120,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
- "10_15_yes": 0.55,
- "9_16_yes": 0.48
+ "10_2_#default#": 0.39,
+ "10_45_yes": 0.46,
+ "47_15_#default#": 0.44,
+ "47_49_yes": 0.41,
+ "48_16_#default#": 0.57,
+ "48_50_yes": 0.61,
+ "4_2_#default#": 0.15,
+ "9_2_#default#": 0.19,
+ "9_46_yes": 0.51
},
"paper": {
"dimensions": {
- "height": 1135,
- "width": 2100,
- "x": -490,
+ "height": 1115,
+ "width": 2530,
+ "x": -550,
"y": -100
}
}
@@ -400,6 +815,7 @@ inputs:
accessor: SHA256
required: false
description: The SHA256 hash on which to search.
+ playbookInputQuery:
- key: SHA1
value:
complex:
@@ -407,6 +823,7 @@ inputs:
accessor: SHA1
required: false
description: The SHA1 hash on which to search.
+ playbookInputQuery:
- key: MD5
value:
complex:
@@ -414,6 +831,7 @@ inputs:
accessor: MD5
required: false
description: The MD5 hash on which to search.
+ playbookInputQuery:
outputs:
- contextPath: File.SHA256
description: Output for detected SHA256 hash.
@@ -424,6 +842,7 @@ outputs:
- contextPath: File.MD5
description: Output for detected MD5 hash.
type: string
-
+- contextPath: Indicators.Value
+ description: Output for detected hashes.
tests:
- Test Convert file hash to corresponding hashes
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes_README.md
index cdb0d2320e98..5eaeb105ee68 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Convert_file_hash_to_corresponding_hashes_README.md
@@ -1,41 +1,52 @@
-Gets all of the corresponding hashes for a file even if there is only one hash type available.
-For example, if we have only the SHA256 hash, the playbook will get the SHA1 hash and MD5 hash as long as the
+The playbook enables you to get all of the corresponding file hashes for a file even if there is only one hash type available.
+For example, if we have only the SHA256 hash, the playbook will get the SHA1 and MD5 hashes as long as the
original searched hash is recognized by any our the threat intelligence integrations.
## Dependencies
+
This playbook uses the following sub-playbooks, integrations, and scripts.
-## Sub-playbooks
+### Sub-playbooks
+
This playbook does not use any sub-playbooks.
-## Integrations
+### Integrations
+
This playbook does not use any integrations.
-## Scripts
-This playbook does not use any scripts.
+### Scripts
+
+* SearchIndicator
+
+### Commands
-## Commands
+* enrichIndicators
* file
## Playbook Inputs
+
---
-| **Name** | **Description** | **Default Value** | **Source** | **Required** |
-| --- | --- | --- | --- | --- |
-| SHA256 | The SHA256 hash on which to search. | SHA256 | File | Optional |
-| SHA1 | The SHA1 hash on which to search. | SHA1 | File | Optional |
-| MD5 | The MD5 hash on which to search. | MD5 | File | Optional |
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| SHA256 | The SHA256 hash on which to search. | File.SHA256 | Optional |
+| SHA1 | The SHA1 hash on which to search. | File.SHA1 | Optional |
+| MD5 | The MD5 hash on which to search. | File.MD5 | Optional |
## Playbook Outputs
+
---
| **Path** | **Description** | **Type** |
| --- | --- | --- |
-| File.SHA256 | The output for detected SHA256 hash of the file. | string |
-| File.SHA1 | The output for detected SHA1 hash of the file. | string |
-| File.MD5 | The output for detected MD5 hash of the file. | string |
+| File.SHA256 | Output for detected SHA256 hash. | string |
+| File.SHA1 | Output for detected SHA1 hash. | string |
+| File.MD5 | Output for detected MD5 hash. | string |
+| Indicators.Value | Output for detected hashes. | unknown |
## Playbook Image
+
---
-![Convert_file_hash_to_corresponding_hashes](https://raw.githubusercontent.com/demisto/content/1bdd5229392bd86f0cc58265a24df23ee3f7e662/docs/images/playbooks/Convert_file_hash_to_corresponding_hashes.png)
+
+![Convert file hash to corresponding hashes](../doc_files/Convert_file_hash_to_corresponding_hashes.png)
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Email_Address_Enrichment_-_Generic_v2.1.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Email_Address_Enrichment_-_Generic_v2.1.yml
index e3bc04ac5d02..70950998e10e 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Email_Address_Enrichment_-_Generic_v2.1.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Email_Address_Enrichment_-_Generic_v2.1.yml
@@ -6,7 +6,7 @@ description: |-
Enrich email addresses.
- Get information from Active Directory for internal addresses
- Get the domain-squatting reputation for external addresses
- - Email address reputation using !email command
+ - Email address reputation using !email command.
starttaskid: '0'
tasks:
'0':
@@ -177,11 +177,12 @@ tasks:
nexttasks:
'#none#':
- '17'
+ - "23"
separatecontext: false
view: |-
{
"position": {
- "x": 102.5,
+ "x": 22.5,
"y": 790
}
}
@@ -213,7 +214,7 @@ tasks:
view: |-
{
"position": {
- "x": 910,
+ "x": 950,
"y": 790
}
}
@@ -263,7 +264,7 @@ tasks:
view: |-
{
"position": {
- "x": 102.5,
+ "x": 222.5,
"y": 1135
}
}
@@ -314,7 +315,7 @@ tasks:
view: |-
{
"position": {
- "x": 910,
+ "x": 710.5,
"y": 620
}
}
@@ -365,7 +366,7 @@ tasks:
view: |-
{
"position": {
- "x": 102.5,
+ "x": 292.5,
"y": 620
}
}
@@ -436,7 +437,7 @@ tasks:
view: |-
{
"position": {
- "x": 710.5,
+ "x": 750.5,
"y": 1135
}
}
@@ -494,7 +495,7 @@ tasks:
view: |-
{
"position": {
- "x": 102.5,
+ "x": 222.5,
"y": 930
}
}
@@ -540,7 +541,7 @@ tasks:
view: |-
{
"position": {
- "x": 710.5,
+ "x": 750.5,
"y": 930
}
}
@@ -571,7 +572,7 @@ tasks:
view: |-
{
"position": {
- "x": -220,
+ "x": -400,
"y": 465
}
}
@@ -625,7 +626,7 @@ tasks:
view: |-
{
"position": {
- "x": 1110,
+ "x": 1150,
"y": 930
}
}
@@ -674,7 +675,99 @@ tasks:
view: |-
{
"position": {
- "x": 1120,
+ "x": 1160,
+ "y": 1135
+ }
+ }
+ "23":
+ continueonerrortype: ""
+ id: "23"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "4"
+ "yes":
+ - "24"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ brandname:
+ simple: EWS v2
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Returns 'yes' if integration brand is available. Otherwise returns 'no'.
+ id: 766c0f6c-feff-49af-8bfd-62574133e61c
+ iscommand: false
+ name: Is EWS v2 enabled?
+ scriptName: IsIntegrationAvailable
+ type: condition
+ version: -1
+ taskid: 766c0f6c-feff-49af-8bfd-62574133e61c
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": -180,
+ "y": 930
+ }
+ }
+ "24":
+ continueonerrortype: ""
+ fieldMapping:
+ - incidentfield: Additional Email Addresses
+ output:
+ simple: ${EWS.ResolvedNames.email_address}
+ id: "24"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "4"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ full-contact-data:
+ simple: "False"
+ identifier:
+ complex:
+ accessor: Address
+ filters:
+ - - ignorecase: true
+ left:
+ iscontext: true
+ value:
+ simple: Account.Email.NetworkType
+ operator: isEqualString
+ right:
+ value:
+ simple: Internal
+ root: Account.Email
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ skipunavailable: true
+ task:
+ brand: ""
+ description: This operation verifies aliases and matches display names to the correct mailbox user. It handles one ambiguous name at a time. If there are multiple potential matches, all will be returned, but limited to a maximum of 100 candidates.
+ id: d10b47f1-37e1-4354-870b-0ac7b022d7d3
+ iscommand: true
+ name: Get full contact info
+ script: '|||ews-resolve-name'
+ type: regular
+ version: -1
+ taskid: d10b47f1-37e1-4354-870b-0ac7b022d7d3
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -180,
"y": 1135
}
}
@@ -693,8 +786,8 @@ view: |-
"paper": {
"dimensions": {
"height": 1255,
- "width": 1720,
- "x": -220,
+ "width": 1940,
+ "x": -400,
"y": 120
}
}
@@ -747,10 +840,10 @@ outputs:
description: The DBotScore object.
type: unknown
- contextPath: Account.Email.Username
- description: The Email account username
+ description: The Email account username.
type: string
- contextPath: Account.Email.Domain
- description: The Email account domain
+ description: The Email account domain.
type: string
- contextPath: ActiveDirectory.Users.dn
description: The user distinguished name.
@@ -827,7 +920,7 @@ outputs:
- contextPath: ActiveDirectory.Users.mail
description: The user email address.
- contextPath: Account.Email.Address
- description: The Email account full address
+ description: The Email account full address.
type: string
- contextPath: Account.Email.Distance
description: The email address distance compare to the domains in query.
@@ -856,8 +949,19 @@ outputs:
description: The type of the source of the relationship.
- contextPath: Email.Relationships.EntityBType
description: The type of the destination of the relationship.
+- contextPath: EWS.ResolvedNames
+ type: unknown
+ description: EWS resolved name primary key output.
+- contextPath: EWS.ResolvedNames.email_address
+ description: The primary SMTP address of a mailbox user.
+- contextPath: EWS.ResolvedNames.mailbox_type
+ description: The type of mailbox that is represented by the email address.
+- contextPath: EWS.ResolvedNames.name
+ description: The name of a mailbox user.
+- contextPath: EWS.ResolvedNames.routing_type
+ description: The address type for the mailbox.
tests:
- Email Address Enrichment - Generic v2.1 - Test
marketplaces:
- - xsoar
- - marketplacev2
+- xsoar
+- marketplacev2
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Endpoint_Investigation_Plan.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Endpoint_Investigation_Plan.yml
index 822c029d9f82..bd4151895605 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Endpoint_Investigation_Plan.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Endpoint_Investigation_Plan.yml
@@ -2134,7 +2134,7 @@ inputs:
playbookInputQuery:
outputs: []
tests:
-- No tests (auto formatted)
+- Endpoint Investigation Plan - Test
marketplaces:
- marketplacev2
- xpanse
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml
index baa0bb3df044..2424929bfc83 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml
@@ -1381,13 +1381,28 @@ inputs:
description: IP address to enrich and give verdict.
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list
- should be provided in CIDR notation, separated by commas. An example of a list
- of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).
- If a list is not provided, will use the default list provided in the IsIPInRanges
- script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: ResolveIP
value: {}
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md
index b11545db9c85..a4699e522ea9 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md
@@ -7,11 +7,11 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
* URL Enrichment - Generic v2
-* Domain Enrichment - Generic v2
-* Get prevalence for IOCs
* File Reputation
-* Account Enrichment - Generic v2.1
* IP Enrichment - Generic v2
+* Account Enrichment - Generic v2.1
+* Get prevalence for IOCs
+* Domain Enrichment - Generic v2
### Integrations
@@ -37,7 +37,7 @@ This playbook does not use any commands.
| CloseReason | The closing reason of the previous alerts to search for. Possible values are: - Resolved - Threat Handled - Resolved - True Positive - Resolved - False Positive - Resolved - Security Testing - Resolved - Known Issue - Resolved - Duplicate Incident - Resolved - Other - Resolved - Auto | Resolved - False Positive,Resolved - Duplicate Incident,Resolved - Known Issue | Optional |
| FileSHA256 | File SHA256 to enrich and give verdict. | alert.initiatorsha256 | Optional |
| IP | IP address to enrich and give verdict. | alert.hostip | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). | | Optional |
| URL | URL to enrich and give verdict. | alert.url | Optional |
| User | User to enrich and give verdict. \(AWS IAM or Active Directory\). | alert.username | Optional |
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2.yml
index 2de6cb01ceaa..ba881ccc6626 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2.yml
@@ -492,9 +492,28 @@ inputs:
description: The IP addresses to enrich
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: MD5
value:
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2_README.md
index 8b581d295f19..b48a88f22956 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Entity_Enrichment_-_Generic_v2_README.md
@@ -6,13 +6,13 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* IP Enrichment - Generic v2
-* Domain Enrichment - Generic v2
-* Endpoint Enrichment - Generic v2.1
-* URL Enrichment - Generic v2
* File Enrichment - Generic v2
+* URL Enrichment - Generic v2
+* Endpoint Enrichment - Generic v2.1
+* Domain Enrichment - Generic v2
* Account Enrichment - Generic v2.1
* Email Address Enrichment - Generic v2.1
+* IP Enrichment - Generic v2
### Integrations
@@ -33,7 +33,7 @@ This playbook does not use any commands.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| IP | The IP addresses to enrich | IP.Address | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| MD5 | File MD5 to enrich | File.MD5 | Optional |
| SHA256 | File SHA256 to enrich | File.SHA256 | Optional |
| SHA1 | File SHA1 to enrich | File.SHA1 | Optional |
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5.yml
index e3725a04dec8..7c0f69bc6ac6 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5.yml
@@ -116,7 +116,7 @@ tasks:
{
"position": {
"x": 915,
- "y": 1410
+ "y": 1740
}
}
note: false
@@ -215,6 +215,15 @@ tasks:
right:
value:
simple: UTF-8 Unicode text
+ - operator: containsString
+ left:
+ value:
+ simple: inputs.File.Type
+ iscontext: true
+ right:
+ value:
+ simple: JSON data
+ ignorecase: true
- - operator: notContainsString
left:
value:
@@ -303,6 +312,8 @@ tasks:
transformers:
- operator: uniq
iscontext: true
+ right:
+ value: {}
continueonerrortype: ""
view: |-
{
@@ -364,6 +375,15 @@ tasks:
right:
value:
simple: UTF-8 Unicode
+ - operator: containsString
+ left:
+ value:
+ simple: inputs.File.Type
+ iscontext: true
+ right:
+ value:
+ simple: JSON data
+ ignorecase: true
- - operator: notContainsString
left:
value:
@@ -543,6 +563,7 @@ tasks:
nexttasks:
'#none#':
- "19"
+ - "25"
scriptarguments:
entryID:
complex:
@@ -585,7 +606,7 @@ tasks:
{
"position": {
"x": 1320,
- "y": 720
+ "y": 900
}
}
note: false
@@ -1054,8 +1075,8 @@ tasks:
view: |-
{
"position": {
- "x": 2150,
- "y": 720
+ "x": 2190,
+ "y": 900
}
}
note: false
@@ -1131,14 +1152,14 @@ tasks:
view: |-
{
"position": {
- "x": 2150,
- "y": 1040
+ "x": 2190,
+ "y": 1310
}
}
note: false
timertriggers: []
ignoreworker: false
- skipunavailable: false
+ skipunavailable: true
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
@@ -1463,7 +1484,7 @@ tasks:
{
"position": {
"x": 915,
- "y": 1230
+ "y": 1550
}
}
note: false
@@ -1517,7 +1538,7 @@ tasks:
view: |-
{
"position": {
- "x": -290,
+ "x": -330,
"y": 460
}
}
@@ -1546,6 +1567,7 @@ tasks:
- "10"
"yes":
- "13"
+ - "24"
separatecontext: false
conditions:
- label: "yes"
@@ -1601,7 +1623,7 @@ tasks:
view: |-
{
"position": {
- "x": 2150,
+ "x": 2190,
"y": 460
}
}
@@ -1737,10 +1759,142 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "24":
+ id: "24"
+ taskid: 4ee7917b-6813-4e61-8612-5f7116bd30eb
+ type: regular
+ task:
+ id: 4ee7917b-6813-4e61-8612-5f7116bd30eb
+ version: -1
+ name: Extract Text from QR Code
+ description: Extracts the text from a QR code. The output of this script includes the output of the script "extractIndicators" run on the text extracted from the QR code.
+ scriptName: ReadQRCode
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "19"
+ scriptarguments:
+ entry_id:
+ complex:
+ root: inputs.File
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: inputs.File.Type
+ iscontext: true
+ right:
+ value:
+ simple: image
+ ignorecase: true
+ accessor: EntryID
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: InfoFile.EntryID
+ iscontext: true
+ - operator: uniq
+ - operator: RemoveEmpty
+ args:
+ empty_values: {}
+ remove_keys:
+ value:
+ simple: "true"
+ reputationcalc: 2
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1530,
+ "y": 1310
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "25":
+ id: "25"
+ taskid: 35ed31ed-41e9-4108-8580-e94786764ae8
+ type: regular
+ task:
+ id: 35ed31ed-41e9-4108-8580-e94786764ae8
+ version: -1
+ name: Convert PDF to Image
+ description: Converts a PDF file to an image file.
+ script: '|||rasterize-pdf'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "24"
+ scriptarguments:
+ EntryID:
+ complex:
+ root: inputs.File
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: inputs.File.Type
+ iscontext: true
+ right:
+ value:
+ simple: pdf
+ ignorecase: true
+ - operator: containsGeneral
+ left:
+ value:
+ simple: inputs.File.Info
+ iscontext: true
+ right:
+ value:
+ simple: pdf
+ ignorecase: true
+ accessor: EntryID
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ${File(val.Info=="application/pdf").EntryID}
+ iscontext: true
+ - operator: uniq
+ - operator: RemoveEmpty
+ args:
+ empty_values: {}
+ remove_keys:
+ value:
+ simple: "true"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1320,
+ "y": 1130
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
- "13_19_#default#": 0.14,
+ "13_19_#default#": 0.11,
"15_10_#default#": 0.11,
"15_16_yes": 0.38,
"1_10_#default#": 0.17,
@@ -1749,15 +1903,15 @@ view: |-
"23_22_yes": 0.43,
"5_10_#default#": 0.33,
"5_6_yes": 0.36,
- "7_10_#default#": 0.15,
- "7_8_yes": 0.38,
+ "7_10_#default#": 0.1,
+ "7_8_yes": 0.24,
"9_10_#default#": 0.18,
"9_11_yes": 0.35
},
"paper": {
"dimensions": {
- "height": 1485,
- "width": 3085,
+ "height": 1815,
+ "width": 3125,
"x": -555,
"y": -10
}
@@ -2004,11 +2158,24 @@ outputs:
- contextPath: DBotScore.Score
description: The actual score.
type: number
+- contextPath: QRCodeReader
+ description: The QR code reader primary key object.
+ type: unknown
+- contextPath: QRCodeReader.Text
+ description: The raw text extracted from the QR code image.
+ type: String
+- contextPath: QRCodeReader.Domain
+ description: The domains extracted from the QR code image if they are present.
+ type: String
+- contextPath: QRCodeReader.URL
+ description: The URLs extracted from the QR code image if they are present.
+ type: String
+- contextPath: QRCodeReader.IP
+ description: The IPs extracted from the QR code image if they are present.
+ type: String
tests:
- Extract Indicators From File - Generic v2 - Test
fromversion: 5.0.0
-marketplaces:
-- xsoar
-- marketplacev2
contentitemexportablefields:
contentitemfields: {}
+system: true
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5_README.md
index c3d3f57985e4..8122ca1f3b91 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Extract_Indicators_From_File_-_Generic_v2_4_5_README.md
@@ -33,11 +33,11 @@ This playbook does not use any integrations.
### Scripts
-* ExtractIndicatorsFromTextFile
-* ConvertFile
* ReadPDFFileV2
* ExtractIndicatorsFromWordFile
+* ExtractIndicatorsFromTextFile
* SetAndHandleEmpty
+* ConvertFile
### Commands
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2.yml
index 399308184ce6..7976be079994 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2.yml
@@ -5,7 +5,7 @@ description: |-
Enrich a file using one or more integrations.
- Provide threat information
- - File Reputation using !file command
+ - Determine file reputation using the !file command
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2_README.md
index fd69e2e8fdc1..4980dd0ab6d9 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-File_Enrichment_-_Generic_v2_README.md
@@ -1,7 +1,7 @@
Enrich a file using one or more integrations.
- Provide threat information
-- File Reputation using !file command
+- Determine file reputation using the !file command
## Dependencies
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling.yml b/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling.yml
index da4b88c302c6..b1aa868400af 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling.yml
@@ -77,6 +77,8 @@ tasks:
simple: ${inputs.PollingCommandArgName}
timeout:
simple: ${inputs.Timeout}
+ extractMode:
+ simple: ${inputs.ExtractMode}
separatecontext: false
view: |-
{
@@ -292,6 +294,11 @@ inputs:
required: false
description: "Values of the additional arguments for the polling command, for example: (value1,value2,...)."
playbookInputQuery:
+- key: ExtractMode
+ value: {}
+ required: false
+ description: Indicator extraction mode for the command sequence. (In XSOAR 8 and above, manually set the indicator extraction mode on the RunPollingCommand task in the playbook via Advanced -> Indicator Extraction mode for the first run command.)
+ playbookInputQuery:
outputs: []
tests:
- No test
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling_README.md
index 7fbf52fa3477..1dd6528e3c2a 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-GenericPolling_README.md
@@ -39,6 +39,7 @@ This playbook does not use any commands.
| dt | The DT filter for polling IDs. Polling will stop when no results are returned. Use single quotes, for example: WildFire.Report(val.Status!=='Success').SHA256. | - | Required |
| AdditionalPollingCommandArgNames | The names of additional arguments for the polling command. For example, "arg1,arg2,...". | - | Optional |
| AdditionalPollingCommandArgValues | The values of the additional arguments for the polling command. For example, "value1,value2,...". | - | Optional |
+| ExtractMode | Indicator extraction mode for the command sequence. (In XSOAR 8 and above, manually set the indicator extraction mode on the RunPollingCommand task in the playbook via Advanced -> Indicator Extraction mode for the first run command.) | - | Optional |
## Playbook Outputs
---
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2.yml
index b3026ced6cb5..9833fdfa20c3 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2.yml
@@ -785,9 +785,27 @@ inputs:
playbookInputQuery:
- key: InternalRange
value:
- simple: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16"
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A CSV list of IP address ranges (in CIDR notation). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A comma-separated list of IP address ranges (in CIDR notation). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: ResolveIP
value:
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2_README.md
index bbe9a834dd74..08f6fe7afcb1 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_External_-_Generic_v2_README.md
@@ -24,8 +24,8 @@ This playbook does not use any sub-playbooks.
### Commands
-* ip
* vt-private-get-ip-report
+* ip
## Playbook Inputs
@@ -34,12 +34,12 @@ This playbook does not use any sub-playbooks.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| IP | The IP address to enrich. | IP.Address | Optional |
-| InternalRange | A CSV list of IP address ranges \(in CIDR notation\). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 | Optional |
+| InternalRange | A comma-separated list of IP address ranges \(in CIDR notation\). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| ResolveIP | Whether to convert the IP address to a hostname using a DNS query \(True/False\). The default value is true. | True | Required |
| UseReputationCommand | Define if you would like to use the \!IP command. Note: This input should be used whenever there is no auto-extract enabled in the investigation flow. Possible values: True / False. The default value is false. | False | Required |
| extended_data | Define whether you want the generic reputation command to return extended data \(last_analysis_results\). Possible values: True / False. The default value is false. | False | Optional |
| threat_model_association | Define whether you wish to enhance generic reputation command to include additional information such as Threat Bulletins, Attack patterns, Actors, Campaigns, TTPs, vulnerabilities, etc. Note: If set to true, additional 6 API calls will be performed. Possible values: True / False. The default value is false. | False | Optional |
-| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of in the sub-playbooks. Setting this to True will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks.
Set this to True in the parent playbook if you are using the parent playbook, as opposed to using the sub-playbooks directly in your playbooks, as this will improve the performance of the playbook and reduce the overfall size of the incident. | False | Optional |
+| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of the sub-playbooks. Possible values are: True, False.
Setting this to True and using the parent playbook will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks, which improves the performance of the playbook and reduces the overall size of the incident. | False | Optional |
## Playbook Outputs
@@ -52,7 +52,7 @@ This playbook does not use any sub-playbooks.
| Endpoint | The endpoint's object. | unknown |
| Endpoint.Hostname | The hostname to enrich. | string |
| Endpoint.IP | A list of endpoint IP addresses. | string |
-| IP.Address | The IP address. | string |
+| IP.Address | The IP Address. | string |
| IP.InRange | Is the IP in the input ranges? \(could be 'yes' or 'no\). | string |
| DBotScore.Indicator | The indicator that was tested. | string |
| DBotScore.Type | The indicator type. | string |
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2.yml
index 8b0514f90de0..7ad16956db0a 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2.yml
@@ -7,7 +7,7 @@ description: |-
- Resolve IP addresses to hostnames (DNS)
- Provide threat information
- - IP address reputation using !ip command
+ - Determine IP address reputation using the !ip command
- Separate internal and external IP addresses
- For internal IP addresses, get host information.
@@ -587,9 +587,27 @@ inputs:
playbookInputQuery:
- key: InternalRange
value:
- simple: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: ResolveIP
value:
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2_README.md
index b2cb48c3c444..d76874f1952b 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Generic_v2_README.md
@@ -2,10 +2,12 @@ Enrich IP addresses using one or more integrations.
- Resolve IP addresses to hostnames (DNS)
- Provide threat information
-- IP address reputation using !ip command
+- Determine IP address reputation using the !ip command
- Separate internal and external IP addresses
- For internal IP addresses, get host information.
+When executing this playbook through IP Enrichment - Generic v2, IP classification and resolution will be handled by the main playbook, improving performance.
+
## Dependencies
This playbook uses the following sub-playbooks, integrations, and scripts.
@@ -21,8 +23,8 @@ This playbook does not use any integrations.
### Scripts
-* IsIPInRanges
* IPToHost
+* IsIPInRanges
### Commands
@@ -35,12 +37,12 @@ This playbook does not use any commands.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| IP | The IP address to enrich. | IP.Address | Optional |
-| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 | Optional |
-| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). The default value is true. | True | Required |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
+| ResolveIP | Determines whether to convert the IP address to a hostname using a DNS query \(True/ False\). The default value is true. | False | Required |
| UseReputationCommand | Define if you would like to use the \!IP command. Note: This input should be used whenever there is no auto-extract enabled in the investigation flow. Possible values: True / False. The default value is false. | False | Required |
| extended_data | Define whether you want the generic reputation command to return extended data \(last_analysis_results\). Possible values: True / False. The default value is false. | False | Optional |
| threat_model_association | Define whether you wish to enhance generic reputation command to include additional information such as Threat Bulletins, Attack patterns, Actors, Campaigns, TTPs, vulnerabilities, etc. Note: If set to true, additional 6 API calls will be performed. Possible values: True / False. The default value is false. | False | Optional |
-| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of in the sub-playbooks. Setting this to True will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks.
Set this to True in the parent playbook if you are using the parent playbook, as opposed to using the sub-playbooks directly in your playbooks, as this will improve the performance of the playbook and reduce the overfall size of the incident. | True | Optional |
+| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of in the sub-playbooks. Possible values are: True, False.
Setting this to True will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks.
Set this to True in the parent playbook if you are using the parent playbook, as opposed to using the sub-playbooks directly in your playbooks, as this will improve the performance of the playbook and reduce the overall size of the incident. | True | Optional |
## Playbook Outputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2.yml
index 1fcbc2b5d1aa..52501ecaadd1 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2.yml
@@ -554,9 +554,27 @@ inputs:
playbookInputQuery:
- key: InternalRange
value:
- simple: inputs.InternalRange
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: "A CSV list of IP address ranges (in CIDR notation). Use this list to check if an IP address is found within a set of IP address ranges. \nFor example: \"172.16.0.0/12,10.0.0.0/8,192.168.0.0/16\" (without quotes). If no list is provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges)."
+ description: "A comma-separated list of IP address ranges (in CIDR notation). Use this list to check if an IP address is found within a set of IP address ranges. \nFor example: \"172.16.0.0/12,10.0.0.0/8,192.168.0.0/16\" (without quotes)."
playbookInputQuery:
- key: ResolveIP
value:
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2_README.md
index 63fa641dd8c3..cb7c271e2d1f 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-IP_Enrichment_-_Internal_-_Generic_v2_README.md
@@ -18,8 +18,8 @@ This playbook does not use any integrations.
### Scripts
-* IsIPInRanges
* IPToHost
+* IsIPInRanges
### Commands
@@ -32,9 +32,9 @@ This playbook does not use any commands.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| IP | The IP address to enrich. | IP.Address | Optional |
-| InternalRange | A CSV list of IP address ranges \(in CIDR notation\). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If no list is provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 | Optional |
-| ResolveIP | Whether to convert the IP address to a hostname using a DNS query \(True/False\). The default value is true. | True | Required |
-| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of in the sub-playbooks. Setting this to True will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks.
Set this to True in the parent playbook if you are using the parent playbook, as opposed to using the sub-playbooks directly in your playbooks, as this will improve the performance of the playbook and reduce the overfall size of the incident. | False | Optional |
+| InternalRange | A comma-separated list of IP address ranges \(in CIDR notation\). Use this list to check if an IP address is found within a set of IP address ranges. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
+| ResolveIP | Whether to convert the IP address to a hostname using a DNS query \(True/False\). The default value is true. | inputs.ResolveIP | Required |
+| ExecutedFromParent | Whether to execute common logic, like the classification of IP addresses to ranges and resolving, in the main \(IP Enrichment - Generic v2\) enrichment playbook, instead of the sub-playbooks.
Possible values are: True, False. Setting this to True will execute the relevant commands in the main playbook instead of executing them in both sub-playbooks.
Set this to True in the parent playbook if you are using the parent playbook, as opposed to using the sub-playbooks directly in your playbooks, as this will improve the performance of the playbook and reduce the overall size of the incident. | False | Optional |
| Hostnames | Hostnames to enrich. If the ExecutedFromParent playbook is set to True in the IP - Enrichment - Generic v2 playbook, and an internal IP resolves to an endpoint hostname that you want to enrich, the hostnames defined here will be used. | | Optional |
## Playbook Outputs
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5.yml
index ca41e10367c2..1e9a42f286c5 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5.yml
@@ -3,7 +3,7 @@ version: -1
contentitemexportablefields:
contentitemfields: {}
name: Threat Hunting - Generic
-description: "This playbook enables threat hunting for IOCs in your enterprise. It currently supports the following integrations: \n- Splunk\n- Qradar\n- Pan-os \n- Cortex data lake \n- Autofocus\n- Microsoft 365 Defender"
+description: "This playbook enables threat hunting for IOCs in your enterprise. It currently supports the following integrations: \n- Splunk\n- Qradar\n- Pan-os \n- Cortex Data Lake \n- Autofocus\n- Microsoft 365 Defender"
starttaskid: "0"
tasks:
"0":
@@ -375,9 +375,28 @@ inputs:
description: Domain or URL to search. Can be a single domain or URL or an array of domains or URLs to search. By default, the LIKE clause is used.
playbookInputQuery:
- key: InternalRange
- value: {}
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, uses the default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: InternalDomainName
value: {}
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5_README.md
index 1f171b12fadc..f2a6f93a19d8 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Threat_Hunting_-_Generic_6_5_README.md
@@ -2,7 +2,7 @@ This playbook enables threat hunting for IOCs in your enterprise. It currently s
- Splunk
- Qradar
- Pan-os
-- Cortex data lake
+- Cortex Data Lake
- Autofocus
- Microsoft 365 Defender
@@ -12,10 +12,10 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Microsoft 365 Defender - Threat Hunting Generic
* Splunk Indicator Hunting
-* Palo Alto Networks - Hunting And Threat Detection
* QRadar Indicator Hunting V2
+* Microsoft 365 Defender - Threat Hunting Generic
+* Palo Alto Networks - Hunting And Threat Detection
### Integrations
@@ -40,7 +40,7 @@ This playbook does not use any commands.
| SHA1 | The SHA1 hash file or an array of hashes to search. | | Optional |
| IPAddress | The source or destination IP address to search. Can be a single address or an array of list of addresses. | | Optional |
| URLDomain | Domain or URL to search. Can be a single domain or URL or an array of domains or URLs to search. By default, the LIKE clause is used. | | Optional |
-| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, uses the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Optional |
+| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| InternalDomainName | The organization's internal domain name. This is provided for the script IsInternalHostName that checks if the detected hostnames are internal or external, if the hosts contain the internal domains suffix. For example, paloaltonetworks.com. If there is more than one domain, use the \| character to separate values such as \(paloaltonetworks.com\|test.com\). | | Optional |
| InternalHostRegex | Provided for the script IsInternalHostName that checks if the detected host names are internal or external, if the hosts match the organization's naming convention. For example, the host testpc1 will have the following regex \\w\{6\}\\d\{1\} | | Optional |
| QRadarTimeFrame | The time frame to search in QRadar. | LAST 7 DAYS | Optional |
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic.yml b/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic.yml
index 728a9db8adec..ca8aee4c67a4 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic.yml
@@ -756,7 +756,7 @@ tasks:
description: |-
This playbook searches for failed logon on a specific user by querying logs from different sources.
- Supported Integrations:
+ Supported integrations:
-Splunk
-Qradar
-Azure Log Analytics
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic_README.md
index 2d46f41659a4..273d4e283e6c 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic_README.md
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-User_Investigation_-_Generic_README.md
@@ -14,10 +14,10 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Okta - User Investigation
-* Cortex XDR - Get entity alerts by MITRE tactics
* Get entity alerts by MITRE tactics
+* Okta - User Investigation
* SIEM - Search for Failed logins
+* Cortex XDR - Get entity alerts by MITRE tactics
### Integrations
@@ -27,6 +27,7 @@ This playbook does not use any integrations.
* Set
* CountArraySize
+* SetAndHandleEmpty
* MathUtil
### Commands
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_14.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_14.md
new file mode 100644
index 000000000000..8de82ff6685c
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_14.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Convert file hash to corresponding hashes
+
+Added local search for hashes in Cortex XSOAR before the enrichment.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_15.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_15.md
new file mode 100644
index 000000000000..0162983629ac
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_15.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Extract Indicators From File - Generic v2
+
+- Added a new filter for JSON files.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_16.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_16.md
new file mode 100644
index 000000000000..64cf48a525b8
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_16.md
@@ -0,0 +1,10 @@
+
+#### Playbooks
+
+##### Block Email - Generic v2
+Removed the command ***netcraft-report-attack***. This command has been deprecated and has reached its end-of-life (EOL) date.
+The playbook will use the command ***netcraft-attack-report*** instead.
+
+##### Block URL - Generic v2
+Removed the command ***cisco-email-security-list-entry-add***. This command has been deprecated and has reached its end-of-life (EOL) date.
+The playbook will use the command ***cisco-sma-list-entry-append*** from the integration 'CiscoSMA' instead.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_17.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_17.md
new file mode 100644
index 000000000000..24465281dca9
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_17.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Extract Indicators From File - Generic v2
+
+- Added the ability to extract text from QR code images.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_18.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_18.md
new file mode 100644
index 000000000000..0839847f9ca2
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_18.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Endpoint Investigation Plan
+
+Added a Test playbook.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_19.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_19.md
new file mode 100644
index 000000000000..5b39ae938f78
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_19.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Entity Enrichment - Generic v3
+
+- Added a new playbook input for providing the user's domain name for the account enrichment sub-playbook.
\ No newline at end of file
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_20.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_20.md
new file mode 100644
index 000000000000..2693a6765bd8
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_20.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Email Address Enrichment - Generic v2.1
+
+- Added an enrichment task using EWS for retrieving a primary email address.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_21.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_21.md
new file mode 100644
index 000000000000..94458b7054fc
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_21.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### GenericPolling
+
+- Added the **ExtractMode** argument to propagate `auto-extract` to subsequent executions of the command.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_22.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_22.md
new file mode 100644
index 000000000000..cfbf89d3d213
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_22.md
@@ -0,0 +1,21 @@
+
+#### Playbooks
+
+##### Threat Hunting - Generic
+
+Updated the playbook description.
+##### Command-Line Analysis
+
+Updated the playbook description.
+##### File Enrichment - Generic v2
+
+Updated the playbook description.
+##### Account Enrichment - Generic v2.1
+
+Updated the playbook description.
+##### IP Enrichment - Generic v2
+
+Updated the playbook description.
+##### User Investigation - Generic
+
+Updated the playbook description.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_23.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_23.md
new file mode 100644
index 000000000000..12a492c8026c
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_23.md
@@ -0,0 +1,38 @@
+
+#### Playbooks
+
+##### Enrichment for Verdict
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Entity Enrichment - Generic v2
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### IP Enrichment - External - Generic v2
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### IP Enrichment - Internal - Generic v2
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### IP Enrichment - Generic v2
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Threat Hunting - Generic
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Entity Enrichment - Generic v3
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Block IP - Generic v3
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Block Indicators - Generic v3
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_24.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_24.md
new file mode 100644
index 000000000000..c4e81a611b0d
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_24.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Block Account - Generic v2
+
+Added new tasks to verify the existence of the user for both the Microsoft Active Directory integration and Microsoft Graph User integration.
\ No newline at end of file
diff --git a/Packs/CommonPlaybooks/TestPlaybooks/playbook-Containment_Plan-Test.yml b/Packs/CommonPlaybooks/TestPlaybooks/playbook-Containment_Plan-Test.yml
index 9d22cf34646b..cc2a289b80e0 100644
--- a/Packs/CommonPlaybooks/TestPlaybooks/playbook-Containment_Plan-Test.yml
+++ b/Packs/CommonPlaybooks/TestPlaybooks/playbook-Containment_Plan-Test.yml
@@ -28,7 +28,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -3180
+ "y": -3710
}
}
note: false
@@ -56,7 +56,7 @@ tasks:
{
"position": {
"x": 60,
- "y": 390
+ "y": 710
}
}
note: false
@@ -81,7 +81,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "167"
+ - "183"
scriptarguments:
key:
simple: Users
@@ -93,7 +93,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -850
+ "y": -1050
}
}
note: false
@@ -132,7 +132,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -1190
+ "y": -1390
}
}
note: false
@@ -169,7 +169,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -1370
+ "y": -1570
}
}
note: false
@@ -206,7 +206,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -330
+ "y": -10
}
}
note: false
@@ -243,7 +243,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -490
+ "y": -190
}
}
note: false
@@ -282,7 +282,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -145
+ "y": 175
}
}
note: false
@@ -300,6 +300,7 @@ tasks:
id: 37a2166b-7580-4ffe-8017-4d4f8d67f264
version: -1
name: Verify containment action succeed
+ description: Checks if containment action succeeds.
type: condition
iscommand: false
brand: ""
@@ -331,7 +332,7 @@ tasks:
{
"position": {
"x": 60,
- "y": 40
+ "y": 360
}
}
note: false
@@ -366,7 +367,7 @@ tasks:
{
"position": {
"x": 310,
- "y": 220
+ "y": 540
}
}
note: false
@@ -408,8 +409,8 @@ tasks:
view: |-
{
"position": {
- "x": 60,
- "y": -1540
+ "x": 330,
+ "y": -1740
}
}
note: false
@@ -440,7 +441,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -3040
+ "y": -3570
}
}
note: false
@@ -471,7 +472,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2590
+ "y": -3120
}
}
note: false
@@ -527,7 +528,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2750
+ "y": -3280
}
}
note: false
@@ -552,7 +553,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "154"
+ - "185"
scriptarguments:
AutoBlockIndicators:
simple: "True"
@@ -602,7 +603,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -670
+ "y": -560
}
}
note: false
@@ -639,7 +640,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -1020
+ "y": -1220
}
}
note: false
@@ -676,7 +677,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2280
+ "y": -2810
}
}
note: false
@@ -716,7 +717,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2910
+ "y": -3440
}
}
note: false
@@ -756,7 +757,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2460
+ "y": -2990
}
}
note: false
@@ -782,7 +783,7 @@ tasks:
'#default#':
- "176"
"yes":
- - "177"
+ - "178"
separatecontext: false
conditions:
- label: "yes"
@@ -809,7 +810,7 @@ tasks:
{
"position": {
"x": 60,
- "y": -2100
+ "y": -2630
}
}
note: false
@@ -834,7 +835,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "177"
+ - "178"
scriptarguments:
message:
simple: "Please check the endpoint status. \nNote: The Endpoint's alias is `TestPlaybook`"
@@ -844,7 +845,7 @@ tasks:
{
"position": {
"x": 320,
- "y": -1920
+ "y": -2450
}
}
note: false
@@ -854,39 +855,225 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "177":
- id: "177"
- taskid: 6f4a3263-a211-4ff7-86eb-a849c9954b37
+ "178":
+ id: "178"
+ taskid: 524e9012-1a1c-4364-8b8c-581452c0e2fa
type: regular
task:
- id: 6f4a3263-a211-4ff7-86eb-a849c9954b37
+ id: 524e9012-1a1c-4364-8b8c-581452c0e2fa
version: -1
- name: Delete file
- description: Initiates a new endpoint script execution to delete the specified file.
- script: '|||core-run-script-delete-file'
+ name: Check if the file exists
+ description: Initiates a new endpoint script execution to check if file exists.
+ script: '|||core-run-script-file-exists'
type: regular
iscommand: true
brand: ""
nexttasks:
'#none#':
- - "162"
+ - "181"
scriptarguments:
endpoint_ids:
- complex:
- root: Core.Endpoint
- accessor: endpoint_id
+ simple: ${Core.Endpoint.endpoint_id}
file_path:
simple: C:\Users\administrator\Downloads\fortestplaybook.txt
- timeout_in_seconds:
- simple: "60"
separatecontext: false
- continueonerror: true
continueonerrortype: ""
view: |-
{
"position": {
"x": 60,
- "y": -1720
+ "y": -2270
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "181":
+ id: "181"
+ taskid: 33ab5dde-4cc0-44a3-82c7-a5ad9a2459ee
+ type: regular
+ task:
+ id: 33ab5dde-4cc0-44a3-82c7-a5ad9a2459ee
+ version: -1
+ name: Check script result
+ description: Retrieve the results of a script execution action.
+ script: '|||core-get-script-execution-results'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "182"
+ scriptarguments:
+ action_id:
+ simple: ${Core.ScriptRun.action_id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 60,
+ "y": -2090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "182":
+ id: "182"
+ taskid: 9e43d19a-9964-4209-8327-475c92492176
+ type: condition
+ task:
+ id: 9e43d19a-9964-4209-8327-475c92492176
+ version: -1
+ name: Does the file exist?
+ description: Check if the file exists.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "162"
+ "yes":
+ - "151"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Core.ScriptResult.results.command_output
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 60,
+ "y": -1920
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "183":
+ id: "183"
+ taskid: 731f27cd-ac92-4ac0-8326-95bf9edc3f08
+ type: regular
+ task:
+ id: 731f27cd-ac92-4ac0-8326-95bf9edc3f08
+ version: -1
+ name: Create a new file indicator
+ description: commands.local.cmd.new.indicator
+ script: Builtin|||createNewIndicator
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "184"
+ scriptarguments:
+ tags:
+ simple: File
+ value:
+ simple: 724e1e4079191f086c3a2bcc15133b4309ce5125dbb31c5443262599ca992601
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 60,
+ "y": -890
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "184":
+ id: "184"
+ taskid: a484b3ff-8604-4e19-81cf-5fbb33e58440
+ type: regular
+ task:
+ id: a484b3ff-8604-4e19-81cf-5fbb33e58440
+ version: -1
+ name: Create a new domain indicator
+ description: commands.local.cmd.new.indicator
+ script: Builtin|||createNewIndicator
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "167"
+ scriptarguments:
+ tags:
+ simple: Domain
+ value:
+ simple: nonofyourbuisness.io
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 60,
+ "y": -730
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "185":
+ id: "185"
+ taskid: b0004f6e-5424-4d03-82e2-85f19a05a803
+ type: condition
+ task:
+ id: b0004f6e-5424-4d03-82e2-85f19a05a803
+ version: -1
+ name: Is Active Directory Integration Enabled?
+ description: Returns 'yes' if integration brand is available. Otherwise returns 'no'
+ scriptName: IsIntegrationAvailable
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "153"
+ "yes":
+ - "154"
+ scriptarguments:
+ brandname:
+ simple: Active Directory Query v2
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 60,
+ "y": -385
}
}
note: false
@@ -903,10 +1090,10 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 3635,
- "width": 640,
+ "height": 4485,
+ "width": 650,
"x": 60,
- "y": -3180
+ "y": -3710
}
}
}
diff --git a/Packs/CommonPlaybooks/TestPlaybooks/playbook-Endpoint_Investigation_Plan_-_Test.yml b/Packs/CommonPlaybooks/TestPlaybooks/playbook-Endpoint_Investigation_Plan_-_Test.yml
new file mode 100644
index 000000000000..65b943bcfa98
--- /dev/null
+++ b/Packs/CommonPlaybooks/TestPlaybooks/playbook-Endpoint_Investigation_Plan_-_Test.yml
@@ -0,0 +1,1076 @@
+id: Endpoint Investigation Plan - Test
+version: -1
+name: Endpoint Investigation Plan - Test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 6d9d6345-8a8c-4699-84a4-3b6fcd935541
+ type: start
+ task:
+ id: 6d9d6345-8a8c-4699-84a4-3b6fcd935541
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "7"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": -670
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 9d3ed560-5a57-4266-8a53-7c3f366fcd5c
+ type: playbook
+ task:
+ id: 9d3ed560-5a57-4266-8a53-7c3f366fcd5c
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ HuntCnCTechniques:
+ simple: "True"
+ HuntCollectionTechniques:
+ simple: "True"
+ HuntDefenseEvasionTechniques:
+ simple: "True"
+ HuntDiscoveryTechniques:
+ simple: "True"
+ HuntExecutionTechniques:
+ simple: "True"
+ HuntImpactTechniques:
+ simple: "True"
+ HuntInitialAccessTechniques:
+ simple: "True"
+ HuntLateralMovementTechniques:
+ simple: "True"
+ HuntPersistenceTechniques:
+ simple: "True"
+ HuntPrivilegeEscalationTechniques:
+ simple: "True"
+ HuntReconnaissanceTechniques:
+ simple: "True"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": -210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 7642a09f-40a8-4cec-84bc-5527301c33ba
+ type: playbook
+ task:
+ id: 7642a09f-40a8-4cec-84bc-5527301c33ba
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "17"
+ scriptarguments:
+ HuntAttacker:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ attackerRemoteIP:
+ complex:
+ root: alert
+ accessor: hostip
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1250
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 8b7a467c-49d7-4dc9-8612-6d28c09f0663
+ type: regular
+ task:
+ id: 8b7a467c-49d7-4dc9-8612-6d28c09f0663
+ version: -1
+ name: Clear Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "4"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 100
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 48987d43-8e32-463a-8f6e-ba3ec4a325d8
+ type: regular
+ task:
+ id: 48987d43-8e32-463a-8f6e-ba3ec4a325d8
+ version: -1
+ name: Get Endpoints
+ description: Gets a list of endpoints, according to the passed filters. If there are no filters, all endpoints are returned. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ script: '|||core-get-endpoints'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ alias_name:
+ simple: TestPlaybook
+ status:
+ simple: connected
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 270
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 53250c44-c54e-4d35-8627-96c39ea971b0
+ type: condition
+ task:
+ id: 53250c44-c54e-4d35-8627-96c39ea971b0
+ version: -1
+ name: Is the EndpointID defined?
+ description: Checks if the EndpointID is defined.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "6"
+ "yes":
+ - "10"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: Core.Endpoint
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Core.Endpoint.alias
+ iscontext: true
+ right:
+ value:
+ simple: TestPlaybook
+ ignorecase: true
+ accessor: endpoint_id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 440
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 5f33ec12-8a2c-4506-810c-b7c8c44eca95
+ type: regular
+ task:
+ id: 5f33ec12-8a2c-4506-810c-b7c8c44eca95
+ version: -1
+ name: Make test fail
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ message:
+ simple: "Please check the endpoint status. \nNote: The Endpoint's alias is `TestPlaybook`"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 270,
+ "y": 610
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: d6d73e4a-29f4-46c1-8fbb-ff6bed139b63
+ type: title
+ task:
+ id: d6d73e4a-29f4-46c1-8fbb-ff6bed139b63
+ version: -1
+ name: Test without inputs
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "14"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": -535
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 4f801f45-a5d4-4208-80d0-b53b3bef3e85
+ type: title
+ task:
+ id: 4f801f45-a5d4-4208-80d0-b53b3bef3e85
+ version: -1
+ name: Test with inputs
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "3"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": -40
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: 42e1da2b-de1b-4d63-89b0-3c788ffc9345
+ type: regular
+ task:
+ id: 42e1da2b-de1b-4d63-89b0-3c788ffc9345
+ version: -1
+ name: Set alert hostip
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ hostip:
+ simple: 1.1.1.1,2.2.2.2
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 780
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 668483fc-2c36-417e-83e8-4876b6f40acb
+ type: regular
+ task:
+ id: 668483fc-2c36-417e-83e8-4876b6f40acb
+ version: -1
+ name: Set alert initiatorsha256
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "15"
+ scriptarguments:
+ initiatorsha256:
+ simple: fffee99d8b6a3d291eea8cc3441132f721101378c75eadf92fa49fe891845364,fffee99d8b6a3d291eea8cc3441132f721101378c75eadf92fa49fe891845365
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 960
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 244f6d3d-c8c3-4c96-8570-23871a0ba371
+ type: title
+ task:
+ id: 244f6d3d-c8c3-4c96-8570-23871a0ba371
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2930
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 4330786a-98be-454b-8f16-0dc27909137a
+ type: regular
+ task:
+ id: 4330786a-98be-454b-8f16-0dc27909137a
+ version: -1
+ name: Clear Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": -390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "15":
+ id: "15"
+ taskid: 7e9623f3-3c45-4cb4-8f66-da32403f7c8b
+ type: title
+ task:
+ id: 7e9623f3-3c45-4cb4-8f66-da32403f7c8b
+ version: -1
+ name: Test with ip
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "2"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1120
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "16":
+ id: "16"
+ taskid: 08b335c0-9ca9-4fd4-8bde-d6b87d277130
+ type: playbook
+ task:
+ id: 08b335c0-9ca9-4fd4-8bde-d6b87d277130
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "20"
+ scriptarguments:
+ HuntByHost:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ agentID:
+ simple: ${Core.Endpoint.endpoint_id}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: 7325afaa-96a6-4127-8415-a342a300f6b8
+ type: title
+ task:
+ id: 7325afaa-96a6-4127-8415-a342a300f6b8
+ version: -1
+ name: Test with agent ID
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "16"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1420
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: fa6f60da-58ce-4bc7-8746-f44a05399ff5
+ type: playbook
+ task:
+ id: fa6f60da-58ce-4bc7-8746-f44a05399ff5
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "21"
+ scriptarguments:
+ FileSHA256:
+ complex:
+ root: alert
+ accessor: initiatorsha256
+ transformers:
+ - operator: FirstArrayElement
+ HuntByFile:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: 7b96b459-a1ff-4d83-83a1-8c99741c44c7
+ type: playbook
+ task:
+ id: 7b96b459-a1ff-4d83-83a1-8c99741c44c7
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ HuntAttacker:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ agentID:
+ simple: '*'
+ attackerExternalHost:
+ simple: ${Endpoint.Hostname}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2150
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: de69fd48-8a50-4817-85de-0f32db382c3f
+ type: title
+ task:
+ id: de69fd48-8a50-4817-85de-0f32db382c3f
+ version: -1
+ name: Test with hash
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "18"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: 53b368f9-ee82-42c3-8cbe-45523662264a
+ type: title
+ task:
+ id: 53b368f9-ee82-42c3-8cbe-45523662264a
+ version: -1
+ name: Test with Hostname
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "19"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: b252346f-4701-4348-80dd-fbb8802f1abc
+ type: title
+ task:
+ id: b252346f-4701-4348-80dd-fbb8802f1abc
+ version: -1
+ name: Test with all inputs
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "23"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2620
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: 2fab1c78-9e6a-4cd9-8f65-354b769aadc1
+ type: playbook
+ task:
+ id: 2fab1c78-9e6a-4cd9-8f65-354b769aadc1
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ FileSHA256:
+ simple: ${alert.initiatorsha256}
+ HuntAttacker:
+ simple: "True"
+ HuntByFile:
+ simple: "True"
+ HuntByHost:
+ simple: "True"
+ HuntByTechnique:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "True"
+ HuntCollectionTechniques:
+ simple: "True"
+ HuntDefenseEvasionTechniques:
+ simple: "True"
+ HuntDiscoveryTechniques:
+ simple: "True"
+ HuntExecutionTechniques:
+ simple: "True"
+ HuntImpactTechniques:
+ simple: "True"
+ HuntInitialAccessTechniques:
+ simple: "True"
+ HuntLateralMovementTechniques:
+ simple: "True"
+ HuntPersistenceTechniques:
+ simple: "True"
+ HuntPrivilegeEscalationTechniques:
+ simple: "True"
+ HuntReconnaissanceTechniques:
+ simple: "True"
+ agentID:
+ simple: '*'
+ attackerExternalHost:
+ simple: ${Endpoint.Hostname}
+ attackerRemoteIP:
+ simple: ${alert.hostip}
+ mitreTechniqueID:
+ simple: T1059.001
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2760
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "24":
+ id: "24"
+ taskid: d29c19d5-dc24-4572-80d5-8b0f2dc39d47
+ type: playbook
+ task:
+ id: d29c19d5-dc24-4572-80d5-8b0f2dc39d47
+ version: -1
+ name: Endpoint Investigation Plan
+ description: "This playbook handles all the endpoint investigation actions by performing the following tasks on every alert associated with the alert:\n* Pre-defined MITRE Tactics\n* Host fields (Host ID)\n* Attacker fields (Attacker IP, External host)\n* MITRE techniques\n* File hash (currently, the playbook supports only SHA256) \n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Endpoint Investigation Plan
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "22"
+ scriptarguments:
+ HuntByTechnique:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ agentID:
+ simple: '*'
+ mitreTechniqueID:
+ simple: T1059.001
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2450
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "25":
+ id: "25"
+ taskid: e99772d8-5e0b-42ab-8c6c-abed602dbfc3
+ type: title
+ task:
+ id: e99772d8-5e0b-42ab-8c6c-abed602dbfc3
+ version: -1
+ name: Test with mitreTechniqueID
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "24"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 2310
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 3665,
+ "width": 640,
+ "x": 10,
+ "y": -670
+ }
+ }
+ }
+inputs: []
+outputs: []
+tests:
+- No tests (auto formatted)
+description: Test Playbook for Endpoint Investigation Plan.
+fromversion: 6.6.0
+marketplaces:
+- marketplacev2
\ No newline at end of file
diff --git a/Packs/CommonPlaybooks/doc_files/Block_Account_-_Generic_v2.png b/Packs/CommonPlaybooks/doc_files/Block_Account_-_Generic_v2.png
index 2fa955b1b91d..2ef5bc380709 100644
Binary files a/Packs/CommonPlaybooks/doc_files/Block_Account_-_Generic_v2.png and b/Packs/CommonPlaybooks/doc_files/Block_Account_-_Generic_v2.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Block_Email_-_Generic_v2.png b/Packs/CommonPlaybooks/doc_files/Block_Email_-_Generic_v2.png
index bcba23a27637..95f62bd6279d 100644
Binary files a/Packs/CommonPlaybooks/doc_files/Block_Email_-_Generic_v2.png and b/Packs/CommonPlaybooks/doc_files/Block_Email_-_Generic_v2.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Convert_file_hash_to_corresponding_hashes.png b/Packs/CommonPlaybooks/doc_files/Convert_file_hash_to_corresponding_hashes.png
new file mode 100644
index 000000000000..a0c0a06d6e54
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Convert_file_hash_to_corresponding_hashes.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Email_Address_Enrichment_-_Generic_v2.1.png b/Packs/CommonPlaybooks/doc_files/Email_Address_Enrichment_-_Generic_v2.1.png
index 9a2cc3eb0b52..f98331dc0b10 100644
Binary files a/Packs/CommonPlaybooks/doc_files/Email_Address_Enrichment_-_Generic_v2.1.png and b/Packs/CommonPlaybooks/doc_files/Email_Address_Enrichment_-_Generic_v2.1.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Extract_Indicators_From_File_-_Generic_v2.png b/Packs/CommonPlaybooks/doc_files/Extract_Indicators_From_File_-_Generic_v2.png
index 32aaa45bbf94..4f1b2de80840 100644
Binary files a/Packs/CommonPlaybooks/doc_files/Extract_Indicators_From_File_-_Generic_v2.png and b/Packs/CommonPlaybooks/doc_files/Extract_Indicators_From_File_-_Generic_v2.png differ
diff --git a/Packs/CommonPlaybooks/pack_metadata.json b/Packs/CommonPlaybooks/pack_metadata.json
index 631caf643255..dc7fcd6326b9 100644
--- a/Packs/CommonPlaybooks/pack_metadata.json
+++ b/Packs/CommonPlaybooks/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Playbooks",
"description": "Frequently used playbooks pack.",
"support": "xsoar",
- "currentVersion": "2.6.13",
+ "currentVersion": "2.6.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonScripts/.secrets-ignore b/Packs/CommonScripts/.secrets-ignore
index 467ec6b009b2..56e847aba51c 100644
--- a/Packs/CommonScripts/.secrets-ignore
+++ b/Packs/CommonScripts/.secrets-ignore
@@ -119,7 +119,199 @@ cipher=ECDHE-RSA-AES128-GCM-SHA256
multipart/signed
123.123.123.123
107.66.225.91
+00:87:37:26:7f:7e:da:e8
+00:e3:d1:a6:3b:81:78:2b
+04:c3:30:94:76:79:f1:fa
+07:81:20:3e:42:e4:a7:ca
+07:f5:e8:e5:08:4d:88:0d
+08:5a:72:f4:37:e5:29:73
+0d:14:9d:8d:e0:9f:0c:2b
+1.3.6.1
+1.7.23.1
+11:72:47:f5:f7:6d:ea:c9
+18:43:48:8e:17:eb:4c:a0
+18:49:53:f8:c4:fb:55:f6
+19:3f:ef:99:44:05:b4:06
+1c:36:b5:ef:30:45:f2:65
+1e:40:50:b2:7e:43:45:90
+2.23.140.1
+2.5.29.14
+2.5.29.15
+2.5.29.17
+2.5.29.19
+2.5.29.31
+2.5.29.32
+2.5.29.35
+2.5.29.37
+20:04:8a:d8:05:f3:68:e1
+21:89:7f:ba:46:a6:c6:d8
+21:92:bd:cd:4c:02:0d:eb
+21:eb:65:ff:5e:f2:10:f4
+22:14:77:d1:8c:ab:4e:84
+23:51:c7:d2:56:f4:40:37
+25:39:47:8a:d0:40:dd:11
+25:f9:89:2f:ed:7f:6d:98
+26:e6:9d:f3:08:96:ea:a7
+28:38:d1:6b:e7:f9:97:f1
+29:3a:2a:20:0f:2a:b5:96
+29:53:35:13:01:de:c5:bf
+2b:30:27:26:69:22:f2:99
+2c:08:e3:d3:8f:2a:33:ae
+30:d6:08:3a:65:9e:d7:df
+32:9e:50:e7:7c:28:08:02
+34:be:87:c9:63:b2:6f:bf
+35:85:82:bd:4b:8d:d2:db
+35:bd:8d:0a:1f:6e:b4:47
+35:cf:17:e7:3b:eb:c9:9d
+37:88:4a:8d:74:f8:0b:97
+37:e5:b7:40:09:55:58:2c
+3f:4d:18:de:1b:c0:ce:fc
+3f:bc:54:c1:c4:0f:b1:22
+42:95:32:71:26:76:ec:66
+42:c3:01:e1:39:67:fc:82
+44:42:6e:6e:b9:fd:d3:d7
+45:c1:99:b4:d0:1b:d7:bb
+46:b1:f4:62:ca:04:12:00
+46:ba:49:d8:8f:67:ce:11
+46:ff:35:0e:75:84:af:8f
+48:73:f7:40:b7:a9:b8:0c
+48:a0:68:c0:c5:ac:7f:1c
+48:a7:86:22:21:7c:fb:40
+48:c2:8a:b2:da:da:ff:36
+49:5e:90:6e:7f:2a:9d:cf
+4a:2a:e7:d4:11:e5:61:82
+4a:ad:66:67:c6:a8:db:62
+4b:96:03:14:bf:d1:86:7e
+4c:2d:bd:a5:41:ca:96:4b
+4c:c2:0b:7a:7d:d0:4d:d2
+5.5.7.1
+50:b6:5d:37:b9:e0:a5:eb
+51:86:d8:91:9c:6b:35:37
+52:0b:3e:2a:6f:15:25:77
+58:34:eb:61:2d:ae:10:e1
+59:b2:e7:d0:91:89:ca:e0
+5c:04:1b:07:d5:45:2e:e4
+5f:eb:c2:ca:a4:89:3d:f2
+60:4d:b0:a7:a1:8a:31:27
+60:7b:1d:3b:e6:27:8f:1b
+61:44:5a:ca:2a:61:ff:17
+62:51:c7:99:83:45:e3:41
+62:cb:a5:2f:87:a1:0b:d7
+63:b1:43:e8:b5:39:3f:c2
+65:b8:91:03:e2:ac:63:6f
+66:ec:57:85:95:9f:97:9d
+69:5b:d9:d7:e6:91:3c:9d
+6a:54:ea:33:eb:1a:c1:3e
+6d:d7:7f:72:fb:3d:e1:b3
+6d:e4:21:a7:e2:0b:a7:b0
+6d:f4:ee:8f:55:7c:d4:30
+6e:90:b3:81:73:70:85:f6
+6f:30:79:a1:7f:4c:87:5e
+71:2d:df:a9:ee:04:50:38
+72:ba:5a:6c:38:d2:17:bb
+74:a6:e1:ba:d1:7a:28:d1
+76:85:c8:ad:e7:c7:d1:e2
+77:33:c4:d5:53:a2:3f:c4
+78:66:a0:5a:53:23:6f:7e
+79:9b:54:80:f2:98:86:b8
+7c:eb:95:00:4c:b6:96:e2
+7d:6a:b5:ed:8c:59:66:e7
+80:64:05:ca:33:75:27:92
+80:be:c0:cc:b2:45:f7:d7
+81:15:9a:67:c4:ed:23:0c
+83:e1:50:40:0f:fa:8b:0e
+84:7c:89:6c:4d:12:ae:29
+88:b3:80:10:4e:de:47:24
+8c:5c:7c:75:59:25:bc:85
+8d:b2:65:2b:4a:c4:7b:40
+90:62:99:42:1d:31:90:5b
+91:68:7a:21:7b:43:a4:cd
+92:24:1d:68:24:21:9f:4b
+93:53:4b:eb:ca:b7:5a:b9
+94:95:af:af:f4:f1:32:ce
+98:4f:f0:31:c1:4e:95:43
+9d:5e:28:ae:ce:4e:a1:9e
+9d:d4:63:27:32:17:47:dd
+a1:8a:bc:f5:56:83:a9:60
+a5:29:a5:d1:1a:4b:7a:12
+a6:ae:a5:7d:a8:79:69:e3
+a9:44:e9:c7:f3:06:d3:c0
+aa:1d:1b:79:39:f5:78:91
+aa:aa:80:35:07:35:13:59
+ac:31:2c:98:af:a0:eb:07
+ad:65:d8:0c:84:75:c3:1c
+ae:65:27:de:e6:45:35:05
+ae:e5:4a:37:2d:21:2c:d5
+af:1b:f5:0c:9b:4c:ac:3f
+af:61:ef:ca:58:48:b7:2d
+af:8b:d6:03:3b:4c:73:1d
+b0:60:43:c1:4e:17:c2:16
+b1:07:68:2d:4a:61:2f:19
+b1:39:2c:0c:d7:92:34:c3
+b1:d7:a7:3a:b7:87:2e:8d
+b4:69:3b:0e:9c:a0:6f:98
+b5:4c:e8:6f:9f:1d:03:80
+bb:4a:ef:12:58:1e:df:5d
+bc:3a:90:44:ac:ba:a7:cf
+bf:3c:ff:69:d3:80:a7:6e
+c0:af:43:46:03:be:af:48
+c2:42:94:35:d5:ec:ea:78
+c3:4d:fc:d2:17:93:4a:c6
+c4:f4:53:7c:7e:8d:f9:62
+c5:3e:4d:0e:d9:01:72:d0
+c6:ce:e9:88:75:a0:da:a3
+c7:ec:5d:4f:4d:1b:38:af
+c9:a5:df:ef:c5:89:e3:cd
+c9:b8:b1:9c:74:cd:93:7b
+ca:b3:06:fc:99:93:a3:32
+cb:53:0d:f7:27:95:71:ab
+cc:8b:23:49:08:d6:84:22
+ce:64:de:57:4b:0f:78:e6
+d1:65:71:14:97:d4:da:e2
+d2:8f:81:38:5a:22:42:4d
+d3:1a:67:6b:81:99:cd:5d
+d3:98:e0:89:c9:fe:b8:a5
+d7:dc:c8:6a:55:12:d8:a9
+d8:36:d1:64:cb:f9:25:47
+d8:6d:37:41:6d:9e:e4:46
+dc:2c:df:ae:af:8d:03:30
+dc:4a:73:81:0a:a5:31:f9
+dc:ad:03:14:6e:17:87:41
+dc:f8:ec:b4:cb:19:e8:0f
+dd:e8:86:45:b1:60:88:ec
+df:e4:b7:4b:de:09:40:2c
+e2:52:17:12:dc:a4:93:06
+e9:07:21:38:ad:82:8c:5d
+e9:a7:bf:cf:cb:81:94:12
+ea:d6:c3:51:1b:4e:4c:48
+eb:b7:dd:bf:80:9c:e3:ca
+ec:1e:3c:1e:a1:45:11:f3
+ed:65:a2:06:73:7e:f3:f5
+f0:55:76:91:d0:b9:aa:30
+f2:ad:75:a4:20:f8:d3:1b
+f4:bd:55:30:44:38:3a:39
+f7:85:10:a2:8e:be:29:cb
+f7:da:1e:b7:38:d4:3f:f3
+f7:e6:9b:40:bf:8d:79:fb
+f8:ee:44:86:7c:99:0a:03
+fa:4a:08:9f:25:e5:59:bf
+fa:8a:25:64:62:7e:c3:51
+fc:71:42:bc:91:0e:73:4d
+fc:f6:3f:a3:19:11:23:9d
+fd:46:d4:28:01:54:eb:65
+fd:5c:7d:31:c5:f8:41:1d
+fe:93:e7:36:7e:b9:ac:56
+ff:8c:f5:3f:b6:8d:c7:46
+ff:e1:6c:8d:8e:0f:3b:0f
+http://cacerts.digicert.com
+http://crl.godaddy.com
+http://crl3.digicert.com
+http://crl4.digicert.com
+http://ocsp.digicert.com
+http://ocsp.godaddy.com
+https://www.digicert.com
http://itunes.apple.com
https://www.linkedin.com
http://en.m.wikipedia.org
-https://xsoar.pan.dev/docs/concepts/demisto-sdk#secrets
\ No newline at end of file
+https://xsoar.pan.dev/docs/concepts/demisto-sdk#secrets
+http://www.yahoo.de
diff --git a/Packs/CommonScripts/Lists/PrivateIPs.json b/Packs/CommonScripts/Lists/PrivateIPs.json
index 38526caef4d5..bd29e9193622 100644
--- a/Packs/CommonScripts/Lists/PrivateIPs.json
+++ b/Packs/CommonScripts/Lists/PrivateIPs.json
@@ -15,6 +15,7 @@
"version": -1,
"fromVersion": "6.5.0",
"marketplaces": [
- "xsoar"
+ "xsoar",
+ "marketplacev2"
]
}
diff --git a/Packs/CommonScripts/ReleaseNotes/1_13_40.md b/Packs/CommonScripts/ReleaseNotes/1_13_40.md
new file mode 100644
index 000000000000..e7e8b03cfe32
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_13_40.md
@@ -0,0 +1,78 @@
+
+#### Scripts
+
+##### LinkIncidentsWithRetry
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CVSSCalculator
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### ExportContextToJSONFile
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### displayUtilitiesResults
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### PopulateCriticalAssets
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### LinkIncidentsButton
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### ConvertDatetoUTC
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### AssignToMeButton
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### IsDomainInternal
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### IndicatorMaliciousRatioCalculation
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### IPToHost
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### MarkAsEvidenceByTag
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CopyContextToField
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### AddDBotScoreToContext
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CloseInvestigationAsDuplicate
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### SetDateField
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CompareIncidentsLabels
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### ContextContains
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### GenerateSummaryReportButton
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### DemistoVersion
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### IsIPPrivate
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### GenerateRandomString
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### EncodeToAscii
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### LoadJSONFileToContext
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
+##### CalculateTimeDifference
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_0.md b/Packs/CommonScripts/ReleaseNotes/1_14_0.md
new file mode 100644
index 000000000000..ccc3ebcf8813
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_0.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### IdentifyAttachedEmail
+
+- Fixed an issue where the script was erroring when there were no File entries in the warroom.
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_1.md b/Packs/CommonScripts/ReleaseNotes/1_14_1.md
new file mode 100644
index 000000000000..0264bf8275dd
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_1.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### Sleep
+
+Fixed an issue where sometimes tasks using Sleep did not continue as expected.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_10.md b/Packs/CommonScripts/ReleaseNotes/1_14_10.md
new file mode 100644
index 000000000000..3da36782d020
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_10.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GridFieldSetup
+
+Updated the Docker image to: *demisto/python3:3.10.13.89009*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_11.md b/Packs/CommonScripts/ReleaseNotes/1_14_11.md
new file mode 100644
index 000000000000..47cc33c0b050
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_11.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### IsInternalDomainName
+
+- Fixed an issue where the script would return false positive results on edge cases.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_12.md b/Packs/CommonScripts/ReleaseNotes/1_14_12.md
new file mode 100644
index 000000000000..576865ec7e70
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_12.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### DeleteContext
+
+- Added support for comma separated values for the 'key' parameter.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_13.md b/Packs/CommonScripts/ReleaseNotes/1_14_13.md
new file mode 100644
index 000000000000..6646bf262949
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_13.md
@@ -0,0 +1,12 @@
+
+#### Scripts
+
+##### ScheduleGenericPolling
+
+- Added support for propagating the `auto-extract` argument in both the initial and subsequent executions by introducing the **extractMode** argument.
+- Updated the Docker image to: *demisto/python3:3.10.13.89009*.
+
+##### GenericPollingScheduledTask
+
+- Added support for propagating the `auto-extract` argument in both the initial and subsequent executions by introducing the **extractMode** argument.
+
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_14.md b/Packs/CommonScripts/ReleaseNotes/1_14_14.md
new file mode 100644
index 000000000000..2826d7851402
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_14.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### CreateNewIndicatorsOnly
+
+- Added the functionality to associate submitted indicators with an incident.
+- Updated the Docker image to: *demisto/python3:3.10.13.89009*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_15.md b/Packs/CommonScripts/ReleaseNotes/1_14_15.md
new file mode 100644
index 000000000000..9d4f94440172
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_15.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### DeleteContext
+
+- Fixed an issue where provided non-string keys are not deleted.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_16.md b/Packs/CommonScripts/ReleaseNotes/1_14_16.md
new file mode 100644
index 000000000000..65933b3f2718
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_16.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### GetDockerImageLatestTag
+
+- Added support for OPP gateway.
+- Upgraded the Docker image to: *demisto/python3:3.10.13.89009*.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_17.md b/Packs/CommonScripts/ReleaseNotes/1_14_17.md
new file mode 100644
index 000000000000..aa6e19948bda
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_17.md
@@ -0,0 +1,6 @@
+
+#### Lists
+
+##### PrivateIPs
+
+Added the list 'PrivateIPs' to XSIAM.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_18.md b/Packs/CommonScripts/ReleaseNotes/1_14_18.md
new file mode 100644
index 000000000000..197d814fd8ba
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_18.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### DeleteContext
+
+- Fixed an issue where comma-separated values key parsing was broken.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_19.md b/Packs/CommonScripts/ReleaseNotes/1_14_19.md
new file mode 100644
index 000000000000..8f070b867dea
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_19.md
@@ -0,0 +1,12 @@
+
+#### Scripts
+
+##### AquatoneDiscoverV2
+
+- Updated the Docker image to: *demisto/aquatone:2.0.0.89205*.
+##### FetchIndicatorsFromFile
+
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.89345*.
+##### VerifyJSON
+
+- Updated the Docker image to: *demisto/powershell:7.4.0.80528*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_2.md b/Packs/CommonScripts/ReleaseNotes/1_14_2.md
new file mode 100644
index 000000000000..7d3e5cac3ffc
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_2.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### New: ExtractHyperlinksFromOfficeFiles
+
+- New: Extract hyperlinks from office files. Supported file types are: xlsx, docx, pptx. (Available from Cortex XSOAR 5.5.0).
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_3.md b/Packs/CommonScripts/ReleaseNotes/1_14_3.md
new file mode 100644
index 000000000000..4f27bf5381f0
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_3.md
@@ -0,0 +1,8 @@
+
+#### Scripts
+
+##### AssignAnalystToIncident
+
+<~XSIAM>
+Fixed an issue where the script was not displaying an error when executed from the incident war room.
+~XSIAM>
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_4.md b/Packs/CommonScripts/ReleaseNotes/1_14_4.md
new file mode 100644
index 000000000000..2ab29af0efd0
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_4.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### StixCreator
+
+- Fixed an issue where *Export Stix* failed when exporting a not STIX compatible object of type `Report`.
+- Updated the Docker image to *demisto/py3-tools:1.0.0.88283*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_5.md b/Packs/CommonScripts/ReleaseNotes/1_14_5.md
new file mode 100644
index 000000000000..b1c811c43df9
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_5.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### ScheduleGenericPolling
+
+- Fixed an issue where a value was not sanitized.
+- Updated the Docker image to: *demisto/python3:3.10.13.87159*.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_6.md b/Packs/CommonScripts/ReleaseNotes/1_14_6.md
new file mode 100644
index 000000000000..0673a0e390d2
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_6.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ConvertFile
+
+- Updated the Docker image to: *demisto/office-utils:2.0.0.88735*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_7.md b/Packs/CommonScripts/ReleaseNotes/1_14_7.md
new file mode 100644
index 000000000000..8b7db0d2cc80
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_7.md
@@ -0,0 +1,12 @@
+#### Scripts
+
+##### CertificateExtract
+- Updated the Docker image to: *demisto/crypto:1.0.0.88857*.
+- New: Extract fields from a certificate file and return the standard context. (Available from Cortex XSOAR 6.0.0).
+
+##### New: CertificateReputation
+- New: Enrich and calculate the reputation of a certificate indicator. (Available from Cortex XSOAR 6.0.0).
+
+##### FileCreateAndUploadV2
+- Updated the Docker image to: *demisto/python3:3.10.13.89009*.
+
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_8.md b/Packs/CommonScripts/ReleaseNotes/1_14_8.md
new file mode 100644
index 000000000000..f35b94c2dd9c
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_8.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### FormatURL
+
+- Fixed an issue that an extra char caught by the regex resulted in the formatter tagging valid CIDRs as URLs.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_9.md b/Packs/CommonScripts/ReleaseNotes/1_14_9.md
new file mode 100644
index 000000000000..bbfc5b2b4d98
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_9.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### FormatURL
+
+- Fixed an issue where URLs without a path and with an email caused the formatter to return an empty string.
diff --git a/Packs/CommonScripts/Scripts/AddDBotScoreToContext/AddDBotScoreToContext.yml b/Packs/CommonScripts/Scripts/AddDBotScoreToContext/AddDBotScoreToContext.yml
index 4c20b2817b37..302f52cc9583 100644
--- a/Packs/CommonScripts/Scripts/AddDBotScoreToContext/AddDBotScoreToContext.yml
+++ b/Packs/CommonScripts/Scripts/AddDBotScoreToContext/AddDBotScoreToContext.yml
@@ -26,7 +26,7 @@ comment: Add DBot score to context for indicators with custom vendor, score, rel
commonfields:
id: AddDBotScoreToContext
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: AddDBotScoreToContext
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/AquatoneDiscoverV2/AquatoneDiscoverV2.yml b/Packs/CommonScripts/Scripts/AquatoneDiscoverV2/AquatoneDiscoverV2.yml
index 8e59f1011101..752d54bb70ae 100644
--- a/Packs/CommonScripts/Scripts/AquatoneDiscoverV2/AquatoneDiscoverV2.yml
+++ b/Packs/CommonScripts/Scripts/AquatoneDiscoverV2/AquatoneDiscoverV2.yml
@@ -18,7 +18,7 @@ outputs:
scripttarget: 0
timeout: 1h0m0s
runonce: true
-dockerimage: demisto/aquatone:2.0.0.36846
+dockerimage: demisto/aquatone:2.0.0.89205
comment: 'aquatone-discover will find the targets nameservers and shuffle DNS lookups between them. Should a lookup fail on the target domains nameservers, aquatone-discover will fall back to using Google public DNS servers to maximize discovery.'
fromversion: 6.5.0
tests:
diff --git a/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.js b/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.js
index 383fd69184a7..4887f8ad18fd 100644
--- a/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.js
+++ b/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.js
@@ -1,3 +1,21 @@
+// returns true if the current platform is XSIAM.
+isXsiam = function () {
+ res = getDemistoVersion();
+ platform = res.platform;
+ if (platform === "x2") {
+ return true
+ }
+ return false
+}
+
+if (isXsiam()){
+ incidentObject = JSON.parse(incObj);
+ if (incidentObject.length > 0 && incidentObject[0].id.startsWith("INCIDENT")) {
+ throw "AssignAnalystToIncident script can only be used within an alert and not from an incident."
+ }
+}
+
+
if (args.email && args.username) {
throw 'Please provide either username or email';
}
diff --git a/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.yml b/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.yml
index bf8ddb936e92..bdd1204502f6 100644
--- a/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.yml
+++ b/Packs/CommonScripts/Scripts/AssignAnalystToIncident/AssignAnalystToIncident.yml
@@ -14,7 +14,18 @@ comment: |-
top-user: The user that is most commonly owns this type of incident
less-busy-user: The less busy analyst will be picked to be the incident owner.
online: The analyst is picked randomly from all online analysts, according to the provided roles (if no roles provided, will fetch all users).
- current: The user that executed the command
+ current: The user that executed the command.
+comment:marketplacev2: |-
+ Assign analyst to incident.
+ Note: the script should be executed from within an alert.
+ By default, the analyst is picked randomly from the available users, according to the provided roles (if no roles provided, will fetch all users).
+ Otherwise, the analyst will be picked according to the 'assignBy' arguments.
+ machine-learning: DBot will calculated and decide who is the best analyst for the job.
+ top-user: The user that is most commonly owns this type of incident
+ less-busy-user: The less busy analyst will be picked to be the incident owner.
+ online: The analyst is picked randomly from all online analysts, according to the provided roles (if no roles provided, will fetch all users).
+ current: The user that executed the command.
+
enabled: true
args:
- name: roles
@@ -33,7 +44,7 @@ args:
- name: username
description: When specified, the provided user will be assigned as the incident owner (optional).
- name: email
- description: When specified, the user of provided email will be assigned as the incident owner (optional)
+ description: When specified, the user of provided email will be assigned as the incident owner (optional).
- name: onCall
auto: PREDEFINED
predefined:
diff --git a/Packs/CommonScripts/Scripts/AssignToMeButton/AssignToMeButton.yml b/Packs/CommonScripts/Scripts/AssignToMeButton/AssignToMeButton.yml
index c22cb91edddd..1dcc7026d632 100644
--- a/Packs/CommonScripts/Scripts/AssignToMeButton/AssignToMeButton.yml
+++ b/Packs/CommonScripts/Scripts/AssignToMeButton/AssignToMeButton.yml
@@ -2,7 +2,7 @@ comment: 'Assigns the current Incident to the Cortex XSOAR user who clicked the
commonfields:
id: AssignToMeButton
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: AssignToMeButton
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/CVSSCalculator/CVSSCalculator.yml b/Packs/CommonScripts/Scripts/CVSSCalculator/CVSSCalculator.yml
index 404f7b353059..eaa0f8fcda78 100644
--- a/Packs/CommonScripts/Scripts/CVSSCalculator/CVSSCalculator.yml
+++ b/Packs/CommonScripts/Scripts/CVSSCalculator/CVSSCalculator.yml
@@ -240,5 +240,5 @@ outputs:
description: Version of CVSS used in the calculation.
type: number
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
fromversion: 5.0.0
diff --git a/Packs/CommonScripts/Scripts/CalculateTimeDifference/CalculateTimeDifference.yml b/Packs/CommonScripts/Scripts/CalculateTimeDifference/CalculateTimeDifference.yml
index 5ebc69947fda..60ba2e3c4487 100644
--- a/Packs/CommonScripts/Scripts/CalculateTimeDifference/CalculateTimeDifference.yml
+++ b/Packs/CommonScripts/Scripts/CalculateTimeDifference/CalculateTimeDifference.yml
@@ -28,4 +28,4 @@ runas: DBotWeakRole
tests:
- Impossible Traveler - Test
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.py b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.py
similarity index 94%
rename from Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.py
rename to Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.py
index 3aae81517bca..a9c618a0b785 100644
--- a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.py
+++ b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.py
@@ -15,7 +15,7 @@
general_name,
certificate_transparency
)
-from typing import Dict, Union, Any, Optional, cast, List
+from typing import Any, cast
_INSTANCE_TO_TYPE = {
general_name.OtherName: 'otherName',
@@ -122,7 +122,7 @@ def load_certificate(path: str) -> x509.Certificate:
return certificate
-def int_to_comma_hex(n: int, blength: Optional[int] = None) -> str:
+def int_to_comma_hex(n: int, blength: int | None = None) -> str:
"""
int_to_comma_hex
Translates an integer in its corresponding hex string
@@ -146,11 +146,11 @@ def int_to_comma_hex(n: int, blength: Optional[int] = None) -> str:
return ':'.join([bhex[i:i + 2] for i in range(0, len(bhex), 2)])
-def public_key_context(pkey: Union[asymmetric.dsa.DSAPublicKey,
- asymmetric.rsa.RSAPublicKey,
- asymmetric.ec.EllipticCurvePublicKey,
- asymmetric.ed25519.Ed25519PublicKey,
- asymmetric.ed448.Ed448PublicKey]) -> Common.CertificatePublicKey:
+def public_key_context(pkey: asymmetric.dsa.DSAPublicKey
+ | asymmetric.rsa.RSAPublicKey
+ | asymmetric.ec.EllipticCurvePublicKey
+ | asymmetric.ed25519.Ed25519PublicKey
+ | asymmetric.ed448.Ed448PublicKey) -> Common.CertificatePublicKey:
"""
public_key_context function
Translates an X509 certificate Public Key into a Common.CertificatePublicKey object
@@ -210,7 +210,7 @@ def map_gn(gn: Any) -> Common.GeneralName:
if gn is None:
raise ValueError('gn cannot be None')
- itype = next((t for t in _INSTANCE_TO_TYPE.keys() if isinstance(gn, t)), None)
+ itype = next((t for t in _INSTANCE_TO_TYPE if isinstance(gn, t)), None)
if itype is not None:
return Common.GeneralName(
gn_type=_INSTANCE_TO_TYPE[itype],
@@ -286,7 +286,7 @@ def extension_context(oid: str, extension_name: str, critical: bool, extension_v
usages=[oid_name(o) for o in extension_value]
)
elif isinstance(extension_value, extensions.CRLDistributionPoints):
- distribution_points: List[Common.CertificateExtension.DistributionPoint] = []
+ distribution_points: list[Common.CertificateExtension.DistributionPoint] = []
dp: extensions.DistributionPoint
for dp in extension_value:
distribution_points.append(Common.CertificateExtension.DistributionPoint(
@@ -301,7 +301,7 @@ def extension_context(oid: str, extension_name: str, critical: bool, extension_v
distribution_points=distribution_points
)
elif isinstance(extension_value, extensions.CertificatePolicies):
- policies: List[Common.CertificateExtension.CertificatePolicy] = []
+ policies: list[Common.CertificateExtension.CertificatePolicy] = []
p: extensions.PolicyInformation
for p in extension_value:
policies.append(Common.CertificateExtension.CertificatePolicy(
@@ -314,7 +314,7 @@ def extension_context(oid: str, extension_name: str, critical: bool, extension_v
certificate_policies=policies
)
elif isinstance(extension_value, extensions.AuthorityInformationAccess):
- descriptions: List[Common.CertificateExtension.AuthorityInformationAccess] = []
+ descriptions: list[Common.CertificateExtension.AuthorityInformationAccess] = []
d: extensions.AccessDescription
for d in extension_value:
descriptions.append(Common.CertificateExtension.AuthorityInformationAccess(
@@ -337,7 +337,7 @@ def extension_context(oid: str, extension_name: str, critical: bool, extension_v
)
)
elif isinstance(extension_value, extensions.PrecertificateSignedCertificateTimestamps):
- presigcerttimestamps: List[Common.CertificateExtension.SignedCertificateTimestamp] = []
+ presigcerttimestamps: list[Common.CertificateExtension.SignedCertificateTimestamp] = []
presct: extensions.SignedCertificateTimestamp
for presct in extension_value:
presigcerttimestamps.append(Common.CertificateExtension.SignedCertificateTimestamp(
@@ -352,7 +352,7 @@ def extension_context(oid: str, extension_name: str, critical: bool, extension_v
signed_certificate_timestamps=presigcerttimestamps
)
elif isinstance(extension_value, extensions.SignedCertificateTimestamps):
- sigcerttimestamps: List[Common.CertificateExtension.SignedCertificateTimestamp] = []
+ sigcerttimestamps: list[Common.CertificateExtension.SignedCertificateTimestamp] = []
sct: extensions.SignedCertificateTimestamp
for sct in extension_value:
sigcerttimestamps.append(Common.CertificateExtension.SignedCertificateTimestamp(
@@ -391,7 +391,7 @@ def certificate_to_context(certificate: x509.Certificate) -> Common.Certificate:
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo))
- extensions_contexts: List[Common.CertificateExtension] = []
+ extensions_contexts: list[Common.CertificateExtension] = []
for extension in certificate.extensions:
extension_oid = cast(oid.ObjectIdentifier, extension.oid)
extensions_contexts.append(extension_context(
@@ -431,9 +431,9 @@ def certificate_to_context(certificate: x509.Certificate) -> Common.Certificate:
''' COMMAND FUNCTION '''
-def certificate_extract_command(args: Dict[str, Any]) -> CommandResults:
- pem: Optional[str] = args.get('pem')
- entry_id: Optional[str] = args.get('entry_id')
+def certificate_extract_command(args: dict[str, Any]) -> CommandResults:
+ pem: str | None = args.get('pem')
+ entry_id: str | None = args.get('entry_id')
if pem is None and entry_id is None:
raise ValueError("You should specify pem or entry_id")
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.yml b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.yml
similarity index 99%
rename from Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.yml
rename to Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.yml
index 4f49200ec0d7..72ef4a771eb6 100644
--- a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract.yml
+++ b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract.yml
@@ -126,7 +126,7 @@ tags: []
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/crypto:1.0.0.87358
+dockerimage: demisto/crypto:1.0.0.88857
fromversion: 6.0.0
tests:
- X509Certificate Test Playbook
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract_test.py b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract_test.py
similarity index 97%
rename from Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract_test.py
rename to Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract_test.py
index 7714abfb3833..1a9948fa2968 100644
--- a/Packs/X509Certificate/Scripts/CertificateExtract/CertificateExtract_test.py
+++ b/Packs/CommonScripts/Scripts/CertificateExtract/CertificateExtract_test.py
@@ -10,7 +10,7 @@ def handle_calling_context(mocker):
def load_json_data(path):
import json
- with open(path, 'r') as f:
+ with open(path) as f:
return json.load(f)
@@ -25,7 +25,7 @@ def test_pem():
"""
from CertificateExtract import certificate_extract_command
- with open('test_data/test.pem', 'r') as f:
+ with open('test_data/test.pem') as f:
contents = f.read()
context = certificate_extract_command({'pem': contents}).to_context()
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/README.md b/Packs/CommonScripts/Scripts/CertificateExtract/README.md
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/README.md
rename to Packs/CommonScripts/Scripts/CertificateExtract/README.md
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev.der b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev.der
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev.der
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev.der
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev.pem b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev.pem
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev.pem
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev.pem
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev_result.json b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev_result.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/pandev_result.json
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/pandev_result.json
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/pem_result.json b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/pem_result.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/pem_result.json
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/pem_result.json
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/test.pem b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/test.pem
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/test.pem
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/test.pem
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/test2.pem b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/test2.pem
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/test2.pem
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/test2.pem
diff --git a/Packs/X509Certificate/Scripts/CertificateExtract/test_data/test2_result.json b/Packs/CommonScripts/Scripts/CertificateExtract/test_data/test2_result.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateExtract/test_data/test2_result.json
rename to Packs/CommonScripts/Scripts/CertificateExtract/test_data/test2_result.json
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.py b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.py
similarity index 84%
rename from Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.py
rename to Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.py
index 4041de85e97b..eea4657ef4fb 100644
--- a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.py
+++ b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.py
@@ -5,7 +5,7 @@
import dateparser
from datetime import timedelta
from enum import Enum
-from typing import List, Dict, Any, Optional, Tuple
+from typing import Any
# Threshold defining "long expiration". When validity_not_after - validity_not_before is greater than
@@ -39,7 +39,7 @@ def get_indicator_from_value(indicator_value: str):
return None
-def indicator_set_validation_checks(ivalue: str, checks: List[CertificateValidationTag]) -> None:
+def indicator_set_validation_checks(ivalue: str, checks: list[CertificateValidationTag]) -> None:
# we call setIndicator for each check because if you pass the full list to setIndicator at once
# it will just set the field with the stringified version of the list
for c in checks:
@@ -50,8 +50,8 @@ def indicator_set_validation_checks(ivalue: str, checks: List[CertificateValidat
})
-def certificate_fields_to_context(certindicator_fields: Dict[str, Any]) -> Optional[Dict[str, Any]]:
- pem: Optional[str]
+def certificate_fields_to_context(certindicator_fields: dict[str, Any]) -> dict[str, Any] | None:
+ pem: str | None
if (pem := certindicator_fields.get('pem')) is None:
return None
@@ -72,10 +72,10 @@ def certificate_fields_to_context(certindicator_fields: Dict[str, Any]) -> Optio
return entry_context
-def dbot_context(value: str, certificate_context: Dict[str, Any]
- ) -> Tuple[List[CertificateValidationTag], List[str], Dict[str, Any]]:
- comments: List[str] = []
- tags: List[CertificateValidationTag] = []
+def dbot_context(value: str, certificate_context: dict[str, Any]
+ ) -> tuple[list[CertificateValidationTag], list[str], dict[str, Any]]:
+ comments: list[str] = []
+ tags: list[CertificateValidationTag] = []
some_checks_not_performed: bool = False
current_score = Common.DBotScore.NONE
@@ -185,8 +185,10 @@ def dbot_context(value: str, certificate_context: Dict[str, Any]
comments.append('No Extensions available, some checks could not be performed')
some_checks_not_performed = True
else:
- subject_key_identifier = next((e.get('Value') for e in extensions if e.get('OID') == '2.5.29.14'), None)
- authority_key_identifier = next((e.get('Value') for e in extensions if e.get('OID') == '2.5.29.35'), None)
+ subject_key_identifier = next((e.get('Value') for e in extensions
+ if e.get('OID') == '2.5.29.14'), None) # disable-secrets-detection
+ authority_key_identifier = next((e.get('Value') for e in extensions
+ if e.get('OID') == '2.5.29.35'), None) # disable-secrets-detection
subject_key_identifier_digest = None
authority_key_identifier_ki = None
@@ -199,25 +201,31 @@ def dbot_context(value: str, certificate_context: Dict[str, Any]
some_checks_not_performed = True
comments.append('Valid AuthorityKeyIdentifier Extension not available, some checks not performed')
- if subject_key_identifier_digest is not None and authority_key_identifier_ki is not None:
- if subject_key_identifier_digest == authority_key_identifier_ki:
- comments.append(f'{CertificateValidationTag.SELF_SIGNED.value} Self-Signed Certificate')
- tags.append(CertificateValidationTag.SELF_SIGNED)
- current_score = max(current_score, Common.DBotScore.SUSPICIOUS)
+ if (subject_key_identifier_digest is not None
+ and authority_key_identifier_ki is not None
+ and subject_key_identifier_digest == authority_key_identifier_ki):
+ comments.append(f'{CertificateValidationTag.SELF_SIGNED.value} Self-Signed Certificate')
+ tags.append(CertificateValidationTag.SELF_SIGNED)
+ current_score = max(current_score, Common.DBotScore.SUSPICIOUS)
- elif subject_key_identifier_digest is not None and authority_key_identifier_ki is None:
- if subject_dn is not None and issuer_dn is not None and subject_dn == issuer_dn:
- comments.append(f'{CertificateValidationTag.SELF_SIGNED.value} Self-Signed Certificate')
- tags.append(CertificateValidationTag.SELF_SIGNED)
- current_score = max(current_score, Common.DBotScore.SUSPICIOUS)
+ elif (subject_key_identifier_digest is not None
+ and authority_key_identifier_ki is None
+ and subject_dn is not None
+ and issuer_dn is not None
+ and subject_dn == issuer_dn):
+ comments.append(f'{CertificateValidationTag.SELF_SIGNED.value} Self-Signed Certificate')
+ tags.append(CertificateValidationTag.SELF_SIGNED)
+ current_score = max(current_score, Common.DBotScore.SUSPICIOUS)
# if self-signed we also check this is self-issued
- if CertificateValidationTag.SELF_SIGNED in tags:
- if subject_dn is not None and issuer_dn is not None and subject_dn != issuer_dn:
- comments.append(f'{CertificateValidationTag.INVALID_DISTINGUISHED_NAMES.value}'
- ' Self-Signed Certificate with different Issuer DN and Subject DN')
- tags.append(CertificateValidationTag.INVALID_DISTINGUISHED_NAMES)
- current_score = Common.DBotScore.BAD
+ if (CertificateValidationTag.SELF_SIGNED in tags
+ and subject_dn is not None
+ and issuer_dn is not None
+ and subject_dn != issuer_dn):
+ comments.append(f'{CertificateValidationTag.INVALID_DISTINGUISHED_NAMES.value}'
+ ' Self-Signed Certificate with different Issuer DN and Subject DN')
+ tags.append(CertificateValidationTag.INVALID_DISTINGUISHED_NAMES)
+ current_score = Common.DBotScore.BAD
if not some_checks_not_performed:
# if we didn't have to skip any check we can mark the cert as good
@@ -237,7 +245,7 @@ def dbot_context(value: str, certificate_context: Dict[str, Any]
''' COMMAND FUNCTION '''
-def certificate_reputation_command(args: Dict[str, Any]) -> Dict[str, Any]:
+def certificate_reputation_command(args: dict[str, Any]) -> dict[str, Any]:
input_ = args.get('input')
if input_ is None:
raise ValueError("input argument is required")
@@ -253,7 +261,7 @@ def certificate_reputation_command(args: Dict[str, Any]) -> Dict[str, Any]:
'ReadableContentsFormat': formats['markdown']
}
- comments: List[str] = []
+ comments: list[str] = []
indicator_value = indicator.get('value')
if indicator_value is None:
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.yml b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.yml
similarity index 98%
rename from Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.yml
rename to Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.yml
index 0e4154a0a28b..ce187709004a 100644
--- a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation.yml
+++ b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation.yml
@@ -133,5 +133,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
fromversion: 6.0.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation_test.py b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation_test.py
similarity index 97%
rename from Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation_test.py
rename to Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation_test.py
index 7a9f875b3c1d..e288583bcc85 100644
--- a/Packs/X509Certificate/Scripts/CertificateReputation/CertificateReputation_test.py
+++ b/Packs/CommonScripts/Scripts/CertificateReputation/CertificateReputation_test.py
@@ -4,7 +4,7 @@
def load_json_data(path):
import json
- with open(path, 'r') as f:
+ with open(path) as f:
return json.load(f)
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/README.md b/Packs/CommonScripts/Scripts/CertificateReputation/README.md
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/README.md
rename to Packs/CommonScripts/Scripts/CertificateReputation/README.md
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca.json b/Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca.json
rename to Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca.json
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca.pem b/Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca.pem
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca.pem
rename to Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca.pem
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca_result.json b/Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca_result.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/test_data/ca_result.json
rename to Packs/CommonScripts/Scripts/CertificateReputation/test_data/ca_result.json
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/test_data/messy.json b/Packs/CommonScripts/Scripts/CertificateReputation/test_data/messy.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/test_data/messy.json
rename to Packs/CommonScripts/Scripts/CertificateReputation/test_data/messy.json
diff --git a/Packs/X509Certificate/Scripts/CertificateReputation/test_data/messy_result.json b/Packs/CommonScripts/Scripts/CertificateReputation/test_data/messy_result.json
similarity index 100%
rename from Packs/X509Certificate/Scripts/CertificateReputation/test_data/messy_result.json
rename to Packs/CommonScripts/Scripts/CertificateReputation/test_data/messy_result.json
diff --git a/Packs/CommonScripts/Scripts/CloseInvestigationAsDuplicate/CloseInvestigationAsDuplicate.yml b/Packs/CommonScripts/Scripts/CloseInvestigationAsDuplicate/CloseInvestigationAsDuplicate.yml
index 56955bd0c093..d7f69cc2abc4 100644
--- a/Packs/CommonScripts/Scripts/CloseInvestigationAsDuplicate/CloseInvestigationAsDuplicate.yml
+++ b/Packs/CommonScripts/Scripts/CloseInvestigationAsDuplicate/CloseInvestigationAsDuplicate.yml
@@ -15,6 +15,6 @@ args:
description: Duplicate incident id
scripttarget: 0
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/CompareIncidentsLabels/CompareIncidentsLabels.yml b/Packs/CommonScripts/Scripts/CompareIncidentsLabels/CompareIncidentsLabels.yml
index 618df6055f96..f4115a842ef6 100644
--- a/Packs/CommonScripts/Scripts/CompareIncidentsLabels/CompareIncidentsLabels.yml
+++ b/Packs/CommonScripts/Scripts/CompareIncidentsLabels/CompareIncidentsLabels.yml
@@ -23,6 +23,6 @@ timeout: '0'
type: python
subtype: python3
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- CompareIncidentsLabels-test-playbook
diff --git a/Packs/CommonScripts/Scripts/ContextContains/ContextContains.yml b/Packs/CommonScripts/Scripts/ContextContains/ContextContains.yml
index 452fa71184d2..03d722c57b30 100644
--- a/Packs/CommonScripts/Scripts/ContextContains/ContextContains.yml
+++ b/Packs/CommonScripts/Scripts/ContextContains/ContextContains.yml
@@ -17,6 +17,6 @@ args:
description: Value to search
scripttarget: 0
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/ConvertDateToUTC/ConvertDateToUTC.yml b/Packs/CommonScripts/Scripts/ConvertDateToUTC/ConvertDateToUTC.yml
index 3daa070b3f41..d2b792355662 100644
--- a/Packs/CommonScripts/Scripts/ConvertDateToUTC/ConvertDateToUTC.yml
+++ b/Packs/CommonScripts/Scripts/ConvertDateToUTC/ConvertDateToUTC.yml
@@ -28,7 +28,7 @@ outputs:
type: Unknown
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 5.0.0
tests:
diff --git a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.py b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.py
index 0706fe36a9a9..07fbdc292278 100644
--- a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.py
+++ b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.py
@@ -7,7 +7,6 @@
import tempfile
import shutil
import traceback
-from typing import List
def find_zombie_processes():
@@ -29,35 +28,35 @@ def find_zombie_processes():
return zombies, ps_out
-def convert_file(file_path: str, out_format: str, all_files: bool, outdir: str) -> List[str]:
+def convert_file(file_path: str, out_format: str, all_files: bool, outdir: str) -> list[str]:
try:
run_cmd = ['soffice', '--headless', '-env:UserInstallation=file:///tmp/convertfile/.config',
'--convert-to', out_format, file_path, '--outdir', outdir]
env = os.environ.copy()
env['HOME'] = '/tmp/convertfile'
res = subprocess.check_output(run_cmd, stderr=subprocess.STDOUT, universal_newlines=True, env=env)
- demisto.debug("completed running: {}. With result: {}".format(run_cmd, res))
+ demisto.debug(f"completed running: {run_cmd}. With result: {res}")
if all_files:
files = glob.glob(outdir + '/*')
else:
ext = out_format.split(':')[0]
files = glob.glob(outdir + '/*.' + ext)
if not files:
- raise ValueError('Failed convert for output format: {}. Convert process log: {}'.format(out_format, res))
+ raise ValueError(f'Failed convert for output format: {out_format}. Convert process log: {res}')
return files
finally:
# make sure we don't have zombie processes (seen when converting pdf to html)
try:
zombies, ps_out = find_zombie_processes()
if zombies: # pragma no cover
- demisto.info("Found zombie processes will waitpid: {}".format(ps_out))
+ demisto.info(f"Found zombie processes will waitpid: {ps_out}")
for pid in zombies:
waitres = os.waitpid(int(pid), os.WNOHANG)
- demisto.info("waitpid result: {}".format(waitres))
+ demisto.info(f"waitpid result: {waitres}")
else:
- demisto.debug("No zombie processes found for ps output: {}".format(ps_out))
+ demisto.debug(f"No zombie processes found for ps output: {ps_out}")
except Exception as ex:
- demisto.error("Failed checking for zombie processes: {}. Trace: {}".format(ex, traceback.format_exc()))
+ demisto.error(f"Failed checking for zombie processes: {ex}. Trace: {traceback.format_exc()}")
def main():
@@ -68,8 +67,8 @@ def main():
try:
result = demisto.getFilePath(entry_id)
if not result:
- return_error("Couldn't find entry id: {}".format(entry_id))
- demisto.debug('going to convert: {}'.format(result))
+ return_error(f"Couldn't find entry id: {entry_id}")
+ demisto.debug(f'going to convert: {result}')
file_path = result['path']
file_path_name_only = os.path.splitext(os.path.basename(file_path))[0]
file_name = result.get('name')
@@ -78,7 +77,7 @@ def main():
with tempfile.TemporaryDirectory() as outdir:
files = convert_file(file_path, out_format, all_files, outdir)
if not files:
- return_error('No file result returned for convert format: {}'.format(out_format))
+ return_error(f'No file result returned for convert format: {out_format}')
return
for f in files:
temp = demisto.uniqueFile()
@@ -94,9 +93,9 @@ def main():
'FileID': temp
})
except subprocess.CalledProcessError as e:
- return_error("Failed converting file. Output: {}. Error: {}".format(e.output, e))
+ return_error(f"Failed converting file. Output: {e.output}. Error: {e}")
except Exception as e:
- return_error("Failed converting file. General exception: {}.\n\nTrace:\n{}".format(e, traceback.format_exc()))
+ return_error(f"Failed converting file. General exception: {e}.\n\nTrace:\n{traceback.format_exc()}")
# python2 uses __builtin__ python3 uses builtins
diff --git a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.yml b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.yml
index 163e094272d9..e4f15dc9ee3f 100644
--- a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.yml
+++ b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile.yml
@@ -42,7 +42,7 @@ outputs:
description: The file type.
type: String
scripttarget: 0
-dockerimage: demisto/office-utils:2.0.0.82639
+dockerimage: demisto/office-utils:2.0.0.88735
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile_test.py b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile_test.py
index 8ea0a7d8945a..a7fdfa1ca89c 100644
--- a/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile_test.py
+++ b/Packs/CommonScripts/Scripts/ConvertFile/ConvertFile_test.py
@@ -53,9 +53,9 @@ def test_convert_to_html(mocker):
assert results[0]['Type'] == entryTypes['file']
assert results[0]['File'] == 'test.html'
glob_list = glob.glob('./*' + results[0]['FileID'])
- logging.getLogger().info('glob list for results: {}. list: {}'.format(results[0], glob_list))
+ logging.getLogger().info(f'glob list for results: {results[0]}. list: {glob_list}')
assert glob_list
- with open(glob_list[0], "r") as f:
+ with open(glob_list[0]) as f:
contents = f.read()
assert 'Extensions to the Office Open XML' in contents
# assert the next result is an image
@@ -79,7 +79,7 @@ def test_convert_pdf_to_html(mocker):
assert results[0]['Type'] == entryTypes['file']
assert results[0]['File'] == 'test.html'
glob_list = glob.glob('./*' + results[0]['FileID'])
- logging.getLogger().info('glob list for results: {}. list: {}'.format(results[0], glob_list))
+ logging.getLogger().info(f'glob list for results: {results[0]}. list: {glob_list}')
assert glob_list
# check no defunct processed
zombies, output = find_zombie_processes()
diff --git a/Packs/CommonScripts/Scripts/CopyContextToField/CopyContextToField.yml b/Packs/CommonScripts/Scripts/CopyContextToField/CopyContextToField.yml
index ec9d6f17b085..1777780cda3f 100644
--- a/Packs/CommonScripts/Scripts/CopyContextToField/CopyContextToField.yml
+++ b/Packs/CommonScripts/Scripts/CopyContextToField/CopyContextToField.yml
@@ -32,6 +32,6 @@ timeout: '0'
type: python
subtype: python3
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- CopyContextToFieldTest
diff --git a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.py b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.py
index f4a1c9dcc546..d26fef85d59b 100644
--- a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.py
+++ b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.py
@@ -10,6 +10,24 @@
KEY_CREATION_STATUS = 'CreationStatus'
+def associate_indicator_to_incident(indicator_value: Any) -> None:
+ """
+ Associate an indicator to this incident. Raise an exception if an error occurs.
+ """
+
+ incident_id = demisto.incidents()[0].get('id')
+
+ cmd_args = {
+ 'incidentId': incident_id,
+ 'value': f"{indicator_value}" # Force an error
+ }
+
+ res = execute_command('associateIndicatorToIncident', cmd_args)
+
+ if (res != 'done'):
+ raise Exception(f"Failed to associate {indicator_value} with incident {incident_id}")
+
+
def normalize_indicator_value(indicator_value: Any) -> str:
if isinstance(indicator_value, int):
return str(indicator_value)
@@ -47,6 +65,7 @@ def add_new_indicator(indicator_value: Any,
}
else:
raise DemistoException(f'Unknown response from createNewIndicator: str{indicator_value}')
+ associate_indicator_to_incident(indicator_value)
return indicator
diff --git a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.yml b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.yml
index 75e2d7503515..ddd4f005676f 100644
--- a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.yml
+++ b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly.yml
@@ -6,7 +6,7 @@ script: ''
type: python
tags:
- Utility
-comment: Create indicators to the Threat Intel database only if they are not registered. When using the script with many indicators, or when the Threat Intel Management database is highly populated, this script may have low performance issue.
+comment: Create indicators to the Threat Intel database only if they are not registered. All submitted indicators will be associated with the parent incident. When using the script with many indicators, or when the Threat Intel Management database is highly populated, this script may have low performance issue.
enabled: true
args:
- name: indicator_values
@@ -69,7 +69,7 @@ outputs:
type: string
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
runas: DBotWeakRole
fromversion: 6.5.0
tests:
diff --git a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly_test.py b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly_test.py
index e719c1470776..40701e96bb85 100644
--- a/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly_test.py
+++ b/Packs/CommonScripts/Scripts/CreateNewIndicatorsOnly/CreateNewIndicatorsOnly_test.py
@@ -1,410 +1,425 @@
-import demistomock as demisto
-from CommonServerPython import * # noqa: F401
-import CreateNewIndicatorsOnly
-from typing import Any
-
-
-def equals_object(obj1, obj2) -> bool:
- if not isinstance(obj1, type(obj2)):
- return False
- elif isinstance(obj1, dict):
- for k1, v1 in obj1.items():
- if k1 not in obj2:
- return False
- if not equals_object(v1, obj2[k1]):
- return False
- return not (set(obj1.keys()) ^ set(obj2.keys()))
- elif isinstance(obj1, list):
- # Compare lists (ignore order)
- list2 = list(obj2)
- for _i1, v1 in enumerate(obj1):
- for i2, v2 in enumerate(list2):
- if equals_object(v1, v2):
- list2.pop(i2)
- break
- else:
- return False
- return not list2
- else:
- return obj1 == obj2
-
-
-def test_no_values(mocker):
- """
- Given:
- No values are given to the 'indicator_values'.
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': [],
- })
-
- expected_entry_context = {}
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '0 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_all_indicators_exist_with_single_value(mocker):
- """
- Given:
- A single indicator existing in the threat intel is given to the 'indicator_values'.
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- return [{
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }]
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': '1.1.1.1',
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'existing',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '1.1.1.1'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '0 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_all_indicators_exist_with_multiple_value(mocker):
- """
- Given:
- All indicators existing in the threat intel are given to the 'indicator_values'.
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- return [{
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }]
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': [
- '1.1.1.1',
- '2.2.2.2'
- ],
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'existing',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '1.1.1.1'
- }, {
- 'CreationStatus': 'existing',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '2.2.2.2'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '0 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_some_indicators_exist_with_multiple_value(mocker):
- """
- Given:
- Some indicators existing in the threat intel are given to the 'indicator_values'.
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- value = args.get('value')
- if value != '1.1.1.1':
- return []
- else:
- return [{
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }]
- elif cmd == 'createNewIndicator':
- return {
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': [
- '1.1.1.1',
- '2.2.2.2'
- ],
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'existing',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '1.1.1.1'
- }, {
- 'CreationStatus': 'new',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '2.2.2.2'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '1 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_some_indicators_are_excluded(mocker):
- """
- Given:
- Some indicators given to the 'indicator_values' are in the exclusion list.
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- return []
- elif cmd == 'createNewIndicator':
- value = args.get('value')
- if value == '1.1.1.1':
- return 'done - Indicator was not created'
- else:
- return {
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': [
- '1.1.1.1',
- '2.2.2.2'
- ],
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'unavailable',
- 'Type': 'Unknown',
- 'Value': '1.1.1.1'
- }, {
- 'CreationStatus': 'new',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '2.2.2.2'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '1 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_indicator_including_commas(mocker):
- """
- Given:
- An indicator given to the 'indicator_values' contains commas
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- return []
- elif cmd == 'createNewIndicator':
- return {
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': 'http://www.paloaltonetworks.com/?q=,123',
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'new',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': 'http://www.paloaltonetworks.com/?q=,123'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '1 new indicators have been added' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_print_verbose(mocker):
- """
- Given:
- `verbose=true` is given to the argument parameters
-
- When:
- Running the script
-
- Then:
- Validate the right response returns.
- """
- def __execute_command(cmd, args) -> Any:
- if cmd == 'findIndicators':
- return []
- elif cmd == 'createNewIndicator':
- return {
- 'id': '0',
- 'value': args.get('value'),
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }
- raise ValueError('Unexpected calls')
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- mocker.patch.object(demisto, 'args', return_value={
- 'indicator_values': '1.1.1.1',
- 'verbose': 'true'
- })
-
- expected_entry_context = {
- 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
- 'CreationStatus': 'new',
- 'ID': '0',
- 'Score': 0,
- 'Type': 'Unknown',
- 'Value': '1.1.1.1'
- }
- ]
- }
-
- mocker.patch.object(demisto, 'results')
- CreateNewIndicatorsOnly.main()
- assert demisto.results.call_count == 1
- results = demisto.results.call_args[0][0]
- assert '|ID|Score|CreationStatus|Type|Value' in results.get('HumanReadable')
- assert equals_object(expected_entry_context, results.get('EntryContext'))
-
-
-def test_findIndicators_called_with_escaped_quotes(mocker):
- """
- Given:
- indicator_value = "(External):Test \"test2 test (unsigned)\""
- When:
- The 'add_new_indicator' function is called with the indicator_value = "(External):Test \"test2 test (unsigned)\""
- (when the user runs in cli:!CreateNewIndicatorsOnlyTest indicator_values=`(External):Test "test2 test (unsigned)"`)
- Then:
- 1. The 'execute_command' function should be called with the correct escaped value.
- 2. The 'add_new_indicator' function should return the expected result as a dictionary.
- """
- from CreateNewIndicatorsOnly import add_new_indicator
- indicator_value = "(External):Test \"test2 test (unsigned)\""
- expected_value = indicator_value.replace('"', r"\"")
-
- def __execute_command(cmd, args) -> Any:
- assert args == {'value': expected_value}
- if cmd == 'findIndicators':
- return [{
- 'id': '0',
- 'value': '(External):Test "test2 test (unsigned)"',
- 'score': 0,
- 'indicator_type': args.get('type', 'Unknown')
- }]
- return None
-
- mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
-
- result = add_new_indicator(indicator_value, {})
- assert result == {'id': '0', 'value': '(External):Test "test2 test (unsigned)"',
- 'score': 0, 'indicator_type': 'Unknown', 'CreationStatus': 'existing'}
+import demistomock as demisto
+from CommonServerPython import * # noqa: F401
+import CreateNewIndicatorsOnly
+from typing import Any
+
+
+def equals_object(obj1, obj2) -> bool:
+ if not isinstance(obj1, type(obj2)):
+ return False
+ elif isinstance(obj1, dict):
+ for k1, v1 in obj1.items():
+ if k1 not in obj2:
+ return False
+ if not equals_object(v1, obj2[k1]):
+ return False
+ return not (set(obj1.keys()) ^ set(obj2.keys()))
+ elif isinstance(obj1, list):
+ # Compare lists (ignore order)
+ list2 = list(obj2)
+ for _i1, v1 in enumerate(obj1):
+ for i2, v2 in enumerate(list2):
+ if equals_object(v1, v2):
+ list2.pop(i2)
+ break
+ else:
+ return False
+ return not list2
+ else:
+ return obj1 == obj2
+
+
+def test_no_values(mocker):
+ """
+ Given:
+ No values are given to the 'indicator_values'.
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': [],
+ })
+
+ expected_entry_context = {}
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '0 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_all_indicators_exist_with_single_value(mocker):
+ """
+ Given:
+ A single indicator existing in the threat intel is given to the 'indicator_values'.
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ return [{
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }]
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': '1.1.1.1',
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'existing',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '1.1.1.1'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '0 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_all_indicators_exist_with_multiple_value(mocker):
+ """
+ Given:
+ All indicators existing in the threat intel are given to the 'indicator_values'.
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ return [{
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }]
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': [
+ '1.1.1.1',
+ '2.2.2.2'
+ ],
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'existing',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '1.1.1.1'
+ }, {
+ 'CreationStatus': 'existing',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '2.2.2.2'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '0 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_some_indicators_exist_with_multiple_value(mocker):
+ """
+ Given:
+ Some indicators existing in the threat intel are given to the 'indicator_values'.
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ value = args.get('value')
+ if value != '1.1.1.1':
+ return []
+ else:
+ return [{
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }]
+ elif cmd == 'createNewIndicator':
+ return {
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': [
+ '1.1.1.1',
+ '2.2.2.2'
+ ],
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'existing',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '1.1.1.1'
+ }, {
+ 'CreationStatus': 'new',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '2.2.2.2'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '1 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_some_indicators_are_excluded(mocker):
+ """
+ Given:
+ Some indicators given to the 'indicator_values' are in the exclusion list.
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ return []
+ elif cmd == 'createNewIndicator':
+ value = args.get('value')
+ if value == '1.1.1.1':
+ return 'done - Indicator was not created'
+ else:
+ return {
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': [
+ '1.1.1.1',
+ '2.2.2.2'
+ ],
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'unavailable',
+ 'Type': 'Unknown',
+ 'Value': '1.1.1.1'
+ }, {
+ 'CreationStatus': 'new',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '2.2.2.2'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '1 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_indicator_including_commas(mocker):
+ """
+ Given:
+ An indicator given to the 'indicator_values' contains commas
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ return []
+ elif cmd == 'createNewIndicator':
+ return {
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': 'http://www.paloaltonetworks.com/?q=,123',
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'new',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': 'http://www.paloaltonetworks.com/?q=,123'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '1 new indicators have been added' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_print_verbose(mocker):
+ """
+ Given:
+ `verbose=true` is given to the argument parameters
+
+ When:
+ Running the script
+
+ Then:
+ Validate the right response returns.
+ """
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ return []
+ elif cmd == 'createNewIndicator':
+ return {
+ 'id': '0',
+ 'value': args.get('value'),
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }
+ elif cmd == 'associateIndicatorToIncident':
+ return 'done'
+ raise ValueError('Unexpected calls')
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'indicator_values': '1.1.1.1',
+ 'verbose': 'true'
+ })
+
+ expected_entry_context = {
+ 'CreateNewIndicatorsOnly(val.Value && val.Value == obj.Value && val.Type && val.Type == obj.Type)': [{
+ 'CreationStatus': 'new',
+ 'ID': '0',
+ 'Score': 0,
+ 'Type': 'Unknown',
+ 'Value': '1.1.1.1'
+ }
+ ]
+ }
+
+ mocker.patch.object(demisto, 'results')
+ CreateNewIndicatorsOnly.main()
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0][0]
+ assert '|ID|Score|CreationStatus|Type|Value' in results.get('HumanReadable')
+ assert equals_object(expected_entry_context, results.get('EntryContext'))
+
+
+def test_findIndicators_called_with_escaped_quotes(mocker):
+ """
+ Given:
+ indicator_value = "(External):Test \"test2 test (unsigned)\""
+ When:
+ The 'add_new_indicator' function is called with the indicator_value = "(External):Test \"test2 test (unsigned)\""
+    (when the user runs in the CLI: !CreateNewIndicatorsOnly indicator_values=`(External):Test "test2 test (unsigned)"`)
+ Then:
+ 1. The 'execute_command' function should be called with the correct escaped value.
+ 2. The 'add_new_indicator' function should return the expected result as a dictionary.
+ """
+ from CreateNewIndicatorsOnly import add_new_indicator
+ indicator_value = "(External):Test \"test2 test (unsigned)\""
+ expected_value = indicator_value.replace('"', r"\"")
+
+ def __execute_command(cmd, args) -> Any:
+ if cmd == 'findIndicators':
+ assert args == {'value': expected_value}
+ return [{
+ 'id': '0',
+ 'value': '(External):Test "test2 test (unsigned)"',
+ 'score': 0,
+ 'indicator_type': args.get('type', 'Unknown')
+ }]
+ elif cmd == 'associateIndicatorToIncident':
+ assert args == {'incidentId': '1', 'value': indicator_value}
+ return 'done'
+ return None
+
+ mocker.patch('CreateNewIndicatorsOnly.execute_command', side_effect=__execute_command)
+
+ result = add_new_indicator(indicator_value, {})
+ assert result == {'id': '0', 'value': '(External):Test "test2 test (unsigned)"',
+ 'score': 0, 'indicator_type': 'Unknown', 'CreationStatus': 'existing'}
diff --git a/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.js b/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.js
index 85cbd29947c4..3318c9f548aa 100644
--- a/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.js
+++ b/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.js
@@ -6,9 +6,45 @@ function errorEntry(text) {
};
}
-var fieldsToDelete;
+function hasDuplicates(arr) {
+ return arr.some( function(item) {
+ return arr.indexOf(item) !== arr.lastIndexOf(item);
+ });
+}
+
+/**
+ * Deletes keys from the context and handles errors.
+ * @param {Array} keys - An array of keys to delete.
+ * @returns {string} A message summarizing the outcome of the delete operation.
+ */
+function deleteKeys(keysToDelete) {
+ var deletedKeys = []
+ var errors = []
+ var message = "";
+ for (var key of keysToDelete) {
+ const originalKey = typeof key === "string" ? key.trim() : key;
+ if (!dq(invContext, originalKey)) {
+ errors.push(`key does not exist: ${originalKey}`);
+ continue;
+ }
+ const keyToDelete = isSubPlaybookKey ? 'subplaybook-${currentPlaybookID}.' + originalKey: originalKey;
+ const result = executeCommand('delContext', { key: keyToDelete });
+
+ if (!result || result.type === entryTypes.error) {
+ errors.push(result.Contents);
+ } else {
+ deletedKeys.push(key);
+ }
+ }
+ if (deletedKeys.length > 0) {
+ message += `\nSuccessfully deleted keys '${deletedKeys.join("', '")}' from context.`;
+ }
+ return message;
+}
+
var shouldDeleteAll = (args.all === 'yes');
var isSubPlaybookKey = (args.subplaybook === 'yes');
+
if (args.subplaybook === 'auto') {
var res = executeCommand('Print', { value: 'id=${currentPlaybookID}' });
if (res && res[0].Contents && res[0].Contents.startsWith('id=')) {
@@ -25,14 +61,8 @@ if (!shouldDeleteAll && !args.key) {
Type: entryTypes.error};
}
-function hasDuplicates(arr) {
- return arr.some( function(item) {
- return arr.indexOf(item) !== arr.lastIndexOf(item);
- });
-}
-
if (shouldDeleteAll) {
- var keysToKeep = (args.keysToKeep) ? args.keysToKeep.split(',').map(function(item) { return item.trim(); }) : [];
+ var keysToKeep = (args.keysToKeep) ? args.keysToKeep.split(',').map(item => item.trim()) : [];
var keysToKeepObj = {};
var KeepDBotScoreKey = false;
index = keysToKeep.indexOf("DBotScore");
@@ -62,29 +92,9 @@ if (shouldDeleteAll) {
}
}
}
- fieldsToDelete = Object.keys(invContext);
+ var keysToDelete = Object.keys(invContext);
- // delete each field in context
- var errorsStr = "";
- for (var i = 0; i < fieldsToDelete.length; i++) {
- var key = fieldsToDelete[i];
- if (isSubPlaybookKey) {
- key = 'subplaybook-${currentPlaybookID}.' + key;
- }
- if (key !== "DBotScore" || !KeepDBotScoreKey) {
- var result = executeCommand('delContext', {key: key});
- if(!result || result.type === entryTypes.error) {
- errorsStr = errorsStr + "\n" + result.Contents;
- }
- }
- }
-
- var message;
- if (errorsStr) {
- message = "Context cleared with the following errors:" + errorsStr;
- } else {
- message = "Context cleared";
- }
+ var message = deleteKeys(keysToDelete, isSubPlaybookKey)
return {
Type: entryTypes.note,
@@ -96,7 +106,7 @@ if (shouldDeleteAll) {
};
} else if (args.index !== undefined) {
- // delete key in a specific index
+ // Delete key in a specific index.
var index = parseInt(args.index);
if (isNaN(index)) {
return errorEntry("Invalid index " + args.index)
@@ -134,9 +144,17 @@ if (shouldDeleteAll) {
return "Successfully deleted index " + index + " from key " + args.key;
} else {
- var key = args.key;
- if (isSubPlaybookKey) {
- key = 'subplaybook-${currentPlaybookID}.' + key;
- }
- return executeCommand('delContext', {key: key});
+ // Supporting comma separated list of keys to be deleted.
+ var keysToDelete = (typeof args.key === "string") ? args.key.split(',') : [args.key]
+
+ var message = deleteKeys(keysToDelete, isSubPlaybookKey)
+ return {
+ Type: entryTypes.note,
+ Contents: message,
+ ContentsFormat: formats.json,
+ HumanReadable: message,
+ ReadableContentsFormat: formats.markdown,
+ EntryContext: keysToKeepObj
+ };
+
}
diff --git a/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.yml b/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.yml
index 54137fb8c99b..6493866d754a 100644
--- a/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.yml
+++ b/Packs/CommonScripts/Scripts/DeleteContext/DeleteContext.yml
@@ -16,13 +16,13 @@ enabled: true
args:
- name: key
default: true
- description: The key to delete from the context
+ description: List of comma-separated keys to delete from context.
- name: all
auto: PREDEFINED
predefined:
- "yes"
- "no"
- description: If you choose yes - all context will be deleted
+ description: If you choose yes - all context will be deleted.
- name: subplaybook
auto: PREDEFINED
predefined:
@@ -33,7 +33,7 @@ args:
- name: keysToKeep
description: Context keys to keep when deleting all context. Supports comma separated values and nested objects, e.g. URL.Data,IP.Address
- name: index
- description: index to delete in case 'key' argument was specified
+  description: Index to delete in case the 'key' argument was specified.
scripttarget: 0
runas: DBotRole
sensitive: true
diff --git a/Packs/CommonScripts/Scripts/DemistoVersion/DemistoVersion.yml b/Packs/CommonScripts/Scripts/DemistoVersion/DemistoVersion.yml
index 3eaa43086f7a..d4ef4905475f 100644
--- a/Packs/CommonScripts/Scripts/DemistoVersion/DemistoVersion.yml
+++ b/Packs/CommonScripts/Scripts/DemistoVersion/DemistoVersion.yml
@@ -16,7 +16,7 @@ outputs:
type: string
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 5.0.0
marketplaces:
diff --git a/Packs/CommonScripts/Scripts/EncodeToAscii/EncodeToAscii.yml b/Packs/CommonScripts/Scripts/EncodeToAscii/EncodeToAscii.yml
index 21c387674fd1..a199048a3558 100644
--- a/Packs/CommonScripts/Scripts/EncodeToAscii/EncodeToAscii.yml
+++ b/Packs/CommonScripts/Scripts/EncodeToAscii/EncodeToAscii.yml
@@ -18,6 +18,6 @@ outputs:
type: string
scripttarget: 0
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/ExportContextToJSONFile/ExportContextToJSONFile.yml b/Packs/CommonScripts/Scripts/ExportContextToJSONFile/ExportContextToJSONFile.yml
index 21d5cbb1257d..b0ad63b4922e 100644
--- a/Packs/CommonScripts/Scripts/ExportContextToJSONFile/ExportContextToJSONFile.yml
+++ b/Packs/CommonScripts/Scripts/ExportContextToJSONFile/ExportContextToJSONFile.yml
@@ -9,7 +9,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: ExportContextToJSONFile
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
new file mode 100644
index 000000000000..8786248758d3
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
@@ -0,0 +1,100 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+import openpyxl
+from docx import Document
+from pptx import Presentation
+import zipfile
+import pandas as pd
+
+
+def extract_hyperlinks_from_xlsx(file_path: str) -> Set:
+ with zipfile.ZipFile(file_path, "r") as zf:
+ xmls = [zf.read(fn) for fn in zf.infolist()
+ if fn.filename.startswith("xl/drawings/_rels/")]
+
+ urls = set()
+
+ for xml_data in xmls:
+ df = pd.read_xml(xml_data)
+
+ if "TargetMode" in df.columns:
+ filtered_df = df.loc[df["TargetMode"].eq("External"), "Target"]
+ urls |= set(filtered_df)
+
+ wb = openpyxl.load_workbook(file_path)
+ for sheet in wb:
+ for row in sheet.iter_rows():
+ for cell in row:
+ if cell.hyperlink:
+ urls.add(cell.hyperlink.target)
+
+ return urls
+
+
+def extract_hyperlinks_from_docx(file_path: str) -> Set:
+ doc = Document(file_path)
+ links = set()
+ for para in doc.paragraphs:
+ for hyper in para.hyperlinks:
+ if hyper.address:
+ links.add(hyper.address)
+ return links
+
+
+def extract_hyperlinks_from_pptx(file_path: str) -> Set:
+ prs = Presentation(file_path)
+ links = set()
+ for slide in prs.slides:
+ for shape in slide.shapes:
+ if shape.has_text_frame:
+ for paragraph in shape.text_frame.paragraphs:
+ for run in paragraph.runs:
+ if run.hyperlink and run.hyperlink.address:
+ links.add(run.hyperlink.address)
+ if shape.click_action and shape.click_action.hyperlink.address:
+ links.add(shape.click_action.hyperlink.address)
+
+ return links
+
+
+def extract_hyperlink_by_file_type(file_name: str, file_path: str) -> CommandResults:
+ if file_name.endswith('.xlsx'):
+ result = extract_hyperlinks_from_xlsx(file_path)
+ elif file_name.endswith('.docx'):
+ result = extract_hyperlinks_from_docx(file_path)
+ elif file_name.endswith('.pptx'):
+ result = extract_hyperlinks_from_pptx(file_path)
+ else:
+ raise ValueError("Unsupported file type. Supported types are: 'xlsx, docx, pptx'")
+ if result:
+ urls_str = "\n".join(result)
+ hr = f'### Extracted Hyperlinks\n\n{urls_str}'
+ else:
+ hr = '**No hyperlinks.**'
+
+ output = [{'URL': url, 'FileName': file_name} for url in result]
+ return CommandResults(
+ outputs=output,
+ outputs_prefix='ExtractedHyperLink',
+ outputs_key_field=['URL', 'FileName'],
+ readable_output=hr,
+ raw_response=list(result)
+ )
+
+
+def main(): # pragma: no cover
+ try:
+ entry_id = demisto.args().get("entry_id")
+ file_result = demisto.getFilePath(entry_id)
+ if not file_result:
+ raise ValueError(f"Couldn't find entry id: {entry_id}")
+ file_name = file_result.get('name')
+ file_path = file_result.get('path')
+ os.rename(f'./{file_path}', file_name)
+ return_results(extract_hyperlink_by_file_type(file_name=file_name, file_path=os.path.realpath(file_name)))
+ except Exception as ex:
+ return_error(f'Failed to execute ExtractHyperlinksFromOfficeFiles. Error: {str(ex)}')
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
new file mode 100644
index 000000000000..2842d24fe96a
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
@@ -0,0 +1,22 @@
+args:
+- description: 'The entry id of the file to extract hyperlinks from.'
+ name: entry_id
+ required: true
+comment: 'Extracts hyperlinks from office files. Supported file types are: xlsx, docx, pptx.'
+commonfields:
+ id: ExtractHyperlinksFromOfficeFiles
+ version: -1
+name: ExtractHyperlinksFromOfficeFiles
+outputs:
+- contextPath: ExtractedHyperLink.URL
+ description: The URL of the extracted hyperlink.
+ type: String
+- contextPath: ExtractedHyperLink.FileName
+ description: The office file from which the hyperlinks are extracted.
+ type: String
+script: '-'
+timeout: '0'
+type: python
+subtype: python3
+dockerimage: demisto/office-utils:2.0.0.88298
+fromversion: 5.5.0
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
new file mode 100644
index 000000000000..7f26494c3a78
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
@@ -0,0 +1,38 @@
+import pytest
+from ExtractHyperlinksFromOfficeFiles import extract_hyperlink_by_file_type
+
+
+@pytest.mark.parametrize('file_path, expected_output', [
+ ('test_data/d1.docx',
+ {'https://xsoar.pan.dev/', 'https://www.paloaltonetworks.com/', 'https://jobs.paloaltonetworks.com/en/'}),
+ ('test_data/d2.docx', set()),
+ ('test_data/e1.xlsx', {'http://www.google.com', 'http://www.yahoo.de/'}),
+ ('test_data/e2.xlsx', set()),
+ ('test_data/e3.xlsx', {'https://www.paloaltonetworks.com/'}),
+ ('test_data/p1.pptx', {'https://xsoar.pan.dev/', 'https://www.paloaltonetworks.com/'}),
+ ('test_data/p2.pptx', set()),
+])
+def test_basescript_dummy(file_path, expected_output):
+ """
+ Given:
+ 1. docx file with hyperlinks on a picture and text.
+ 2. docx file without hyperlinks
+ 3. excel file with hyperlinks on a picture and inside text cell.
+ 4. excel file with no hyperlinks.
+ 5. excel file with hyperlinks inside text cell.
+ 6. power point file with hyperlinks on a picture and text.
+ 7. power point file without hyperlinks.
+ When:
+ Extracting hyperlinks from file using ExtractHyperlinksFromOfficeFiles script.
+ Then:
+ Validate that:
+ 1. hyperlinks extracted from docx file
+ 2. no hyperlinks extracted from docx file
+ 3. hyperlinks extracted from excel file
+ 4. no hyperlinks extracted from excel file
+ 5. hyperlinks extracted from excel file
+ 6. hyperlinks extracted from power point file
+ 7. no hyperlinks extracted from power point file
+ """
+ response = extract_hyperlink_by_file_type(file_name=file_path, file_path=file_path)
+ assert set(response.raw_response) == expected_output
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/README.md b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/README.md
new file mode 100644
index 000000000000..338d38078620
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/README.md
@@ -0,0 +1,50 @@
+Extract hyperlinks from office files. Supported file types are: xlsx, docx, pptx.
+
+## Script Data
+
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Cortex XSOAR Version | 5.5.0 |
+
+## Inputs
+
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| entry_id | The entry id of the file to extract hyperlinks from. |
+
+## Outputs
+
+---
+
+| **Path** | **Description** | **Type** |
+| --- | --- | --- |
+| ExtractedHyperLink.URL | The URL of the extracted hyperlink. | String |
+| ExtractedHyperLink.FileName | The office file from which the hyperlinks were extracted. | String |
+
+## Script Examples
+
+### Example command
+
+```!ExtractHyperlinksFromOfficeFiles entry_id=1249@93725c86-540d-4ee4-8728-f0ab82b1cb46```
+
+### Context Example
+
+```json
+{
+ "ExtractedHyperLink": {
+ "FileName": "Link.docx",
+ "URL": "https://www.paloaltonetworks.com/"
+ }
+}
+```
+
+### Human Readable Output
+
+>### Extracted Hyperlinks
+>
+>https://www.paloaltonetworks.com/
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d1.docx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d1.docx
new file mode 100644
index 000000000000..8eb83b6fb230
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d1.docx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d2.docx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d2.docx
new file mode 100644
index 000000000000..9cf4d1b6f6fc
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d2.docx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e1.xlsx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e1.xlsx
new file mode 100644
index 000000000000..6647e2bf9886
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e1.xlsx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e2.xlsx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e2.xlsx
new file mode 100644
index 000000000000..3fc536b83572
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e2.xlsx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e3.xlsx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e3.xlsx
new file mode 100644
index 000000000000..8589c2d77791
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/e3.xlsx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p1.pptx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p1.pptx
new file mode 100644
index 000000000000..3281441f85ae
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p1.pptx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p2.pptx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p2.pptx
new file mode 100644
index 000000000000..31d0919dc2d1
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p2.pptx differ
diff --git a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
index 38e9b43cff7e..5b8242a6b3c5 100644
--- a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
+++ b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
@@ -47,6 +47,9 @@ def xls_file_to_indicator_list(file_path, sheet_name, col_num, starting_row, aut
# TODO: add run on all columns functionality
+    # Ensure that Element_has_iter will not be reset after opening the workbook.
+ xlrd.xlsx.ensure_elementtree_imported(False, None)
+ xlrd.xlsx.Element_has_iter = True
xl_woorkbook = xlrd.open_workbook(file_path)
if sheet_name and sheet_name != 'None':
xl_sheet = xl_woorkbook.sheet_by_name(sheet_name)
diff --git a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
index 1b31d1146be5..40b143180884 100644
--- a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
+++ b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
@@ -64,7 +64,7 @@ tags:
- indicators
timeout: '0'
type: python
-dockerimage: demisto/py3-tools:1.0.0.46591
+dockerimage: demisto/py3-tools:1.0.0.89345
fromversion: 6.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/FileCreateAndUploadV2/FileCreateAndUploadV2.yml b/Packs/CommonScripts/Scripts/FileCreateAndUploadV2/FileCreateAndUploadV2.yml
index aeed086c22f4..4f3500d8690e 100644
--- a/Packs/CommonScripts/Scripts/FileCreateAndUploadV2/FileCreateAndUploadV2.yml
+++ b/Packs/CommonScripts/Scripts/FileCreateAndUploadV2/FileCreateAndUploadV2.yml
@@ -27,8 +27,8 @@ args:
defaultValue: raw
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
runas: DBotWeakRole
-fromversion: 6.2.0
+fromversion: 6.0.0
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/FormatURL/FormatURL.py b/Packs/CommonScripts/Scripts/FormatURL/FormatURL.py
index 60c8e584b253..d619902ae4b6 100644
--- a/Packs/CommonScripts/Scripts/FormatURL/FormatURL.py
+++ b/Packs/CommonScripts/Scripts/FormatURL/FormatURL.py
@@ -89,15 +89,18 @@ def __init__(self, original_url: str):
# The URL seems to have a scheme indicated by presence of "//" or "%3A"
self.scheme_check()
- try:
- # First slash after the scheme (if exists)
- first_slash = self.modified_url[self.base:].index("/")
+ host_end_position = -1
+ special_chars = ("/", "?", "#") # Any one of these states the end of the host / authority part in a URL
- except ValueError:
- first_slash = -1
+ for char in special_chars:
+ try:
+ host_end_position = self.modified_url[self.base:].index(char)
+ break # index for the end of the part found, breaking loop
+ except ValueError:
+ continue # no reserved char found, URL has no path, query or fragment parts.
try:
- if "@" in self.modified_url[:first_slash]:
+ if "@" in self.modified_url[:host_end_position]:
# Checks if url has '@' sign in its authority part
self.user_info_check()
@@ -732,6 +735,9 @@ def _is_valid_cidr(cidr: str) -> bool:
True if inout is a valid CIDR
"""
+    if not cidr[-1].isdigit():  # precaution in case the regex caught an extra char by mistake
+ cidr = cidr[:-1]
+
try:
ipaddress.ip_network(cidr)
return True
diff --git a/Packs/CommonScripts/Scripts/FormatURL/FormatURL.yml b/Packs/CommonScripts/Scripts/FormatURL/FormatURL.yml
index 3a902c8ebf19..6ad12230be0a 100644
--- a/Packs/CommonScripts/Scripts/FormatURL/FormatURL.yml
+++ b/Packs/CommonScripts/Scripts/FormatURL/FormatURL.yml
@@ -18,7 +18,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.13.87159
+dockerimage: demisto/python3:3.10.13.89009
fromversion: 5.5.0
tests:
- FormatURL-Test
diff --git a/Packs/CommonScripts/Scripts/FormatURL/FormatURL_test.py b/Packs/CommonScripts/Scripts/FormatURL/FormatURL_test.py
index d34ec8b7eb90..03b8c7c6d2c5 100644
--- a/Packs/CommonScripts/Scripts/FormatURL/FormatURL_test.py
+++ b/Packs/CommonScripts/Scripts/FormatURL/FormatURL_test.py
@@ -177,7 +177,9 @@
('https://www.test.test.com/test.html?paramaters=testagain', # disable-secrets-detection
'https://www.test.test.com/test.html?paramaters=testagain'), # disable-secrets-detection
('https://test.test.com/v2/test?test&test=[test]test', # disable-secrets-detection
- 'https://test.test.com/v2/test?test&test=[test]test') # disable-secrets-detection
+ 'https://test.test.com/v2/test?test&test=[test]test'), # disable-secrets-detection
+ ('https://test.dev?email=some@email.addres', # disable-secrets-detection
+ 'https://test.dev?email=some@email.addres'), # disable-secrets-detection
]
FORMAT_FRAGMENT = [
@@ -186,6 +188,8 @@
'http://_23_11.redacted.com./#redactedredactedredacted'), # disable-secrets-detection
('https://test.com?a=b#fragment3', 'https://test.com?a=b#fragment3'), # disable-secrets-detection
('https://test.com/?a=b#fragment3', 'https://test.com/?a=b#fragment3'), # disable-secrets-detection
+ ('https://test.dev#fragment', # disable-secrets-detection
+ 'https://test.dev#fragment') # disable-secrets-detection
]
FORMAT_REFANG = [
@@ -298,6 +302,28 @@ def test_hex_chars(self, non_formatted_url: str, expected: str):
hex = non_formatted_url.find('%')
assert url.hex_check(hex)
+ cidr_strings = [
+ ("192.168.0.0/16", True), # Valid CIDR
+ ("192.168.0.0/16.", True), # Valid CIDR with an extra char caught by the regex
+ ("192.168.0.1/16", False), # Invalid CIDR
+ ("192.168.0.1/16.", False), # Invalid CIDR with an extra char caught by the regex
+ ]
+
+ @pytest.mark.parametrize('input, expected', cidr_strings)
+ def test_is_valid_cidr(self, input: str, expected: str):
+ from FormatURL import _is_valid_cidr
+ """
+ Given:
+ - non_formatted_url: A CIDR input.
+
+ When:
+ - Regex caught a string with a CIDR structure.
+
+ Then:
+ - Ensure the formatter avoids valid CIDRs.
+ """
+ assert _is_valid_cidr(input) == expected
+
@pytest.mark.parametrize('url_, expected', FORMAT_URL_TEST_DATA)
def test_format_url(self, url_: str, expected: str):
"""
diff --git a/Packs/CommonScripts/Scripts/GenerateRandomString/GenerateRandomString.yml b/Packs/CommonScripts/Scripts/GenerateRandomString/GenerateRandomString.yml
index 080930a8f49e..d69a332f03a5 100644
--- a/Packs/CommonScripts/Scripts/GenerateRandomString/GenerateRandomString.yml
+++ b/Packs/CommonScripts/Scripts/GenerateRandomString/GenerateRandomString.yml
@@ -49,4 +49,4 @@ scripttarget: 0
tests:
- RandomStringGenerateTest
fromversion: 6.2.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
diff --git a/Packs/CommonScripts/Scripts/GenerateSummaryReportButton/GenerateSummaryReportButton.yml b/Packs/CommonScripts/Scripts/GenerateSummaryReportButton/GenerateSummaryReportButton.yml
index 770c564852f5..f1dd182289a5 100644
--- a/Packs/CommonScripts/Scripts/GenerateSummaryReportButton/GenerateSummaryReportButton.yml
+++ b/Packs/CommonScripts/Scripts/GenerateSummaryReportButton/GenerateSummaryReportButton.yml
@@ -2,7 +2,7 @@ comment: This button will generate summary 'Case Report' template for a given In
commonfields:
id: GenerateSummaryReportButton
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: GenerateSummaryReportButton
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.js b/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.js
index d63a9c978896..7d49c9ae7b10 100644
--- a/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.js
+++ b/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.js
@@ -65,7 +65,7 @@ function finish(playbookId, tag, err, entryGUID) {
}
-function setNextRun(ids, playbookId, pollingCommand, pollingCommandArgName, pendingIds, interval, timeout, tag, additionalArgNames, additionalArgValues) {
+function setNextRun(ids, playbookId, pollingCommand, pollingCommandArgName, pendingIds, interval, timeout, tag, additionalArgNames, additionalArgValues, extractMode) {
var idsStr = ids.replace(/"/g, '\\"');
var playbookIdStr = '';
if (playbookId !== undefined) {
@@ -74,6 +74,9 @@ function setNextRun(ids, playbookId, pollingCommand, pollingCommandArgName, pend
var cmd = '!GenericPollingScheduledTask pollingCommand="' + pollingCommand + '" pollingCommandArgName="' + pollingCommandArgName + '"' + playbookIdStr;
cmd += ' ids="' + idsStr + '" pendingIds="' + pendingIds.replace(/"/g,'\\"') + '" interval="' + interval + '" timeout="' + (parseInt(timeout) - parseInt(interval)) + '" tag="' + tag + '"';
cmd += ' additionalPollingCommandArgNames="' + additionalArgNames + '" additionalPollingCommandArgValues="' + additionalArgValues + '"';
+ if (extractMode !== undefined) {
+ cmd += ' extractMode="' + extractMode + '" auto-extract="' + extractMode + '"';
+ }
return executeCommand("ScheduleCommand", {
'command': cmd,
'cron': '*/' + interval + ' * * * *',
@@ -173,7 +176,7 @@ function genericPollingScheduled(){
if (!shouldRunWithGuid) {
// Schedule the next iteration, old version.
- var scheduleTaskRes = setNextRun(args.ids, args.playbookId, args.pollingCommand, args.pollingCommandArgName, args.pendingIds, args.interval, args.timeout, args.tag, args.additionalPollingCommandArgNames, args.additionalPollingCommandArgValues);
+ var scheduleTaskRes = setNextRun(args.ids, args.playbookId, args.pollingCommand, args.pollingCommandArgName, args.pendingIds, args.interval, args.timeout, args.tag, args.additionalPollingCommandArgNames, args.additionalPollingCommandArgValues, args.extractMode);
if (isError(scheduleTaskRes[0])) {
res.push(scheduleTaskRes);
}
diff --git a/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.yml b/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.yml
index 8885a6c38fcc..04d703340249 100644
--- a/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.yml
+++ b/Packs/CommonScripts/Scripts/GenericPollingScheduledTask/GenericPollingScheduledTask.yml
@@ -43,6 +43,8 @@ args:
description: The GUID of the scheduled entry that runs the polling command.
- name: endTime
description: The time to end the polling.
+- name: extractMode
+ description: Extraction mode for subsequent runs of the command.
scripttarget: 0
tests:
- Generic Polling Test
diff --git a/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.py b/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.py
index 2c6e695f971e..4e405788c401 100644
--- a/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.py
+++ b/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.py
@@ -30,7 +30,7 @@ def parse_www_auth(www_auth):
return (match.group(1), match.group(2))
-def docker_auth(image_name, verify_ssl=True, registry=DEFAULT_REGISTRY):
+def docker_auth(image_name, verify_ssl=True, registry=DEFAULT_REGISTRY, gateway_creds=""):
"""
Authenticate to the docker service. Return an authentication token if authentication is required.
"""
@@ -55,9 +55,12 @@ def docker_auth(image_name, verify_ssl=True, registry=DEFAULT_REGISTRY):
else:
demisto.info('Failed extracting www-authenticate header from registry: {}, final url: {}'.format(
registry, res.url))
+ headers = ACCEPT_HEADER.copy()
+ if gateway_creds and registry != DEFAULT_REGISTRY:
+ headers['Authorization'] = "Basic {}".format(gateway_creds)
res = requests.get(
'{}?scope=repository:{}:pull&service={}'.format(realm, image_name, service),
- headers=ACCEPT_HEADER,
+ headers=headers,
timeout=TIMEOUT,
verify=verify_ssl
)
@@ -143,12 +146,13 @@ def main():
del os.environ['https_proxy']
verify_ssl = demisto.args().get('trust_any_certificate') != 'yes'
docker_full_name = demisto.args()['docker_image']
+ gateway_creds = demisto.args().get('creds_for_opp', "")
registry = DEFAULT_REGISTRY
image_name = docker_full_name
if docker_full_name.count('/') > 1:
registry, image_name = docker_full_name.split('/', 1)
try:
- auth_token = docker_auth(image_name, verify_ssl, registry)
+ auth_token = docker_auth(image_name, verify_ssl, registry, gateway_creds)
headers = ACCEPT_HEADER.copy()
if auth_token:
headers['Authorization'] = "Bearer {}".format(auth_token)
diff --git a/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.yml b/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.yml
index 4f4b5fdcfdd4..be042fe40696 100644
--- a/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.yml
+++ b/Packs/CommonScripts/Scripts/GetDockerImageLatestTag/GetDockerImageLatestTag.yml
@@ -25,7 +25,9 @@ args:
- "no"
description: Trust any certificate (not secure).
defaultValue: "no"
+- name: creds_for_opp
+ description: Credentials for OPP gateway.
tests:
- No test - no testplaybook, but unit tests exist
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
diff --git a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.py b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.py
index bed84542bccb..eb873a1cb4da 100644
--- a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.py
+++ b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.py
@@ -2,10 +2,7 @@
from CommonServerPython import * # noqa: F401
-from typing import Dict, List
-
-
-def grid_field_setup(keys: List[str], vals: Dict, res_list: List) -> List[str]:
+def grid_field_setup(keys: list[str], vals: dict, res_list: list) -> list[str]:
"""Returns a list of dictionaries based on the key/values provided.
:type keys: ``str``
:type vals: ``dict``
@@ -29,7 +26,7 @@ def grid_field_setup(keys: List[str], vals: Dict, res_list: List) -> List[str]:
''' COMMAND FUNCTION '''
-def grid_field_setup_command(args: Dict[str, str]) -> CommandResults:
+def grid_field_setup_command(args: dict[str, str]) -> CommandResults:
keys = argToList(args.pop('keys', []))
overwrite = args.pop('overwrite', False)
gridfield = args.pop('gridfield', None)
diff --git a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
index c80eb5186523..63087160bd54 100644
--- a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
+++ b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
@@ -39,7 +39,7 @@ comment: |-
commonfields:
id: GridFieldSetup
version: -1
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
enabled: true
name: GridFieldSetup
runas: DBotWeakRole
@@ -51,4 +51,4 @@ tags:
type: python
fromversion: 6.5.0
tests:
-- GridFieldSetup_test
+- No tests
diff --git a/Packs/CommonScripts/Scripts/IPToHost/IPToHost.yml b/Packs/CommonScripts/Scripts/IPToHost/IPToHost.yml
index efeb51647d73..ea005c717028 100644
--- a/Packs/CommonScripts/Scripts/IPToHost/IPToHost.yml
+++ b/Packs/CommonScripts/Scripts/IPToHost/IPToHost.yml
@@ -24,6 +24,6 @@ outputs:
type: string
scripttarget: 0
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- IPToHost - Test
diff --git a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.py b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.py
index a47585fae16e..307c6de93ab0 100644
--- a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.py
+++ b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.py
@@ -66,6 +66,9 @@ def identify_attached_mail(args):
else:
entries = demisto.executeCommand('getEntries', {"filter": {"categories": ["attachments"]}})
+ if not entries:
+ return 'no', None
+
for e in entries:
id = is_entry_email(e)
if id:
diff --git a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.yml b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.yml
index dbb7afd937ce..8f1db8a80336 100644
--- a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.yml
+++ b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail.yml
@@ -28,4 +28,4 @@ tests:
- Process Email - Generic - Test - Incident Starter
- Phishing v2 - Test - Incident Starter
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.13.86272
+dockerimage: demisto/python3:3.10.13.87159
diff --git a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail_test.py b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail_test.py
index df84b466f8db..ece17fa892c4 100644
--- a/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail_test.py
+++ b/Packs/CommonScripts/Scripts/IdentifyAttachedEmail/IdentifyAttachedEmail_test.py
@@ -166,3 +166,85 @@ def execute_command(command, args):
results = identify_attached_mail({})
assert results == ('yes', {'reportedemailentryid': '23@2'})
+
+
+def test_identify_attached_mail_no_email_found(mocker):
+ """
+ Given
+ - no email entries in the warroom
+ - the platform is xsoar saas
+
+ When
+ - running the script to get the entries
+
+ Then
+ - no entries to be found
+
+ """
+ import CommonServerPython
+ mocker.patch.object(CommonServerPython, 'get_demisto_version', return_value={
+ 'version': '8.2.0',
+ 'buildNumber': '12345'
+ })
+
+ def execute_command(command, args):
+ if command == 'getEntries' and args == {"filter": {"categories": ["attachments"]}}:
+ return
+ else:
+ pytest.fail()
+
+ mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
+
+ results = identify_attached_mail({})
+ assert results == ('no', None)
+
+
+def test_list_of_entries_passed_in_xsoar_saas_but_no_file_entries(mocker):
+ """
+ Given
+ - two entries with ids 23@2 24@2 which are not file entries
+ - the platform is xsoar saas
+
+ When
+ - running the script to get the entries
+
+ Then
+ - expect the getEntriesByIDs to be called
+ - expect no email entries to be found
+
+ """
+ entry_ids = """[\"23@2\",\"24@2\"]"""
+ import CommonServerPython
+ mocker.patch.object(CommonServerPython, 'get_demisto_version', return_value={
+ 'version': '8.2.0',
+ 'buildNumber': '12345'
+ })
+
+ def execute_command(command, args):
+ if command == 'getEntriesByIDs' and args.get('entryIDs') == '23@2,24@2':
+ return [
+ {
+ 'File': 'msg.txt',
+ 'FileMetadata': {
+ 'info': 'ASCII text, with CRLF line terminators'
+ },
+ 'ID': '23@2'
+ },
+ {
+ 'File': 'foo.txt',
+ 'FileMetadata': {
+ 'info': 'ASCII text, with CRLF line terminators'
+ },
+ 'ID': '24@2'
+ }
+ ]
+ else:
+ pytest.fail()
+
+ mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
+
+ args = {
+ 'entryid': entry_ids
+ }
+ results = identify_attached_mail(args)
+ assert results == ('no', None)
diff --git a/Packs/CommonScripts/Scripts/IndicatorMaliciousRatioCalculation/IndicatorMaliciousRatioCalculation.yml b/Packs/CommonScripts/Scripts/IndicatorMaliciousRatioCalculation/IndicatorMaliciousRatioCalculation.yml
index 3ac1449baed7..d9d8fba6562b 100644
--- a/Packs/CommonScripts/Scripts/IndicatorMaliciousRatioCalculation/IndicatorMaliciousRatioCalculation.yml
+++ b/Packs/CommonScripts/Scripts/IndicatorMaliciousRatioCalculation/IndicatorMaliciousRatioCalculation.yml
@@ -44,4 +44,4 @@ timeout: 300ns
fromversion: 5.0.0
tests:
- IndicatorMaliciousRatioCalculation_test
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
diff --git a/Packs/CommonScripts/Scripts/IsDomainInternal/IsDomainInternal.yml b/Packs/CommonScripts/Scripts/IsDomainInternal/IsDomainInternal.yml
index ceba2474b991..8554b2b00b2b 100644
--- a/Packs/CommonScripts/Scripts/IsDomainInternal/IsDomainInternal.yml
+++ b/Packs/CommonScripts/Scripts/IsDomainInternal/IsDomainInternal.yml
@@ -28,7 +28,7 @@ outputs:
type: boolean
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.5.0
marketplaces:
diff --git a/Packs/CommonScripts/Scripts/IsEmailAddressInternal/README.md b/Packs/CommonScripts/Scripts/IsEmailAddressInternal/README.md
index 6a6c2f2fdae1..754328cbbbe0 100644
--- a/Packs/CommonScripts/Scripts/IsEmailAddressInternal/README.md
+++ b/Packs/CommonScripts/Scripts/IsEmailAddressInternal/README.md
@@ -5,7 +5,7 @@ Checks if the email address is part of the internal domain.
| **Name** | **Description** |
| --- | --- |
-| Script Type | python |
+| Script Type | javascript |
| Tags | email |
diff --git a/Packs/CommonScripts/Scripts/IsIPPrivate/IsIPPrivate.yml b/Packs/CommonScripts/Scripts/IsIPPrivate/IsIPPrivate.yml
index a701eab16e9c..290998bb8e47 100644
--- a/Packs/CommonScripts/Scripts/IsIPPrivate/IsIPPrivate.yml
+++ b/Packs/CommonScripts/Scripts/IsIPPrivate/IsIPPrivate.yml
@@ -32,7 +32,7 @@ outputs:
description: Any tags that were added to the indicator. The tags are added by this script if they were specified for the IP or IP range in the Cortex XSOAR list.
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.5.0
marketplaces:
diff --git a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.py b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.py
index 7d5a34aa21bb..3fb1cf4d7e69 100644
--- a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.py
+++ b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.py
@@ -6,6 +6,25 @@
MAXIMUM_NUMBER_OF_RECORDS = 10
+def is_sub_domain_contained(main_domain: str, sub_domain: str) -> bool:
+ """Check if the subdomain is contained within the main domain.
+
+ Args:
+ main_domain (str): The main domain.
+ sub_domain (str): The sub domain.
+
+ Returns:
+ bool: True if the main_domain contains the sub_domain, False otherwise
+ """
+ main_domain_parts = list(reversed(main_domain.split(".")))
+ sub_domain_pats = list(reversed(sub_domain.split(".")))
+
+ if len(main_domain_parts) > len(sub_domain_pats):
+ return False
+
+ return all(value == sub_domain_pats[i] for i, value in enumerate(main_domain_parts))
+
+
def check_sub_domains_in_domain(domains_to_compare: list, sub_domains_to_check: list):
"""
@@ -30,7 +49,7 @@ def check_sub_domains_in_domain(domains_to_compare: list, sub_domains_to_check:
headers = ["DomainToTest", "DomainToCompare", "IsInternal"]
for sub_domain in sub_domains_to_check:
# in case sub domain is in at least one of the given main domains
- is_in_domain = any(main_domain in sub_domain for main_domain in domains_to_compare)
+ is_in_domain = any(is_sub_domain_contained(main_domain, sub_domain) for main_domain in domains_to_compare)
context_entry.append({
'DomainToTest': sub_domain,
'DomainToCompare': domains_to_compare,
diff --git a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.yml b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.yml
index c18eefc876f6..a8648d4a54f2 100644
--- a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.yml
+++ b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName.yml
@@ -32,5 +32,5 @@ tags:
- Utility
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.13.86272
+dockerimage: demisto/python3:3.10.13.89009
fromversion: 5.0.0
diff --git a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName_test.py b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName_test.py
index 6b0bc5d49865..6658ceab5119 100644
--- a/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName_test.py
+++ b/Packs/CommonScripts/Scripts/IsInternalDomainName/IsInternalDomainName_test.py
@@ -1,4 +1,4 @@
-from IsInternalDomainName import check_sub_domains_in_domain
+from IsInternalDomainName import check_sub_domains_in_domain, is_sub_domain_contained
import pytest
@@ -23,6 +23,10 @@
("paloaltonetworkss.com", ["paloaltonetworks.com"], False),
("apps.paloaltonetworks.com", ["paloaltonetworks.com"], True)]
),
+ (["cig.eu"], ["abcdcig.eu"], [("abcdcig.eu", ["cig.eu"], False)]),
+ (["cd.com"], ["ab-cd.com"], [("ab-cd.com", ["cd.com"], False)]),
+ (["ab-cd.com"], ["zz.ab-cd.com"], [("zz.ab-cd.com", ["ab-cd.com"], True)]),
+
])
def test_check_in_domain(domain_name, domain_to_check, expected_output):
"""
@@ -41,3 +45,19 @@ def test_check_in_domain(domain_name, domain_to_check, expected_output):
assert sub_domain["DomainToTest"] == expected_output[index][0]
assert sub_domain["DomainToCompare"] == expected_output[index][1]
assert sub_domain["IsInternal"] == expected_output[index][2]
+
+
+@pytest.mark.parametrize('main_domain, sub_domain, expected_output',
+ [("paloaltonetworks.com", "apps.paloaltonetworks.com", True),
+ ("cd.com", "ab-cd.com", False),
+ ("cig.eu", "abcdcig.eu", False)])
+def test_extract_main_domain(main_domain, sub_domain, expected_output):
+ """
+ Given:
+ - A main domain and a sub domain
+ When:
+ - Checking if the main_domain contains the sub_domain
+ Then:
+ - Return True if the main_domain contains the sub_domain, False otherwise
+ """
+ assert is_sub_domain_contained(main_domain, sub_domain) == expected_output
diff --git a/Packs/CommonScripts/Scripts/LinkIncidentsButton/LinkIncidentsButton.yml b/Packs/CommonScripts/Scripts/LinkIncidentsButton/LinkIncidentsButton.yml
index 958ac323d95a..1b2d980c89c9 100644
--- a/Packs/CommonScripts/Scripts/LinkIncidentsButton/LinkIncidentsButton.yml
+++ b/Packs/CommonScripts/Scripts/LinkIncidentsButton/LinkIncidentsButton.yml
@@ -14,7 +14,7 @@ comment: |
commonfields:
id: LinkIncidentsButton
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: LinkIncidentsButton
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/LinkIncidentsWithRetry/LinkIncidentsWithRetry.yml b/Packs/CommonScripts/Scripts/LinkIncidentsWithRetry/LinkIncidentsWithRetry.yml
index 39678c11236f..a843ae9e8b7a 100644
--- a/Packs/CommonScripts/Scripts/LinkIncidentsWithRetry/LinkIncidentsWithRetry.yml
+++ b/Packs/CommonScripts/Scripts/LinkIncidentsWithRetry/LinkIncidentsWithRetry.yml
@@ -18,7 +18,7 @@ scripttarget: 0
comment: |-
Use this script to avoid DB version errors when simultaneously running multiple linked incidents.
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
marketplaces:
diff --git a/Packs/CommonScripts/Scripts/LoadJSONFileToContext/LoadJSONFileToContext.yml b/Packs/CommonScripts/Scripts/LoadJSONFileToContext/LoadJSONFileToContext.yml
index 1027daa2de0c..1a678553ea86 100644
--- a/Packs/CommonScripts/Scripts/LoadJSONFileToContext/LoadJSONFileToContext.yml
+++ b/Packs/CommonScripts/Scripts/LoadJSONFileToContext/LoadJSONFileToContext.yml
@@ -12,7 +12,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: LoadJSONFileToContext
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/MarkAsEvidenceByTag/MarkAsEvidenceByTag.yml b/Packs/CommonScripts/Scripts/MarkAsEvidenceByTag/MarkAsEvidenceByTag.yml
index d236cf303063..1903af81892e 100644
--- a/Packs/CommonScripts/Scripts/MarkAsEvidenceByTag/MarkAsEvidenceByTag.yml
+++ b/Packs/CommonScripts/Scripts/MarkAsEvidenceByTag/MarkAsEvidenceByTag.yml
@@ -23,6 +23,6 @@ tags:
- Utility
type: python
fromversion: 6.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/PopulateCriticalAssets/PopulateCriticalAssets.yml b/Packs/CommonScripts/Scripts/PopulateCriticalAssets/PopulateCriticalAssets.yml
index 346a356798cb..04a38e185ba2 100644
--- a/Packs/CommonScripts/Scripts/PopulateCriticalAssets/PopulateCriticalAssets.yml
+++ b/Packs/CommonScripts/Scripts/PopulateCriticalAssets/PopulateCriticalAssets.yml
@@ -18,4 +18,4 @@ runas: DBotWeakRole
tests:
- Calculate Severity - Generic v2 - Test
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
diff --git a/Packs/CommonScripts/Scripts/ScheduleCommand/README.md b/Packs/CommonScripts/Scripts/ScheduleCommand/README.md
index c5832107d7ec..0da2ad138c5b 100644
--- a/Packs/CommonScripts/Scripts/ScheduleCommand/README.md
+++ b/Packs/CommonScripts/Scripts/ScheduleCommand/README.md
@@ -23,8 +23,8 @@ This script is used in the following playbooks and scripts.
| **Argument Name** | **Description** |
| --- | --- |
-| command | The command to schedule |
-| cron | The scheduled time to run |
+| command | The command to schedule. |
+| cron | The scheduled time to run. Uses UTC (Coordinated Universal Time). |
| endDate | When should we end the schedule. Will be only relevant if times is not provided. Optional. Format is 'Mon, 02 Jan 2006 15:04:05 MST' |
| times | The number of times to run. Optional. |
| scheduledEntryGuid | The GUID of the scheduled entry that runs the polling command. |
diff --git a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.py b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.py
index 54497bd09b19..51ca2c91ea4b 100644
--- a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.py
+++ b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.py
@@ -8,6 +8,11 @@
MINIMUM_XSOAR_VERSION = '8.2.0'
MINIMUM_BUILD_NUMBER_XSOAR = 309463
+# Order is important! See is_command_sanitized implementation
+SANITIZED_ARG_NAMES = ['additionalPollingCommandArgValues', 'additionalPollingCommandArgNames', 'pollingCommandArgName',
+ 'pollingCommand',
+ ]
+
# Returns a comma-separated string representation of a list
# Possible inputs: null, int, str, bytes, ["","",...], [int, int], 'a,b,...', '"a","b",...', '["","",...]'
@@ -51,10 +56,33 @@ def calculate_end_time(timeout):
def is_value_sanitized(value):
- arg_names = ['pollingCommand', 'pollingCommandArgName',
- 'additionalPollingCommandArgNames', 'additionalPollingCommandArgValues',
- ]
- return all(current_arg_name not in value for current_arg_name in arg_names)
+ return all(current_arg_name not in value for current_arg_name in SANITIZED_ARG_NAMES)
+
+
+def is_command_sanitized(command):
+ malformed_args = []
+ for current_sanitized_arg_name in SANITIZED_ARG_NAMES:
+ if command.count(current_sanitized_arg_name) > 1:
+ malformed_args.append(current_sanitized_arg_name)
+ command = command.replace(current_sanitized_arg_name, '')
+ if malformed_args:
+ return False, f'The value of {", ".join(malformed_args)} is malformed.'
+ return True, None
+
+
+def get_command_string(ids, pollingCommand, pollingCommandArgName, playbookId, dt, interval, timeout, tag, args_names,
+ args_values, extract_mode):
+
+ command_string = '''!GenericPollingScheduledTask ids="{}" pollingCommand="{}" pollingCommandArgName="{}"{} \
+ pendingIds="{}" interval="{}" timeout="{}" tag="{}" additionalPollingCommandArgNames="{}" \
+ additionalPollingCommandArgValues="{}"'''.format(ids.replace('"', r'\"'), pollingCommand,
+ pollingCommandArgName, playbookId,
+ dt.replace('"', r'\"'), interval, timeout,
+ tag, args_names, args_values)
+ if extract_mode:
+ command_string += f" auto-extract={extract_mode} extractMode={extract_mode}"
+
+ return command_string
def main(): # pragma: no cover
@@ -68,17 +96,18 @@ def main(): # pragma: no cover
pollingCommandArgName = args.get('pollingCommandArgName')
tag = args.get('tag')
playbookId = f' playbookId="{args.get("playbookId", "")}"'
+
interval = int(args.get('interval'))
timeout = int(args.get('timeout'))
+ extract_mode = args.get("extractMode")
+ if interval <= 0 or timeout <= 0:
+ return_error("Interval and timeout must be positive numbers")
args_names = args.get('additionalPollingCommandArgNames').strip() \
if args.get('additionalPollingCommandArgNames') else None
args_values = args.get('additionalPollingCommandArgValues').strip() \
if args.get('additionalPollingCommandArgValues') else None
- if interval <= 0 or timeout <= 0:
- return_error("Interval and timeout must be positive numbers")
-
# Verify correct dt path (does not verify condition!)
if not demisto.dt(demisto.context(), dt):
if not demisto.dt(demisto.context(), re.sub('\(.*\)', '', dt)):
@@ -87,12 +116,13 @@ def main(): # pragma: no cover
demisto.results("Warning: no ids matching the dt condition were found.\nVerify that the condition is correct and "
"that all ids have finished running.")
- command_string = '''!GenericPollingScheduledTask ids="{}" pollingCommand="{}" pollingCommandArgName="{}"{} \
- pendingIds="{}" interval="{}" timeout="{}" tag="{}" additionalPollingCommandArgNames="{}" \
- additionalPollingCommandArgValues="{}"'''.format(ids.replace('"', r'\"'), pollingCommand,
- pollingCommandArgName, playbookId,
- dt.replace('"', r'\"'), interval, timeout,
- tag, args_names, args_values)
+ command_string = get_command_string(ids, pollingCommand, pollingCommandArgName, playbookId, dt, interval, timeout, tag,
+ args_names, args_values, extract_mode)
+
+ command_sanitized, message = is_command_sanitized(command_string)
+ if not command_sanitized:
+ return_error(message)
+
schedule_command_args = {
'command': command_string,
'cron': f'*/{interval} * * * *',
diff --git a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.yml b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.yml
index 014551922068..97181578a62c 100644
--- a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.yml
+++ b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.yml
@@ -44,7 +44,9 @@ args:
- name: additionalPollingCommandArgValues
description: Values of the additional arguments for the polling command (e.g. value1,value2,...).
isArray: true
+- name: extractMode
+ description: Extraction mode for subsequent runs of the command.
scripttarget: 0
tests:
- Generic Polling Test
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.13.89009
diff --git a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling_test.py b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling_test.py
index fab4eb7b997b..ab9a29f393d6 100644
--- a/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling_test.py
+++ b/Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling_test.py
@@ -1,7 +1,8 @@
-
+import demistomock as demisto
from freezegun import freeze_time
import pytest
-from ScheduleGenericPolling import calculate_end_time, is_value_sanitized, parseIds
+from ScheduleGenericPolling import calculate_end_time, get_command_string, is_command_sanitized, is_value_sanitized, main, \
+ parseIds
@pytest.mark.parametrize('value, expected_result',
@@ -37,3 +38,256 @@ def test_calculate_end_time(value, expected_result):
def test_is_value_sanitized(value, expected_result):
result = is_value_sanitized(value)
assert result == expected_result
+
+
+def test_is_command_sanitized():
+
+ # Trivial - pass
+ command = "1234"
+ result = is_command_sanitized(command)
+ assert result == (True, None)
+
+ # Twice additionalPollingCommandArgNames - fail
+ command = "additionalPollingCommandArgNames additionalPollingCommandArgNames"
+ result = is_command_sanitized(command)
+ assert result == (False, 'The value of additionalPollingCommandArgNames is malformed.')
+
+ # 2 different args - pass
+ command = "pollingCommandArgName additionalPollingCommandArgNames"
+ result = is_command_sanitized(command)
+ assert result == (True, None)
+
+ # 2 and 2 - fail
+ command = "pollingCommandArgName additionalPollingCommandArgValues pollingCommandArgName additionalPollingCommandArgValues"
+ result = is_command_sanitized(command)
+ assert result == (False, 'The value of additionalPollingCommandArgValues, pollingCommandArgName is malformed.')
+
+ # 2 and 2 and 2 - fail
+ command = "pollingCommand pollingCommandArgName additionalPollingCommandArgValues pollingCommand " \
+ "pollingCommandArgName additionalPollingCommandArgValues"
+ result = is_command_sanitized(command)
+
+ result_message = 'The value of additionalPollingCommandArgValues, pollingCommandArgName, pollingCommand is malformed.'
+ assert result == (False, result_message)
+
+
+def test_get_command_string_pass():
+ """
+ Given
+ Sample input values
+ When
+ Calling get_command_string
+ Then
+ Test the command result structure
+ """
+ good_input = {
+ 'ids': "123",
+ 'pollingCommand': "jira-get-issue",
+ 'pollingCommandArgName': "issueId",
+ 'playbookId': "pi",
+ 'dt': "Ticket(val.Status != 'Done').Id",
+ 'interval': "3",
+ 'timeout': "5",
+ 'tag': "polling",
+ 'args_names': "my_arg_name",
+ 'args_values': "my_arg_value",
+ }
+
+ command_String = get_command_string(good_input.get('ids'),
+ good_input.get('pollingCommand'),
+ good_input.get('pollingCommandArgName'),
+ good_input.get('playbookId'),
+ good_input.get('dt'),
+ good_input.get('interval'),
+ good_input.get('timeout'),
+ good_input.get('tag'),
+ good_input.get('args_names'),
+ good_input.get('args_values'),
+ None,
+ )
+
+ expected_command = '!GenericPollingScheduledTask ids="123" pollingCommand="jira-get-issue" pollingCommandArgName=' \
+ '"issueId"pi pendingIds="Ticket(val.Status != \'Done\').Id" interval="3"' \
+ ' timeout="5" tag="polling" additionalPollingCommandArgNames="my_arg_name"' \
+ ' additionalPollingCommandArgValues="my_arg_value"'
+
+ assert command_String == expected_command
+ result = is_command_sanitized(command_String)
+
+ expected_result = (True, None)
+ assert result == expected_result
+
+
+def test_get_command_string_fail():
+ """
+ Given
+ Sample bad input values
+ When
+ Calling get_command_string
+ Then
+ Test the command result indicates the wrong input
+ """
+ fail_input = {
+ 'ids': "123",
+ 'pollingCommand': "jira-get-issue",
+ 'pollingCommandArgName': "issueId",
+ 'playbookId': "pi",
+ 'dt': "Ticket(val.Status != 'Done').Id",
+ 'interval': "3",
+ 'timeout': "5",
+ 'tag': "polling",
+ 'args_names': "my_arg_name",
+ 'args_values': "hihi\" pollingCommand=\"Set\" ids=\"payload\" pendingIds=\".='payload'\""
+ " pollingCommandArgName=\"key\" additionalPollingCommandArgNames=\"value\""
+ " additionalPollingCommandArgValues=\"bar",
+ }
+
+ command_String = get_command_string(fail_input.get('ids'),
+ fail_input.get('pollingCommand'),
+ fail_input.get('pollingCommandArgName'),
+ fail_input.get('playbookId'),
+ fail_input.get('dt'),
+ fail_input.get('interval'),
+ fail_input.get('timeout'),
+ fail_input.get('tag,'),
+ fail_input.get('args_names'),
+ fail_input.get('args_values'),
+ None,
+ )
+
+ expected_command_String = '!GenericPollingScheduledTask ids="123" pollingCommand="jira-get-issue" pollingCommandArgName=' \
+ '"issueId"pi pendingIds="Ticket(val.Status != \'Done\').Id" interval="3"' \
+ ' timeout="5" tag="None" additionalPollingCommandArgNames="my_arg_name"' \
+ ' additionalPollingCommandArgValues="hihi" pollingCommand="Set" ids="payload"' \
+ ' pendingIds=".=\'payload\'" pollingCommandArgName="key"' \
+ ' additionalPollingCommandArgNames="value" additionalPollingCommandArgValues="bar"'
+
+ assert command_String == expected_command_String
+ result = is_command_sanitized(command_String)
+
+ expected_result = (False, 'The value of additionalPollingCommandArgValues, additionalPollingCommandArgNames, '
+ 'pollingCommandArgName, pollingCommand is malformed.')
+ assert result == expected_result
+
+
+def test_get_command_string_with_extract_mode():
+ '''
+ Given:
+ - inputs with extractMode
+ When:
+ - run get_command_string function
+ Then:
+ - Ensure the `auto-extract` and `extractMode` is present in the command_string
+ '''
+ inputs = {
+ 'ids': "123",
+ 'pollingCommand': "jira-get-issue",
+ 'pollingCommandArgName': "issueId",
+ 'playbookId': "pi",
+ 'dt': "Ticket(val.Status != 'Done').Id",
+ 'interval': "3",
+ 'timeout': "5",
+ 'tag': "polling",
+ 'args_names': "my_arg_name",
+ 'args_values': "test",
+ 'extractMode': 'none',
+ }
+
+ command_string = get_command_string(
+ inputs['ids'],
+ inputs['pollingCommand'],
+ inputs['pollingCommandArgName'],
+ inputs['playbookId'],
+ inputs['dt'],
+ inputs['interval'],
+ inputs['timeout'],
+ inputs['tag'],
+ inputs['args_names'],
+ inputs['args_values'],
+ inputs['extractMode'],
+ )
+
+ assert 'auto-extract=none' in command_string
+ assert 'extractMode=none' in command_string
+
+
+def test_main_pass(mocker):
+ """
+ Given
+ Sample input values
+ When
+ Calling main
+ Then
+ Test the command result structure
+ """
+ good_input = {
+ 'ids': "123",
+ 'pollingCommand': "jira-get-issue",
+ 'pollingCommandArgName': "issueId",
+ 'playbookId': "pi",
+ 'dt': "Ticket(val.Status != 'Done').Id",
+ 'interval': "3",
+ 'timeout': "5",
+ 'tag': "polling",
+ 'additionalPollingCommandArgNames': "my_arg_name",
+ 'additionalPollingCommandArgValues': "my_arg_value",
+ }
+
+ mocker.patch.object(demisto, 'args', return_value=good_input)
+ # mocker.patch.object(demisto, 'command', return_value='threatstream-import-indicator-without-approval')
+
+ execute_command_mocker = mocker.patch("ScheduleGenericPolling.demisto.executeCommand")
+ mocker.patch("ScheduleGenericPolling.demisto.dt", return_value='abc')
+ main()
+
+ assert execute_command_mocker.call_count == 1
+
+ command = execute_command_mocker.call_args_list[0][0][1]['command']
+
+ expected_command = '!GenericPollingScheduledTask ids="123" pollingCommand="jira-get-issue" pollingCommandArgName=' \
+ '"issueId" playbookId="pi" pendingIds="Ticket(val.Status != \'Done\').Id" interval="3"' \
+ ' timeout="5" tag="polling" additionalPollingCommandArgNames="my_arg_name"' \
+ ' additionalPollingCommandArgValues="my_arg_value"'
+
+ assert command == expected_command
+
+
+def test_main_fail(mocker):
+ """
+ Given
+ Sample bad input values
+ When
+ Calling main
+ Then
+ Test the command result indicates the wrong input
+ """
+
+ fail_input = {
+ 'ids': "123",
+ 'pollingCommand': "jira-get-issue",
+ 'pollingCommandArgName': "issueId",
+ 'playbookId': "pi",
+ 'dt': "Ticket(val.Status != 'Done').Id",
+ 'interval': "3",
+ 'timeout': "5",
+ 'tag': "polling",
+ 'additionalPollingCommandArgNames': "my_arg_name",
+ 'additionalPollingCommandArgValues': "hihi\" pollingCommand=\"Set\" ids=\"payload\" pendingIds=\".='payload'\""
+ " pollingCommandArgName=\"key\" additionalPollingCommandArgNames=\"value\""
+ " additionalPollingCommandArgValues=\"bar",
+
+ }
+
+ mocker.patch.object(demisto, 'args', return_value=fail_input)
+
+ return_error_mock = mocker.patch("ScheduleGenericPolling.return_error")
+ execute_command_mocker = mocker.patch("ScheduleGenericPolling.demisto.executeCommand")
+ mocker.patch("ScheduleGenericPolling.demisto.dt", return_value='abc')
+ main()
+
+ assert return_error_mock.call_count == 1
+ assert execute_command_mocker.call_count == 1
+
+ err_msg = return_error_mock.call_args[0][0]
+ assert err_msg == 'The value of additionalPollingCommandArgValues, additionalPollingCommandArgNames, ' \
+ 'pollingCommandArgName, pollingCommand is malformed.'
diff --git a/Packs/CommonScripts/Scripts/SetAndHandleEmpty/README.md b/Packs/CommonScripts/Scripts/SetAndHandleEmpty/README.md
index 7fb09ebf4498..06097d47fce3 100644
--- a/Packs/CommonScripts/Scripts/SetAndHandleEmpty/README.md
+++ b/Packs/CommonScripts/Scripts/SetAndHandleEmpty/README.md
@@ -8,7 +8,7 @@ For more information, see the section about permissions here: [https://docs-cort
| **Name** | **Description** |
| --- | --- |
-| Script Type | python3 |
+| Script Type | javascript |
| Tags | Utility |
| Cortex XSOAR Version | 5.0.0 |
diff --git a/Packs/CommonScripts/Scripts/SetDateField/SetDateField.yml b/Packs/CommonScripts/Scripts/SetDateField/SetDateField.yml
index 9b7f34ace338..a14cdff04683 100644
--- a/Packs/CommonScripts/Scripts/SetDateField/SetDateField.yml
+++ b/Packs/CommonScripts/Scripts/SetDateField/SetDateField.yml
@@ -15,6 +15,6 @@ args:
description: "The name of the incident custom field of type date"
scripttarget: 0
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/Sleep/Sleep.js b/Packs/CommonScripts/Scripts/Sleep/Sleep.js
index fdb524ccc18a..214141fa5c05 100644
--- a/Packs/CommonScripts/Scripts/Sleep/Sleep.js
+++ b/Packs/CommonScripts/Scripts/Sleep/Sleep.js
@@ -1,22 +1,24 @@
-pollingThreshold = 300;
-
-if (isDemistoVersionGE('8.4.0', 649563)) {
- configThreshold = executeCommand('getServerConfig', {key: 'content.automation.sleep.threshold.seconds'});
- if (configThreshold[0] && !isError(configThreshold[0])) {
- pollingThreshold = parseInt(configThreshold[0].Contents);
+if (isDemistoVersionGE('8.0.0')) {
+ pollingThreshold = 300;
+ if (isDemistoVersionGE('8.4.0', 649563)) {
+ configThreshold = executeCommand('getServerConfig', {key: 'content.automation.sleep.threshold.seconds'});
+ if (configThreshold[0] && !isError(configThreshold[0])) {
+ pollingThreshold = parseInt(configThreshold[0].Contents);
+ }
}
-}
-
-if (parseInt(args.seconds) >= pollingThreshold) {
- // Polling implementation
- return {
- Type: entryTypes.note,
- Contents: 'Sleep will complete in ' + args.seconds + ' seconds',
- PollingCommand: 'Print',
- NextRun: args.seconds + '',
- PollingArgs: {value: 'Sleep completed in ' + args.seconds + ' seconds'},
- Timeout: String(parseInt(args.seconds) + 60)
+
+ if (parseInt(args.seconds) >= pollingThreshold) {
+ // Polling implementation
+ return {
+ Type: entryTypes.note,
+ Contents: 'Sleep will complete in ' + args.seconds + ' seconds',
+ PollingCommand: 'Print',
+ NextRun: args.seconds + '',
+ PollingArgs: {value: 'Sleep completed in ' + args.seconds + ' seconds'},
+ Timeout: String(parseInt(args.seconds) + 60)
+ }
}
+
}
// Sleep for the given number of seconds
diff --git a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.py b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.py
index 012e1e2039c5..05706d5bf1a5 100644
--- a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.py
+++ b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.py
@@ -9,7 +9,7 @@
from stix2 import Bundle, ExternalReference, Indicator, Vulnerability
from stix2 import AttackPattern, Campaign, Malware, Infrastructure, IntrusionSet, Report, ThreatActor
from stix2 import Tool, CourseOfAction
-from stix2.exceptions import InvalidValueError
+from stix2.exceptions import InvalidValueError, MissingPropertiesError
from typing import Any
from collections.abc import Callable
@@ -85,9 +85,9 @@
}
-def search_related_indicators(value: str) -> list[dict]:
+def search_related_indicators(value: str) -> list[dict]: # pragma: no cover
relationships = demisto.searchRelationships({"entities": [value]}).get("data", [])
-
+ demisto.debug(f"found {len(relationships)} relationships")
query = ""
for rel in relationships:
entity_a = rel.get("entityA", "").lower()
@@ -103,7 +103,10 @@ def search_related_indicators(value: str) -> list[dict]:
if not query:
demisto.info(f"No relevant relationship found for indicator: {value}")
return []
+ query = query[:-4]
+ demisto.debug(f"using query: {query}")
demisto_indicators = demisto.searchIndicators(query=query).get("iocs", [])
+ demisto.debug(f"found {len(demisto_indicators)} related indicators")
return demisto_indicators
@@ -370,13 +373,13 @@ def main():
demisto.info(f"Export failure exception: {traceback.format_exc()}")
continue
- except InvalidValueError:
+ except (InvalidValueError, MissingPropertiesError):
demisto.info(
f"Indicator type: {demisto_indicator_type}, with the value: {value} is not STIX compatible. Skipping.")
demisto.info(f"Export failure exception: {traceback.format_exc()}")
continue
- except InvalidValueError:
+ except (InvalidValueError, MissingPropertiesError):
demisto.info(
f"Indicator type: {demisto_indicator_type}, with the value: {value} is not STIX compatible. Skipping.")
demisto.info(f"Export failure exception: {traceback.format_exc()}")
diff --git a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
index 6ba4e15e5b7a..4f93207e2ee0 100644
--- a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
+++ b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
@@ -38,7 +38,7 @@ outputs:
description: The date/time that the indicator was last seen.
type: date
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.74403
+dockerimage: demisto/py3-tools:1.0.0.88283
subtype: python3
runas: DBotWeakRole
tests:
diff --git a/Packs/CommonScripts/Scripts/VerifyJSON/VerifyJSON.yml b/Packs/CommonScripts/Scripts/VerifyJSON/VerifyJSON.yml
index c7b2e341367b..e842c58b4200 100644
--- a/Packs/CommonScripts/Scripts/VerifyJSON/VerifyJSON.yml
+++ b/Packs/CommonScripts/Scripts/VerifyJSON/VerifyJSON.yml
@@ -18,7 +18,7 @@ tags:
- JSON
- Utility
type: powershell
-dockerimage: demisto/powershell:7.1.3.22028
+dockerimage: demisto/powershell:7.4.0.80528
fromversion: 5.5.0
tests:
- VerifyJSON - Test
diff --git a/Packs/CommonScripts/Scripts/displayUtilitiesResults/displayUtilitiesResults.yml b/Packs/CommonScripts/Scripts/displayUtilitiesResults/displayUtilitiesResults.yml
index 6c164a280547..e43c7863e7df 100644
--- a/Packs/CommonScripts/Scripts/displayUtilitiesResults/displayUtilitiesResults.yml
+++ b/Packs/CommonScripts/Scripts/displayUtilitiesResults/displayUtilitiesResults.yml
@@ -10,7 +10,7 @@ tags:
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
runas: DBotWeakRole
fromversion: 6.10.0
tests:
diff --git a/Packs/X509Certificate/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml b/Packs/CommonScripts/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml
similarity index 58%
rename from Packs/X509Certificate/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml
rename to Packs/CommonScripts/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml
index 4b9295256144..0baa072fa18f 100644
--- a/Packs/X509Certificate/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml
+++ b/Packs/CommonScripts/TestPlaybooks/playbook-X509Certificate_Test_Playbook.yml
@@ -1,25 +1,25 @@
-id: X509Certificate Test Playbook
+id: X509Certificate_Test_Playbook
version: -1
-contentitemexportablefields:
- contentitemfields: {}
-name: X509Certificate Test Playbook
-description: Test Playbook for X509Certificate Scripts
+name: X509Certificate_Test_Playbook
+description: Test Playbook for X509Certificate Scripts.
starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 0ba97589-575e-4130-8fe5-a51f4831b6e7
+ taskid: 013183af-2763-416b-88d0-830eeb3f4b9c
type: start
task:
- id: 0ba97589-575e-4130-8fe5-a51f4831b6e7
+ id: 013183af-2763-416b-88d0-830eeb3f4b9c
version: -1
name: ""
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- "1"
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -32,12 +32,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: f17e17ea-c941-4865-853f-c95093597b04
+ taskid: 932b9aaa-c167-496c-8002-81868b85d28c
type: regular
task:
- id: f17e17ea-c941-4865-853f-c95093597b04
+ id: 932b9aaa-c167-496c-8002-81868b85d28c
version: -1
name: Delete Context
description: Delete field from context
@@ -52,11 +54,8 @@ tasks:
scriptarguments:
all:
simple: "yes"
- index: {}
- key: {}
- keysToKeep: {}
- subplaybook: {}
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -69,12 +68,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: cecdcbbd-665b-46e3-8c92-1468d0f47b81
+ taskid: 003a9ead-efe5-4985-8c76-99b04991cadf
type: regular
task:
- id: cecdcbbd-665b-46e3-8c92-1468d0f47b81
+ id: 003a9ead-efe5-4985-8c76-99b04991cadf
version: -1
name: Create Certificate Indicator
description: commands.local.cmd.new.indicator
@@ -86,174 +87,23 @@ tasks:
'#none#':
- "10"
scriptarguments:
- accounttype: {}
- actor: {}
- admincountry: {}
- adminemail: {}
- adminname: {}
- adminphone: {}
- asn: {}
- assignedrole: {}
- assigneduser: {}
- associatedfilenames: {}
- associations: {}
- biosversion: {}
- campaign: {}
- category: {}
- certificatenames: {}
- certificatesignature: {}
- certificatevalidationchecks: {}
- city: {}
- costcenter: {}
- costcentercode: {}
- countryname: {}
- creationdate: {}
- customFields: {}
- cvedescription: {}
- cvemodified: {}
- cvss: {}
- department: {}
- description: {}
- detectionengines: {}
- devicemodel: {}
- dhcpserver: {}
- displayname: {}
- dns: {}
- domainidnname: {}
- domainname: {}
- domainreferringips: {}
- domainreferringsubnets: {}
- domainstatus: {}
- email: {}
- emailaddress: {}
- entryid: {}
- expirationdate: {}
- extension: {}
- feedrelatedindicators: {}
- fileextension: {}
- filetype: {}
- firstseenbysource: {}
- geocountry: {}
- geolocation: {}
- givenname: {}
- groups: {}
- hostname: {}
- imphash: {}
- indicatoridentification: {}
- internal: {}
- ipaddress: {}
- issuerdn: {}
- jobcode: {}
- jobfamily: {}
- jobfunction: {}
- lastseenbysource: {}
- leadership: {}
- location: {}
- locationregion: {}
- macaddress: {}
- malwarefamily: {}
- malwaretypes: {}
- manageremailaddress: {}
- managername: {}
md5:
simple: ${Certificate.MD5}
- memory: {}
- merge: {}
- mobilephone: {}
- name: {}
- namefield: {}
- nameservers: {}
- office365category: {}
- office365expressroute: {}
- office365required: {}
- operatingsystem: {}
- operatingsystemversion: {}
- organization: {}
- organizationalunitou: {}
- osversion: {}
- path: {}
pem:
simple: ${Certificate.PEM}
- personalemail: {}
- port: {}
- positivedetections: {}
- primarymotivation: {}
- processor: {}
- processors: {}
- publickey: {}
- published: {}
- quarantined: {}
- region: {}
- registrantcountry: {}
- registrantemail: {}
- registrantname: {}
- registrantphone: {}
- registrarabuseemail: {}
- registrarabusephone: {}
- registrarname: {}
- relateToIncident: {}
- relatedIncidents: {}
- reportedby: {}
- reputation: {}
- seenNow: {}
- serialnumber: {}
- service: {}
sha1:
simple: ${Certificate.SHA1}
sha256:
simple: ${Certificate.SHA256}
- sha512: {}
- signatureauthentihash: {}
- signaturecopyright: {}
- signaturedescription: {}
- signaturefileversion: {}
- signatureinternalname: {}
- signed: {}
- size: {}
source:
simple: DBot
- sourceTimeStamp: {}
- sourceoriginalseverity: {}
- spkisha256: {}
- ssdeep: {}
- state: {}
- stixaliases: {}
- stixdescription: {}
- stixgoals: {}
- stixid: {}
- stixismalwarefamily: {}
- stixkillchainphases: {}
- stixmalwaretypes: {}
- stixprimarymotivation: {}
- stixresourcelevel: {}
- stixroles: {}
- stixsecondarymotivations: {}
- stixsophistication: {}
- stixthreatactortypes: {}
- stixtooltypes: {}
- stixtoolversion: {}
- streetaddress: {}
- subdomains: {}
- subjectalternativenames: {}
- subjectdn: {}
- surname: {}
- tags: {}
- threattypes: {}
- title: {}
- trafficlightprotocol: {}
type:
- simple: Certificate
- updateddate: {}
- username: {}
- validitynotafter: {}
- validitynotbefore: {}
+ simple: X509 Certificate
value:
simple: ${Certificate.SHA256}
- workphone: {}
- xdrstatus: {}
- zipcode: {}
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -266,16 +116,17 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: e7d04c4b-8466-452e-853a-539e8aff2c76
+ taskid: be8688c9-c287-432a-8d69-73aba8bdd8fa
type: regular
task:
- id: e7d04c4b-8466-452e-853a-539e8aff2c76
+ id: be8688c9-c287-432a-8d69-73aba8bdd8fa
version: -1
name: CertificateExtract
- description: Extract fields from a certificate file and return the standard
- context
+ description: Extract fields from a certificate file and return the standard context
scriptName: CertificateExtract
type: regular
iscommand: false
@@ -284,8 +135,6 @@ tasks:
'#none#':
- "2"
scriptarguments:
- entry_id: {}
- input: {}
pem:
simple: |-
-----BEGIN CERTIFICATE-----
@@ -322,6 +171,7 @@ tasks:
-----END CERTIFICATE-----
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -334,12 +184,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 7706e680-e265-47f2-8db0-49ecfafbf11b
+ taskid: b36b893d-b856-416a-8cc0-5c144b5155cf
type: regular
task:
- id: 7706e680-e265-47f2-8db0-49ecfafbf11b
+ id: b36b893d-b856-416a-8cc0-5c144b5155cf
version: -1
name: Certificate Reputation
description: Enrich and calculate reputation of a Certificate indicator.
@@ -355,9 +207,9 @@ tasks:
simple: ${Certificate.SHA256}
update_checks:
simple: "true"
- update_indicator: {}
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -370,18 +222,22 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: 345ed494-22d6-4be2-8c1c-0576fcf6a8c8
+ taskid: 1e92a081-dd66-4a27-8aac-53441707fe32
type: title
task:
- id: 345ed494-22d6-4be2-8c1c-0576fcf6a8c8
+ id: 1e92a081-dd66-4a27-8aac-53441707fe32
version: -1
name: Done
type: title
iscommand: false
brand: ""
+ description: ''
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -394,17 +250,17 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 23f7189e-a3c2-4acd-8e77-041a24f27243
+ taskid: 9136192b-ef87-4407-846a-facb6a125151
type: regular
task:
- id: 23f7189e-a3c2-4acd-8e77-041a24f27243
+ id: 9136192b-ef87-4407-846a-facb6a125151
version: -1
name: Find Indicator
- description: Gets a list of indicator objects and the associated indicator outputs
- that match the specified query and filters. The results are returned in a
- structured data file.
+ description: Gets a list of indicator objects and the associated indicator outputs that match the specified query and filters. The results are returned in a structured data file.
scriptName: GetIndicatorsByQuery
type: regular
iscommand: false
@@ -413,31 +269,15 @@ tasks:
'#none#':
- "7"
scriptarguments:
- addRandomSalt: {}
- dontPopulateFields: {}
extend-context:
simple: FoundIndicator=.
- fieldsToHash: {}
- limit:
- simple: "1"
- offset: {}
populateFields:
simple: id
query:
- complex:
- root: Certificate
- accessor: SHA256
- transformers:
- - operator: concat
- args:
- prefix:
- value:
- simple: 'value:'
- suffix:
- value:
- simple: ' and -certificatevalidationchecks:"" and type:Certificate'
+ simple: ' type:"X509 Certificate"'
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -450,12 +290,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: a48bef9e-1759-4365-82ee-450a149b9f65
+ taskid: a85150f5-78dc-4aca-8c42-89d9b9e843fc
type: condition
task:
- id: a48bef9e-1759-4365-82ee-450a149b9f65
+ id: a85150f5-78dc-4aca-8c42-89d9b9e843fc
version: -1
name: Indicator found?
description: Check if find indicator found the updated indicator
@@ -477,6 +319,7 @@ tasks:
right:
value:
simple: "2"
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -489,17 +332,19 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: 5e99dadf-b4a8-4a80-84a7-e1e123b38b5d
+ taskid: 7988c60c-cd32-4839-85c9-de67141e22d2
type: regular
task:
- id: 5e99dadf-b4a8-4a80-84a7-e1e123b38b5d
+ id: 7988c60c-cd32-4839-85c9-de67141e22d2
version: -1
name: Create File
description: |
- Will create a file (using the given data input or entry ID) and upload it to current investigation war room.
- scriptName: FileCreateAndUpload
+ Creates a file (using the given data input or entry ID) and uploads it to the current investigation War Room.
+ scriptName: FileCreateAndUploadV2
type: regular
iscommand: false
brand: ""
@@ -546,13 +391,13 @@ tasks:
6vGpxPC0KxalQsB7P5A/B5j9Dbf/ATcy7cTqyRXdPr/6cNNzk2SRHXFzx7lHIkWb
35JJ9NHFDwQrc2c6YK6xnDUoZUq6lOsi
-----END CERTIFICATE-----
- entryId: {}
filename:
simple: panw.pem
ignore-outputs:
simple: "false"
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -565,16 +410,17 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: c14d6bfd-a3ac-4a65-8258-fcf3d373e480
+ taskid: bf62df8e-d4fa-45c6-8eb9-986d06ec0215
type: regular
task:
- id: c14d6bfd-a3ac-4a65-8258-fcf3d373e480
+ id: bf62df8e-d4fa-45c6-8eb9-986d06ec0215
version: -1
name: CertificateExtract
- description: Extract fields from a certificate file and return the standard
- context
+ description: Extract fields from a certificate file and return the standard context
scriptName: CertificateExtract
type: regular
iscommand: false
@@ -596,10 +442,9 @@ tasks:
value:
simple: panw.pem
accessor: EntryID
- input: {}
- pem: {}
reputationcalc: 1
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -612,17 +457,17 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: f0729bb4-a84a-45a7-8aed-878a150d76ee
+ taskid: c6153f7a-74db-4895-8f1f-a6180de93ea3
type: regular
task:
- id: f0729bb4-a84a-45a7-8aed-878a150d76ee
+ id: c6153f7a-74db-4895-8f1f-a6180de93ea3
version: -1
name: Set Indicator Type
- description: Change the type of the new Indicators to Certificate. This should
- not be needed, but it may happen that server autoextracted the sha256s of
- the certificates into File indicators *before* createNewIndicator was called.
+ description: Change the type of the new Indicators to Certificate. This should not be needed, but it may happen that server autoextracted the sha256s of the certificates into File indicators *before* createNewIndicator was called.
script: Builtin|||setIndicator
type: regular
iscommand: true
@@ -631,163 +476,12 @@ tasks:
'#none#':
- "11"
scriptarguments:
- accounttype: {}
- actor: {}
- admincountry: {}
- adminemail: {}
- adminname: {}
- adminphone: {}
- asn: {}
- assignedrole: {}
- assigneduser: {}
- associatedfilenames: {}
- associations: {}
- biosversion: {}
- campaign: {}
- category: {}
- certificatenames: {}
- certificatesignature: {}
- certificatevalidationchecks: {}
- city: {}
- costcenter: {}
- costcentercode: {}
- countryname: {}
- creationdate: {}
- customFields: {}
- cvedescription: {}
- cvemodified: {}
- cvss: {}
- department: {}
- description: {}
- detectionengines: {}
- devicemodel: {}
- dhcpserver: {}
- displayname: {}
- dns: {}
- domainidnname: {}
- domainname: {}
- domainreferringips: {}
- domainreferringsubnets: {}
- domainstatus: {}
- email: {}
- emailaddress: {}
- entryid: {}
- expiration: {}
- expirationdate: {}
- extension: {}
- feedrelatedindicators: {}
- fileextension: {}
- filetype: {}
- firstseenbysource: {}
- geocountry: {}
- geolocation: {}
- givenname: {}
- groups: {}
- hostname: {}
id:
simple: ${CreatedIndicator.ID}
- imphash: {}
- indicatoridentification: {}
- internal: {}
- ipaddress: {}
- issuerdn: {}
- jobcode: {}
- jobfamily: {}
- jobfunction: {}
- lastseenbysource: {}
- leadership: {}
- location: {}
- locationregion: {}
- macaddress: {}
- malwarefamily: {}
- malwaretypes: {}
- manageremailaddress: {}
- managername: {}
- md5: {}
- memory: {}
- mobilephone: {}
- name: {}
- namefield: {}
- nameservers: {}
- office365category: {}
- office365expressroute: {}
- office365required: {}
- operatingsystem: {}
- operatingsystemversion: {}
- organization: {}
- organizationalunitou: {}
- osversion: {}
- path: {}
- pem: {}
- personalemail: {}
- port: {}
- positivedetections: {}
- primarymotivation: {}
- processor: {}
- processors: {}
- publickey: {}
- published: {}
- quarantined: {}
- region: {}
- registrantcountry: {}
- registrantemail: {}
- registrantname: {}
- registrantphone: {}
- registrarabuseemail: {}
- registrarabusephone: {}
- registrarname: {}
- reportedby: {}
- reputation: {}
- serialnumber: {}
- service: {}
- sha1: {}
- sha256: {}
- sha512: {}
- signatureauthentihash: {}
- signaturecopyright: {}
- signaturedescription: {}
- signaturefileversion: {}
- signatureinternalname: {}
- signed: {}
- size: {}
- sourceoriginalseverity: {}
- spkisha256: {}
- ssdeep: {}
- state: {}
- stixaliases: {}
- stixdescription: {}
- stixgoals: {}
- stixid: {}
- stixismalwarefamily: {}
- stixkillchainphases: {}
- stixmalwaretypes: {}
- stixprimarymotivation: {}
- stixresourcelevel: {}
- stixroles: {}
- stixsecondarymotivations: {}
- stixsophistication: {}
- stixthreatactortypes: {}
- stixtooltypes: {}
- stixtoolversion: {}
- streetaddress: {}
- subdomains: {}
- subjectalternativenames: {}
- subjectdn: {}
- surname: {}
- tags: {}
- threattypes: {}
- title: {}
- trafficlightprotocol: {}
type:
- simple: Certificate
- updateddate: {}
- username: {}
- validitynotafter: {}
- validitynotbefore: {}
- value: {}
- workphone: {}
- zipcode: {}
+ simple: X509 Certificate
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -800,12 +494,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: aef8d0f4-b361-4751-8f2a-47005d557adf
+ taskid: 40c085f4-eee4-454a-8db3-5b0f794f65d8
type: regular
task:
- id: aef8d0f4-b361-4751-8f2a-47005d557adf
+ id: 40c085f4-eee4-454a-8db3-5b0f794f65d8
version: -1
name: Sleep for 10 seconds
description: Sleep for X seconds
@@ -820,6 +516,7 @@ tasks:
seconds:
simple: "10"
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -832,12 +529,14 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: 9ef9e56d-5751-4107-8d73-1b07d1203159
+ taskid: 849f556f-f525-4879-8cfa-2b5a6e79a5cf
type: regular
task:
- id: 9ef9e56d-5751-4107-8d73-1b07d1203159
+ id: 849f556f-f525-4879-8cfa-2b5a6e79a5cf
version: -1
name: Sleep for 15 seconds
description: Sleep for X seconds
@@ -852,6 +551,7 @@ tasks:
seconds:
simple: "15"
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
@@ -864,6 +564,8 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
diff --git a/Packs/CommonScripts/pack_metadata.json b/Packs/CommonScripts/pack_metadata.json
index c5f5b73d6cc4..ff1bffdaab87 100644
--- a/Packs/CommonScripts/pack_metadata.json
+++ b/Packs/CommonScripts/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Scripts",
"description": "Frequently used scripts pack.",
"support": "xsoar",
- "currentVersion": "1.13.39",
+ "currentVersion": "1.14.19",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonTypes/.pack-ignore b/Packs/CommonTypes/.pack-ignore
index 9a1cb8fd38df..edb4f94a2cf1 100644
--- a/Packs/CommonTypes/.pack-ignore
+++ b/Packs/CommonTypes/.pack-ignore
@@ -303,6 +303,9 @@ ignore=IF115
[file:incidentfield-Last_Mirrored_Time_Stamp.json]
ignore=IF115
+[file:incidentfield-Alert_tags.json]
+ignore=IF100
+
[known_words]
SysAid
longText
@@ -329,4 +332,7 @@ mailto
Misconfiguration
CloudTrail
ThreatCommand
-Cyberint
\ No newline at end of file
+Cyberint
+DN
+PEM
+SPKI
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Additional_Email_Addresses.json b/Packs/CommonTypes/IncidentFields/incidentfield-Additional_Email_Addresses.json
new file mode 100644
index 000000000000..e6e99ea04724
--- /dev/null
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Additional_Email_Addresses.json
@@ -0,0 +1,30 @@
+{
+ "associatedToAll": true,
+ "caseInsensitive": true,
+ "cliName": "additionalemailaddresses",
+ "closeForm": false,
+ "content": true,
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_additionalemailaddresses",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Additional Email Addresses",
+ "neverSetAsRequired": false,
+ "openEnded": true,
+ "ownerOnly": false,
+ "propagationLabels": [
+ "all"
+ ],
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "multiSelect",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Alert_tags.json b/Packs/CommonTypes/IncidentFields/incidentfield-Alert_tags.json
new file mode 100644
index 000000000000..e9088c521a1c
--- /dev/null
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Alert_tags.json
@@ -0,0 +1,27 @@
+{
+ "associatedToAll": true,
+ "caseInsensitive": true,
+ "cliName": "alerttags",
+ "closeForm": false,
+ "content": true,
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_alerttags",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Alert tags",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.0.0"
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Containment_SLA.json b/Packs/CommonTypes/IncidentFields/incidentfield-Containment_SLA.json
index 67940cd92915..b5657b7262e3 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Containment_SLA.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Containment_SLA.json
@@ -24,7 +24,7 @@
"threshold": 3,
"type": "timer",
"unmapped": false,
- "unsearchable": true,
+ "unsearchable": false,
"useAsKpi": false,
"version": -1,
"fromVersion": "5.0.0"
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Detection_ID.json b/Packs/CommonTypes/IncidentFields/incidentfield-Detection_ID.json
index de906b84cab5..ffddf447c29b 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Detection_ID.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Detection_ID.json
@@ -24,7 +24,8 @@
"hidden": false,
"associatedTypes": [
"ExtraHop Detection",
- "Vectra Detection"
+ "Vectra Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"systemAssociatedTypes": null,
"associatedToAll": false,
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Display_Name.json b/Packs/CommonTypes/IncidentFields/incidentfield-Display_Name.json
index 99ecb4ca7d8f..76164a439998 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Display_Name.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Display_Name.json
@@ -1,37 +1,38 @@
-{
- "associatedToAll": false,
- "associatedTypes": [
- "IAM - New Hire",
- "IAM - Terminate User",
- "IAM - Update User",
- "User Profile",
- "IAM - Sync User",
- "IAM - Rehire User",
- "Vectra Account",
- "CrowdStrike Falcon IDP Detection"
- ],
- "caseInsensitive": true,
- "cliName": "displayname",
- "closeForm": false,
- "content": true,
- "description": "Display Name",
- "editForm": true,
- "group": 0,
- "hidden": false,
- "id": "incident_displayname",
- "isReadOnly": false,
- "locked": false,
- "name": "Display Name",
- "neverSetAsRequired": false,
- "ownerOnly": false,
- "required": false,
- "sla": 0,
- "system": false,
- "threshold": 72,
- "type": "shortText",
- "unmapped": false,
- "unsearchable": false,
- "useAsKpi": false,
- "version": -1,
- "fromVersion": "5.0.0"
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "IAM - New Hire",
+ "IAM - Terminate User",
+ "IAM - Update User",
+ "User Profile",
+ "IAM - Sync User",
+ "IAM - Rehire User",
+ "Vectra Account",
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "caseInsensitive": true,
+ "cliName": "displayname",
+ "closeForm": false,
+ "content": true,
+ "description": "Display Name",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_displayname",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Display Name",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "5.0.0"
}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Email.json b/Packs/CommonTypes/IncidentFields/incidentfield-Email.json
index c973540f9101..0a1ad43799e5 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Email.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Email.json
@@ -13,7 +13,8 @@
"IAM - Rehire User",
"IAM - AD User Activation",
"IAM - AD User Deactivation",
- "Vectra Account"
+ "Vectra Account",
+ "CrowdStrike Falcon Mobile Detection"
],
"caseInsensitive": true,
"cliName": "email",
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Incident_Duration.json b/Packs/CommonTypes/IncidentFields/incidentfield-Incident_Duration.json
index 78178e08688c..ce69b860b937 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Incident_Duration.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Incident_Duration.json
@@ -21,7 +21,7 @@
"openEnded": false,
"associatedToAll": true,
"unmapped": false,
- "unsearchable": true,
+ "unsearchable": false,
"caseInsensitive": true,
"sla": 0,
"threshold": 0,
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Last_Update_Time.json b/Packs/CommonTypes/IncidentFields/incidentfield-Last_Update_Time.json
index 75dd2b40ed44..9145ee16eb1c 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Last_Update_Time.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Last_Update_Time.json
@@ -38,7 +38,8 @@
"FreshworksFreshservice Change Request",
"Graph Security Alert",
"CrowdStrike Falcon IDP Detection",
- "Cyberint Incident"
+ "Cyberint Incident",
+ "CrowdStrike Falcon Mobile Detection"
],
"breachScript": "",
"caseInsensitive": true,
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Triage_SLA.json b/Packs/CommonTypes/IncidentFields/incidentfield-Triage_SLA.json
index c08210b64727..b42391580b55 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Triage_SLA.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Triage_SLA.json
@@ -24,7 +24,7 @@
"threshold": 72,
"type": "timer",
"unmapped": false,
- "unsearchable": true,
+ "unsearchable": false,
"useAsKpi": false,
"version": -1,
"fromVersion": "5.0.0"
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-User_Risk_Level.json b/Packs/CommonTypes/IncidentFields/incidentfield-User_Risk_Level.json
new file mode 100644
index 000000000000..613f5a582535
--- /dev/null
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-User_Risk_Level.json
@@ -0,0 +1,30 @@
+{
+ "associatedToAll": true,
+ "caseInsensitive": true,
+ "cliName": "userrisklevel",
+ "closeForm": false,
+ "content": true,
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_userrisklevel",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "User Risk Level",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.10.0",
+ "marketplaces": [
+ "xsoar"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-Vendor_Product.json b/Packs/CommonTypes/IncidentFields/incidentfield-Vendor_Product.json
index 9a13bf0dce61..055bbb71d557 100644
--- a/Packs/CommonTypes/IncidentFields/incidentfield-Vendor_Product.json
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-Vendor_Product.json
@@ -29,7 +29,8 @@
"FireEye NX Alert",
"FireEye NX IPS Event",
"Microsoft Sentinel Incident",
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Names.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Names.json
similarity index 95%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Names.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Names.json
index 8042d836704e..d878c49bd1d2 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Names.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Names.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "certificatenames",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Signature.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Signature.json
similarity index 98%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Signature.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Signature.json
index 97f5609e5baa..75928029aaa5 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Signature.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Signature.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "certificatesignature",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json
similarity index 96%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json
index 5bfbc921fae3..b579041c37e0 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Certificate_Validation_Checks.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "certificatevalidationchecks",
diff --git a/Packs/CommonTypes/IndicatorFields/indicatorfield-Domains.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Domains.json
new file mode 100644
index 000000000000..686d17e35a0f
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Domains.json
@@ -0,0 +1,46 @@
+{
+ "id": "indicator_domains",
+ "version": -1,
+ "modified": "2023-09-28T09:06:54.147918809Z",
+ "name": "Domains",
+ "ownerOnly": false,
+ "cliName": "domains",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": true,
+ "associatedToAll": true,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "domain",
+ "displayName": "Domain",
+ "type": "shortText",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {}
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
\ No newline at end of file
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Extension.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Extension.json
similarity index 97%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Extension.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Extension.json
index ed785e1f6254..bd7c5402b60c 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Extension.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Extension.json
@@ -1,7 +1,6 @@
{
- "associatedToAll": false,
+ "associatedToAll": true,
"associatedTypes": [
- "Certificate"
],
"caseInsensitive": true,
"cliName": "extension",
diff --git a/Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer.json
new file mode 100644
index 000000000000..9ba311c61c4a
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer.json
@@ -0,0 +1,92 @@
+{
+ "id": "indicator_issuer",
+ "version": -1,
+ "modified": "2023-09-28T11:38:34.067130424Z",
+ "name": "Issuer",
+ "ownerOnly": false,
+ "cliName": "issuer",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": false,
+ "associatedTypes": [
+ "X509 Certificate"
+ ],
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "title",
+ "displayName": "Title",
+ "type": "singleSelect",
+ "orgType": "singleSelect",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": [
+ "",
+ "C",
+ "CN",
+ "L",
+ "O",
+ "OU",
+ "ST",
+ "aggregated",
+ "emailAddress"
+ ]
+ },
+ {
+ "key": "data",
+ "displayName": "Data",
+ "type": "shortText",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {
+ "title": "C"
+ },
+ {
+ "title": "CN"
+ },
+ {
+ "title": "L"
+ },
+ {
+ "title": "O"
+ },
+ {
+ "title": "OU"
+ },
+ {
+ "title": "ST"
+ },
+ {
+ "title": "emailAddress"
+ }
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Issuer_DN.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer_DN.json
similarity index 96%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Issuer_DN.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer_DN.json
index 7212eedb1a40..f058ab45bfa4 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Issuer_DN.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Issuer_DN.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "issuerdn",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-PEM.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-PEM.json
similarity index 95%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-PEM.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-PEM.json
index 9d5933c3bc65..e63731b2ca3a 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-PEM.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-PEM.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "pem",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Public_Key.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Public_Key.json
similarity index 99%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Public_Key.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Public_Key.json
index 685c3726538b..de7d17c37777 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Public_Key.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Public_Key.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "publickey",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-SPKI_SHA256.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-SPKI_SHA256.json
similarity index 96%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-SPKI_SHA256.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-SPKI_SHA256.json
index 19bfb70f0fc1..466d3a0a126b 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-SPKI_SHA256.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-SPKI_SHA256.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "spkisha256",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Serial_Number.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Serial_Number.json
similarity index 84%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Serial_Number.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Serial_Number.json
index 8bf601194216..c0dbb409b694 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Serial_Number.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Serial_Number.json
@@ -1,29 +1,28 @@
{
- "associatedToAll": false,
- "associatedTypes": [
- "Certificate"
- ],
- "caseInsensitive": true,
+ "id": "indicator_serialnumber",
+ "version": -1,
+ "modified": "2023-09-28T07:57:43.363424092Z",
+ "name": "Serial Number",
+ "ownerOnly": false,
"cliName": "serialnumber",
+ "type": "shortText",
"closeForm": false,
- "content": true,
"editForm": true,
- "group": 2,
- "hidden": false,
- "id": "indicator_serialnumber",
+ "required": false,
+ "neverSetAsRequired": false,
"isReadOnly": false,
+ "useAsKpi": false,
"locked": false,
- "name": "Serial Number",
- "neverSetAsRequired": false,
- "ownerOnly": false,
- "required": false,
- "sla": 0,
"system": false,
- "threshold": 72,
- "type": "shortText",
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": true,
"unmapped": false,
"unsearchable": false,
- "useAsKpi": false,
- "version": -1,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
"fromVersion": "6.0.0"
}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IndicatorFields/indicatorfield-Signature_Algorithm.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Signature_Algorithm.json
new file mode 100644
index 000000000000..3a7d3e3525aa
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Signature_Algorithm.json
@@ -0,0 +1,28 @@
+{
+ "id": "indicator_signaturealgorithm",
+ "version": -1,
+ "modified": "2023-09-28T11:29:20.2163874Z",
+ "name": "Signature Algorithm",
+ "ownerOnly": false,
+ "cliName": "signaturealgorithm",
+ "type": "tagsSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": true,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject.json
new file mode 100644
index 000000000000..3b922cd668bf
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject.json
@@ -0,0 +1,92 @@
+{
+ "id": "indicator_subject",
+ "version": -1,
+ "modified": "2023-09-28T11:38:19.851120204Z",
+ "name": "Subject",
+ "ownerOnly": false,
+ "cliName": "subject",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": false,
+ "associatedTypes": [
+ "X509 Certificate"
+ ],
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "title",
+ "displayName": "Title",
+ "type": "singleSelect",
+ "orgType": "singleSelect",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": [
+ "",
+ "C",
+ "CN",
+ "L",
+ "O",
+ "OU",
+ "ST",
+ "aggregated",
+ "emailAddress"
+ ]
+ },
+ {
+ "key": "data",
+ "displayName": "Data",
+ "type": "shortText",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {
+ "title": "C"
+ },
+ {
+ "title": "CN"
+ },
+ {
+ "title": "L"
+ },
+ {
+ "title": "O"
+ },
+ {
+ "title": "OU"
+ },
+ {
+ "title": "ST"
+ },
+ {
+ "title": "emailAddress"
+ }
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_Alternative_Names.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_Alternative_Names.json
similarity index 98%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_Alternative_Names.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_Alternative_Names.json
index 165f3a37b365..bec965fa7824 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_Alternative_Names.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_Alternative_Names.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "subjectalternativenames",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_DN.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_DN.json
similarity index 96%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_DN.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_DN.json
index c669e1c4e9f7..faf257e83f47 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Subject_DN.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Subject_DN.json
@@ -1,7 +1,7 @@
{
"associatedToAll": false,
"associatedTypes": [
- "Certificate"
+ "X509 Certificate"
],
"caseInsensitive": true,
"cliName": "subjectdn",
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_After.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_After.json
similarity index 86%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_After.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_After.json
index 2f247878da66..f895f796bf1e 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_After.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_After.json
@@ -1,30 +1,31 @@
{
- "associatedToAll": false,
- "associatedTypes": [
- "Certificate"
- ],
- "caseInsensitive": true,
+ "id": "indicator_validitynotafter",
+ "version": -1,
+ "modified": "2023-09-28T08:00:15.145651049Z",
+ "name": "Validity Not After",
+ "ownerOnly": false,
"cliName": "validitynotafter",
+ "type": "date",
"closeForm": false,
- "content": true,
- "description": "End of certificate validity period",
"editForm": true,
- "group": 2,
- "hidden": false,
- "id": "indicator_validitynotafter",
+ "required": false,
+ "neverSetAsRequired": false,
"isReadOnly": false,
+ "useAsKpi": false,
"locked": false,
- "name": "Validity Not After",
- "neverSetAsRequired": false,
- "ownerOnly": false,
- "required": false,
- "sla": 0,
"system": false,
- "threshold": 72,
- "type": "date",
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": false,
+ "associatedTypes": [
+ "X509 Certificate"
+ ],
"unmapped": false,
"unsearchable": false,
- "useAsKpi": false,
- "version": -1,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
"fromVersion": "6.0.0"
}
\ No newline at end of file
diff --git a/Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_Before.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_Before.json
similarity index 86%
rename from Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_Before.json
rename to Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_Before.json
index 6cfeedcbab02..ab5ed32b8755 100644
--- a/Packs/X509Certificate/IndicatorFields/indicatorfield-Validity_Not_Before.json
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-Validity_Not_Before.json
@@ -1,30 +1,31 @@
{
- "associatedToAll": false,
- "associatedTypes": [
- "Certificate"
- ],
- "caseInsensitive": true,
+ "id": "indicator_validitynotbefore",
+ "version": -1,
+ "modified": "2023-09-28T08:00:15.145651049Z",
+ "name": "Validity Not Before",
+ "ownerOnly": false,
"cliName": "validitynotbefore",
+ "type": "date",
"closeForm": false,
- "content": true,
- "description": "Starting of certificate validity period",
"editForm": true,
- "group": 2,
- "hidden": false,
- "id": "indicator_validitynotbefore",
+ "required": false,
+ "neverSetAsRequired": false,
"isReadOnly": false,
+ "useAsKpi": false,
"locked": false,
- "name": "Validity Not Before",
- "neverSetAsRequired": false,
- "ownerOnly": false,
- "required": false,
- "sla": 0,
"system": false,
- "threshold": 72,
- "type": "date",
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": false,
+ "associatedTypes": [
+ "X509 Certificate"
+ ],
"unmapped": false,
"unsearchable": false,
- "useAsKpi": false,
- "version": -1,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
"fromVersion": "6.0.0"
}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IndicatorFields/indicatorfield-X509_V3_Extensions.json b/Packs/CommonTypes/IndicatorFields/indicatorfield-X509_V3_Extensions.json
new file mode 100644
index 000000000000..a4e33c94c8f2
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorFields/indicatorfield-X509_V3_Extensions.json
@@ -0,0 +1,91 @@
+{
+ "id": "indicator_x509v3extensions",
+ "version": -1,
+ "modified": "2023-09-28T09:05:21.07895269Z",
+ "name": "X.509 v3 Extensions",
+ "ownerOnly": false,
+ "cliName": "x509v3extensions",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 2,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": false,
+ "associatedTypes": [
+ "X509 Certificate"
+ ],
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "title",
+ "displayName": "Title",
+ "type": "shortText",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ },
+ {
+ "key": "data",
+ "displayName": "Data",
+ "type": "shortText",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {
+ "title": "Authority Info Access"
+ },
+ {
+ "title": "Authority Key Identifier"
+ },
+ {
+ "title": "Basic Constraints"
+ },
+ {
+ "title": "Certificate Policies"
+ },
+ {
+ "title": "CRL Distribution Points"
+ },
+ {
+ "title": "CTL Poison Byte"
+ },
+ {
+ "title": "Extended Key Usage"
+ },
+ {
+ "title": "Key Usage"
+ },
+ {
+ "title": "Subject Alt Name"
+ },
+ {
+ "title": "Subject Key Identifier"
+ }
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
diff --git a/Packs/CommonTypes/IndicatorTypes/reputation-X509_Certificate.json b/Packs/CommonTypes/IndicatorTypes/reputation-X509_Certificate.json
new file mode 100644
index 000000000000..418516365d32
--- /dev/null
+++ b/Packs/CommonTypes/IndicatorTypes/reputation-X509_Certificate.json
@@ -0,0 +1,18 @@
+{
+ "id": "X509 Certificate",
+ "version": -1,
+ "modified": "2023-09-28T11:07:00.488622222Z",
+ "shouldCommit": false,
+ "regex": "",
+ "details": "X509 Certificate",
+ "prevDetails": "X509 Certificate",
+ "system": false,
+ "locked": false,
+ "disabled": false,
+ "file": false,
+ "updateAfter": 0,
+ "mergeContext": false,
+ "expiration": 0,
+ "layout": "X509 Certificate",
+ "fromVersion": "6.0.0"
+}
diff --git a/Packs/CommonTypes/Layouts/layoutscontainer-X509_Certificate.json b/Packs/CommonTypes/Layouts/layoutscontainer-X509_Certificate.json
new file mode 100644
index 000000000000..3f471e65b197
--- /dev/null
+++ b/Packs/CommonTypes/Layouts/layoutscontainer-X509_Certificate.json
@@ -0,0 +1,864 @@
+{
+ "description": "X509 Certificate Indicator Layout",
+ "edit": {
+ "sections": [
+ {
+ "description": "",
+ "fields": [
+ {
+ "fieldId": "indicator_value",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_indicatortype",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_score",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_expiration",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_comment",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_investigationids",
+ "isVisible": true
+ }
+ ],
+ "isVisible": true,
+ "name": "Basic Information",
+ "query": null,
+ "queryType": "",
+ "readOnly": false,
+ "type": "basicInformationSection"
+ },
+ {
+ "description": "",
+ "fields": [
+ {
+ "fieldId": "indicator_stixid",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_tags",
+ "isVisible": true
+ }
+ ],
+ "isVisible": true,
+ "name": "Custom fields - core",
+ "query": null,
+ "queryType": "",
+ "readOnly": false,
+ "type": ""
+ },
+ {
+ "description": "",
+ "fields": [
+ {
+ "fieldId": "indicator_domains",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_validitynotbefore",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_validitynotafter",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_serialnumber",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_signaturealgorithm",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_issuer",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_subject",
+ "isVisible": true
+ },
+ {
+ "fieldId": "indicator_x509v3extensions",
+ "isVisible": true
+ }
+ ],
+ "isVisible": true,
+ "name": "Custom fields - unique",
+ "query": null,
+ "queryType": "",
+ "readOnly": false,
+ "type": ""
+ }
+ ]
+ },
+ "group": "indicator",
+ "id": "X509 Certificate",
+ "indicatorsDetails": {
+ "tabs": [
+ {
+ "id": "main",
+ "name": "Summary",
+ "sections": [
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "hideName": false,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-9fa550a0-a2e6-11e9-a74b-a79deb2f5d2a",
+ "isVisible": true,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "timestamp",
+ "height": 22,
+ "id": "0030d590-7d0d-11ec-abe4-4fa9d22d9889",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "modified",
+ "height": 22,
+ "id": "790baae0-8a4f-11ec-a1ef-6369524b39c7",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "serialnumber",
+ "height": 22,
+ "id": "e22083e0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 2,
+ "listId": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-8c4ad450-8a4a-11ec-a1ef-6369524b39c7",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "stixid",
+ "height": 22,
+ "id": "ea9dea60-7d0c-11ec-abe4-4fa9d22d9889",
+ "index": 3,
+ "listId": "main-cveRep-main-9fa550a0-a2e6-11e9-a74b-a79deb2f5d2a",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "validitynotbefore",
+ "height": 22,
+ "id": "d9288670-5df0-11ee-b502-4fc78bd978a3",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "validitynotafter",
+ "height": 22,
+ "id": "db76e340-5df0-11ee-b502-4fc78bd978a3",
+ "index": 5,
+ "listId": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-8c4ad450-8a4a-11ec-a1ef-6369524b39c7",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "signaturealgorithm",
+ "height": 22,
+ "id": "e8ed3ce0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 6,
+ "listId": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-8c4ad450-8a4a-11ec-a1ef-6369524b39c7",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "tags",
+ "height": 22,
+ "id": "083ac1b0-4e75-11ea-8bf6-67db400d7da5",
+ "index": 6,
+ "listId": "main-ffe4f7b0-4e74-11ea-8bf6-67db400d7da5",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "args": {
+ "field": {
+ "simple": "tags"
+ }
+ },
+ "buttonClass": "secondary",
+ "dropEffect": "move",
+ "endCol": 1,
+ "fieldId": "",
+ "height": 44,
+ "id": "b6cab200-9db5-11ea-a17b-1d06fd239c52",
+ "index": 7,
+ "listId": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-c5fc5760-ef29-11ed-800c-4b027a433365",
+ "name": "Add tags",
+ "scriptId": "Builtin|||appendIndicatorField",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {
+ "field": {
+ "simple": "tags"
+ }
+ },
+ "buttonClass": "secondary",
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "id": "f4d78d70-9db5-11ea-a17b-1d06fd239c52",
+ "index": 7,
+ "listId": "main-ffe4f7b0-4e74-11ea-8bf6-67db400d7da5",
+ "name": "Remove tags",
+ "scriptId": "Builtin|||removeIndicatorField",
+ "sectionItemType": "button",
+ "startCol": 1
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Certificate Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "columns": [
+ {
+ "displayed": true,
+ "isDefault": false,
+ "key": "id",
+ "width": 110
+ },
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "name",
+ "width": 120
+ },
+ {
+ "displayed": true,
+ "isDefault": false,
+ "key": "severity",
+ "width": 80
+ },
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "type",
+ "width": 104
+ },
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "status",
+ "width": 80
+ },
+ {
+ "displayed": true,
+ "isDefault": false,
+ "key": "owner",
+ "width": 160
+ }
+ ],
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-374a4120-ac5d-11e9-82f1-b168a5ac3417",
+ "isVisible": true,
+ "items": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Related Incidents",
+ "readOnly": true,
+ "static": false,
+ "type": "relatedIncidents",
+ "w": 3,
+ "x": 0,
+ "y": 11
+ },
+ {
+ "columns": [
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "created",
+ "width": 150
+ },
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "content",
+ "width": 200
+ },
+ {
+ "displayed": true,
+ "isDefault": true,
+ "key": "source",
+ "width": 100
+ }
+ ],
+ "h": 4,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-7f557450-3d56-11ea-9dff-c57e8949d2f9",
+ "items": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Timeline",
+ "static": false,
+ "type": "indicatorTimeline",
+ "w": 1,
+ "x": 2,
+ "y": 1
+ },
+ {
+ "h": 6,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-174e7bd0-3d57-11ea-9dff-c57e8949d2f9",
+ "items": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Comments",
+ "static": false,
+ "type": "comments",
+ "w": 1,
+ "x": 2,
+ "y": 5
+ },
+ {
+ "displayType": "ROW",
+ "h": 1,
+ "hideName": false,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-fafa73b0-a9af-11ea-a4b1-a5bbd1788c5a",
+ "items": [
+ {
+ "args": {},
+ "endCol": 1,
+ "fieldId": "",
+ "height": 44,
+ "id": "fe326d30-a9af-11ea-a4b1-a5bbd1788c5a",
+ "index": 0,
+ "name": "Enrich indicator",
+ "scriptId": "Builtin|||enrichIndicators",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {},
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "id": "64422e90-bf71-11ea-b085-3f354e53d3e6",
+ "index": 0,
+ "listId": "main-fafa73b0-a9af-11ea-a4b1-a5bbd1788c5a",
+ "name": "Expire indicator",
+ "scriptId": "Builtin|||expireIndicators",
+ "sectionItemType": "button",
+ "startCol": 1
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Actions",
+ "static": false,
+ "w": 1,
+ "x": 2,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-cveRep-main-cba17b40-7d0c-11ec-abe4-4fa9d22d9889",
+ "items": [],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Relationships",
+ "static": false,
+ "type": "relationshipsTable",
+ "w": 2,
+ "x": 0,
+ "y": 8
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideItemTitleOnlyOne": true,
+ "hideName": true,
+ "i": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-8c4ad450-8a4a-11ec-a1ef-6369524b39c7",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "domains",
+ "height": 106,
+ "id": "b22f1bb0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Domains",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 0
+ },
+ {
+ "columns": [
+ {
+ "displayName": "Title",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "title",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 205
+ },
+ {
+ "displayName": "Data",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "data",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "displayType": "ROW",
+ "h": 5,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "i": "main-a79c78a0-ff8b-11ed-9881-51d8dd00e8e3",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "x509v3extensions",
+ "height": 22,
+ "id": "897b56c0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "V3 Extension",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 3
+ },
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "i": "main-a7eab630-5df2-11ee-b3e8-b5d58cb9a84a",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "issuer",
+ "height": 22,
+ "id": "e2538fe0-5df2-11ee-b3e8-b5d58cb9a84a",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Issuer",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 5
+ },
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "i": "main-a8c8ba20-5df2-11ee-b3e8-b5d58cb9a84a",
+ "items": [
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "subject",
+ "height": 106,
+ "id": "b4cf3010-5df2-11ee-b3e8-b5d58cb9a84a",
+ "index": 0,
+ "listId": "main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-b891551f-96df-4348-8c29-a243172176a7-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-dad5aa4c-8d37-4211-8f73-49df65e4191d-main-8c4ad450-8a4a-11ec-a1ef-6369524b39c7",
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Subject",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 2
+ }
+ ],
+ "type": "custom"
+ },
+ {
+ "hidden": false,
+ "id": "kopixekjqw",
+ "name": "Additional Details",
+ "sections": [
+ {
+ "displayType": "ROW",
+ "h": 4,
+ "i": "b891551f-96df-4348-8c29-a243172176a7-kopixekjqw-27fffce0-beb4-11ea-b519-21bdfb6f5a43",
+ "items": null,
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "minW": 3,
+ "moved": false,
+ "name": "Custom Details",
+ "static": false,
+ "type": "customDetails",
+ "w": 3,
+ "x": 0,
+ "y": 4
+ },
+ {
+ "displayType": "ROW",
+ "h": 4,
+ "hideName": false,
+ "i": "b891551f-96df-4348-8c29-a243172176a7-kopixekjqw-89359390-9878-11ec-8a36-89f2ae31e8cd",
+ "items": [
+ {
+ "endCol": 6,
+ "fieldId": "communitynotes",
+ "height": 106,
+ "id": "9835c180-9878-11ec-8a36-89f2ae31e8cd",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 6,
+ "fieldId": "publications",
+ "height": 106,
+ "id": "96266980-9878-11ec-8a36-89f2ae31e8cd",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "minW": 3,
+ "moved": false,
+ "name": "Additional Details",
+ "static": false,
+ "w": 3,
+ "x": 0,
+ "y": 0
+ }
+ ],
+ "type": "custom"
+ }
+ ]
+ },
+ "indicatorsQuickView": {
+ "tabs": [
+ {
+ "id": "indicator-quick-view-info",
+ "name": "Info",
+ "sections": [
+ {
+ "h": 2,
+ "i": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-relatedIncidents",
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Related Incidents",
+ "static": false,
+ "type": "relatedIncidents",
+ "w": 1,
+ "x": 0,
+ "y": 8
+ },
+ {
+ "h": 2,
+ "i": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-comments",
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Comments",
+ "static": false,
+ "type": "comments",
+ "w": 1,
+ "x": 0,
+ "y": 10
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-b92ed1c0-ccbd-11ea-937a-6f8841d9fed1",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "timestamp",
+ "height": 22,
+ "id": "8337a3a0-84ed-11ec-a589-e73aadc98c9a",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "modified",
+ "height": 22,
+ "id": "8f4f4870-8a4f-11ec-a1ef-6369524b39c7",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "stixid",
+ "height": 22,
+ "id": "92f211b0-8a4f-11ec-a1ef-6369524b39c7",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "tags",
+ "height": 22,
+ "id": "indicator-quick-view-info-tags-field",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Basic Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideItemTitleOnlyOne": false,
+ "hideName": false,
+ "i": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-c834ecb0-84ed-11ec-a589-e73aadc98c9a",
+ "items": [
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "validitynotbefore",
+ "height": 22,
+ "id": "01de1590-5df0-11ee-b502-4fc78bd978a3",
+ "index": 0,
+ "listId": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-c834ecb0-84ed-11ec-a589-e73aadc98c9a",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "validitynotafter",
+ "height": 22,
+ "id": "0081b9e0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 1,
+ "listId": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-c834ecb0-84ed-11ec-a589-e73aadc98c9a",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "domains",
+ "height": 106,
+ "id": "f8f5a880-5def-11ee-b502-4fc78bd978a3",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "serialnumber",
+ "height": 22,
+ "id": "10dedfc0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "signaturealgorithm",
+ "height": 22,
+ "id": "1513afd0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "issuer",
+ "height": 106,
+ "id": "1a6cdba0-5df0-11ee-b502-4fc78bd978a3",
+ "index": 5,
+ "listId": "indicator-quick-view-info-dad5aa4c-8d37-4211-8f73-49df65e4191d-indicator-quick-view-info-c834ecb0-84ed-11ec-a589-e73aadc98c9a",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "subject",
+ "height": 106,
+ "id": "25bb2520-5df0-11ee-b502-4fc78bd978a3",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Certificate Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 2
+ },
+ {
+ "columns": [
+ {
+ "displayName": "Title",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "title",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 192
+ },
+ {
+ "displayName": "Data",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "data",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "displayType": "ROW",
+ "h": 4,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "i": "indicator-quick-view-info-3416aea0-5df0-11ee-b502-4fc78bd978a3",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "x509v3extensions",
+ "height": 106,
+ "id": "2d447760-5df0-11ee-b502-4fc78bd978a3",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "V3 Extension",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 4
+ }
+ ],
+ "type": "custom"
+ },
+ {
+ "hidden": false,
+ "id": "ksuqyogsni",
+ "name": "Relationships",
+ "sections": [
+ {
+ "h": 8,
+ "i": "ksuqyogsni-ec1e5850-84ed-11ec-a589-e73aadc98c9a",
+ "items": [],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Relationships",
+ "static": false,
+ "type": "relationshipsTable",
+ "w": 1,
+ "x": 0,
+ "y": 0
+ }
+ ],
+ "type": "custom"
+ }
+ ]
+ },
+ "name": "X509 Certificate",
+ "system": false,
+ "version": -1,
+ "fromVersion": "6.0.0"
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/README.md b/Packs/CommonTypes/README.md
index 9ba4ecdab6dd..e69de29bb2d1 100644
--- a/Packs/CommonTypes/README.md
+++ b/Packs/CommonTypes/README.md
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/Packs/CommonTypes/ReleaseNotes/3_4_2.md b/Packs/CommonTypes/ReleaseNotes/3_4_2.md
new file mode 100644
index 000000000000..ac394d02d13c
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_4_2.md
@@ -0,0 +1,9 @@
+#### Incident Fields
+
+Added the **CrowdStrike Falcon Mobile Detection** incident type to the following incident fields:
+
+- **Email**
+- **Detection ID**
+- **Last Update Time**
+- **Vendor Product**
+- **Display Name**
diff --git a/Packs/CommonTypes/ReleaseNotes/3_4_3.md b/Packs/CommonTypes/ReleaseNotes/3_4_3.md
new file mode 100644
index 000000000000..3f19ebfa27e4
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_4_3.md
@@ -0,0 +1,5 @@
+
+#### Incident Fields
+
+- New: **Alert tags**
+- New: **User Risk Level**
diff --git a/Packs/CommonTypes/ReleaseNotes/3_4_4.md b/Packs/CommonTypes/ReleaseNotes/3_4_4.md
new file mode 100644
index 000000000000..95e928887685
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_4_4.md
@@ -0,0 +1,30 @@
+
+#### Indicator Fields
+
+- New: **Subject**
+- New: **Issuer**
+- New: **Certificate Names**
+- New: **Validity Not Before**
+- New: **PEM**
+- New: **Certificate Signature**
+- New: **Extension**
+- New: **X.509 v3 Extensions**
+- New: **Certificate Validation Checks**
+- New: **Issuer DN**
+- New: **SPKI SHA256**
+- New: **Validity Not After**
+- New: **Serial Number**
+- New: **Signature Algorithm**
+- New: **Public Key**
+- New: **Subject Alternative Names**
+- New: **Domains**
+- New: **Subject DN**
+
+#### Indicator Types
+
+##### New: X509 Certificate
+
+#### Layouts
+
+##### New: X509 Certificate
+Layout for the new X509 Certificate indicator type (available from Cortex XSOAR 6.0.0).
diff --git a/Packs/CommonTypes/ReleaseNotes/3_4_5.md b/Packs/CommonTypes/ReleaseNotes/3_4_5.md
new file mode 100644
index 000000000000..e715c4e4c03a
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_4_5.md
@@ -0,0 +1,5 @@
+
+#### Incident Fields
+
+- New: **Additional Email Addresses**
+
diff --git a/Packs/CommonTypes/ReleaseNotes/3_4_6.md b/Packs/CommonTypes/ReleaseNotes/3_4_6.md
new file mode 100644
index 000000000000..22bc3d9f963c
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_4_6.md
@@ -0,0 +1,9 @@
+
+#### Incident Fields
+
+- **Containment SLA**
+ - Updated to be searchable
+- **Incident Duration**
+ - Updated to be searchable
+- **Triage SLA**
+ - Updated to be searchable
\ No newline at end of file
diff --git a/Packs/CommonTypes/pack_metadata.json b/Packs/CommonTypes/pack_metadata.json
index 8e6be00b3c37..e1a1e54883a0 100644
--- a/Packs/CommonTypes/pack_metadata.json
+++ b/Packs/CommonTypes/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Types",
"description": "This Content Pack will get you up and running in no-time and provide you with the most commonly used incident & indicator fields and types.",
"support": "xsoar",
- "currentVersion": "3.4.1",
+ "currentVersion": "3.4.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ContentManagement/ReleaseNotes/1_2_18.md b/Packs/ContentManagement/ReleaseNotes/1_2_18.md
new file mode 100644
index 000000000000..0406cdc76929
--- /dev/null
+++ b/Packs/ContentManagement/ReleaseNotes/1_2_18.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetPrBranches
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/ContentManagement/Scripts/GetPrBranches/GetPrBranches.yml b/Packs/ContentManagement/Scripts/GetPrBranches/GetPrBranches.yml
index 52e9bc47b058..41eee1484574 100644
--- a/Packs/ContentManagement/Scripts/GetPrBranches/GetPrBranches.yml
+++ b/Packs/ContentManagement/Scripts/GetPrBranches/GetPrBranches.yml
@@ -5,7 +5,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ''
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
enabled: true
name: GetPrBranches
runas: DBotWeakRole
diff --git a/Packs/ContentManagement/pack_metadata.json b/Packs/ContentManagement/pack_metadata.json
index 9a7199f3ca40..0f41abe9da0a 100644
--- a/Packs/ContentManagement/pack_metadata.json
+++ b/Packs/ContentManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "XSOAR CI/CD",
"description": "This pack enables you to orchestrate your XSOAR system configuration.",
"support": "xsoar",
- "currentVersion": "1.2.17",
+ "currentVersion": "1.2.18",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Core/Layouts/layoutscontainer-Identity_Analytics_Alerts.json b/Packs/Core/Layouts/layoutscontainer-Identity_Analytics_Alerts.json
index 9d06741bb2e8..312f25c0d7b7 100644
--- a/Packs/Core/Layouts/layoutscontainer-Identity_Analytics_Alerts.json
+++ b/Packs/Core/Layouts/layoutscontainer-Identity_Analytics_Alerts.json
@@ -416,6 +416,22 @@
"hexColor": null,
"id": "af5e7ea0-98cc-11ee-8aac-c7473a83f305",
"index": 2,
+ "listId": "w68olunplf-a0a75d10-98cb-11ee-8aac-c7473a83f305",
+ "name": "Reset Password - Active Directory",
+ "scriptId": "Active Directory Query v2|||ad-set-new-password",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {},
+ "buttonClass": "error",
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "hexColor": null,
+ "id": "bf7d21c0-98cc-11ee-8aac-c7473a83f305",
+ "index": 3,
"listId": "w68olunplf-a0a75d10-98cb-11ee-8aac-c7473a83f305",
"name": "Disable Account",
"scriptId": "DisableUserWrapper",
diff --git a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
index 6daf352c84b1..d0ecc4cab148 100644
--- a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
+++ b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
@@ -500,7 +500,7 @@ tasks:
task:
id: c57402d1-54ac-4fcd-86b3-ea787ef0f134
version: -1
- name: Analyst Desicion
+ name: Analyst Decision
description: An analyst’s decision is required to determine whether it is a malicious or non-malicious activity.
type: condition
iscommand: false
diff --git a/Packs/Core/ReleaseNotes/3_0_21.md b/Packs/Core/ReleaseNotes/3_0_21.md
new file mode 100644
index 000000000000..52f13bf7fc11
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_21.md
@@ -0,0 +1,12 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+Updated task name 'Analyst Decision'.
+
+#### Layouts
+
+##### Identity Analytics Alerts
+
+Added a new button to the Identity Analytics layout for resetting a user's password.
diff --git a/Packs/Core/ReleaseNotes/3_0_22.md b/Packs/Core/ReleaseNotes/3_0_22.md
new file mode 100644
index 000000000000..561ae5c640bd
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_22.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Investigation & Response
+
+Added support for flexible close-reason mapping in `handle_outgoing_issue_closure` in `CoreIRApiModule`. Does not affect this module.
\ No newline at end of file
diff --git a/Packs/Core/pack_metadata.json b/Packs/Core/pack_metadata.json
index 7722d1998c7e..78825a9bde93 100644
--- a/Packs/Core/pack_metadata.json
+++ b/Packs/Core/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core - Investigation and Response",
"description": "Automates incident response",
"support": "xsoar",
- "currentVersion": "3.0.20",
+ "currentVersion": "3.0.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CoreAlertFields/IncidentFields/incidentfield-User_Risk_level.json b/Packs/CoreAlertFields/IncidentFields/incidentfield-User_Risk_level.json
index ebe3ff90fc5c..6e94c535c5d9 100644
--- a/Packs/CoreAlertFields/IncidentFields/incidentfield-User_Risk_level.json
+++ b/Packs/CoreAlertFields/IncidentFields/incidentfield-User_Risk_level.json
@@ -25,5 +25,6 @@
"caseInsensitive": true,
"sla": 0,
"threshold": 72,
- "fromVersion": "6.9.0"
+ "fromVersion": "6.9.0",
+ "marketplaces": ["marketplacev2"]
}
\ No newline at end of file
diff --git a/Packs/CoreAlertFields/ReleaseNotes/1_0_33.md b/Packs/CoreAlertFields/ReleaseNotes/1_0_33.md
new file mode 100644
index 000000000000..cb8e4696296f
--- /dev/null
+++ b/Packs/CoreAlertFields/ReleaseNotes/1_0_33.md
@@ -0,0 +1,3 @@
+
+#### Incident Fields
+- New: **User Risk Level**
diff --git a/Packs/CoreAlertFields/pack_metadata.json b/Packs/CoreAlertFields/pack_metadata.json
index ff000f0ba6ad..728dfa6e2105 100644
--- a/Packs/CoreAlertFields/pack_metadata.json
+++ b/Packs/CoreAlertFields/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core Alert Fields",
"description": "This Content Pack will provide you with the core alert fields.",
"support": "xsoar",
- "currentVersion": "1.0.32",
+ "currentVersion": "1.0.33",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Service_Ownership.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Service_Ownership.yml
index fb040785ce74..69bd5e4c0cd1 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Service_Ownership.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Service_Ownership.yml
@@ -43,10 +43,10 @@ tasks:
version: -1
name: Normalize and rank likely service owners
description: Recommend most likely service owners from those surfaced by Cortex ASM Enrichment.
- scriptName: RankServiceOwners
type: regular
iscommand: false
brand: ""
+ script: RankServiceOwners
nexttasks:
'#none#':
- "10"
@@ -99,10 +99,10 @@ tasks:
description: |-
Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
`!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
- scriptName: GridFieldSetup
type: regular
iscommand: false
brand: ""
+ script: GridFieldSetup
nexttasks:
'#none#':
- "17"
@@ -169,55 +169,71 @@ tasks:
isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: 82e4d1d4-d7bc-4a1a-894a-698786adcdc5
+ taskid: 5505de94-3fde-4947-86d2-7e95c050fa14
type: condition
task:
- id: 82e4d1d4-d7bc-4a1a-894a-698786adcdc5
+ id: 5505de94-3fde-4947-86d2-7e95c050fa14
version: -1
- name: Is asmserviceowner populated?
- description: Determines if the asmserviceowner field exists and if the common fields within it also exists.
+ name: Is asmserviceowner or asmserviceownerunrankedraw populated?
+ description: Determines if the asmserviceowner or asmserviceownerunrankedraw fields have been populated to continue.
type: condition
iscommand: false
brand: ""
nexttasks:
'#default#':
- "10"
- "yes":
+ service owner:
- "7"
+ unranked raw:
+ - "17"
separatecontext: false
conditions:
- - label: "yes"
+ - label: "service owner"
condition:
- - operator: isNotEmpty
left:
value:
- complex:
- root: alert
- accessor: asmserviceowner
+ simple: alert.asmserviceowner
iscontext: true
right:
value: {}
- - operator: isNotEmpty
left:
value:
- complex:
- root: alert.asmserviceowner
- accessor: email
+ simple: alert.asmserviceowner.email
iscontext: true
- - operator: isNotEmpty
left:
value:
- complex:
- root: alert.asmserviceowner
- accessor: name
+ simple: alert.asmserviceowner.name
iscontext: true
- - operator: isNotEmpty
left:
value:
- complex:
- root: alert.asmserviceowner
- accessor: source
+ simple: alert.asmserviceowner.source
iscontext: true
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmserviceownerunrankedraw
+ operator: isNotEmpty
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmserviceownerunrankedraw.email
+ operator: isNotEmpty
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmserviceownerunrankedraw.name
+ operator: isNotEmpty
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmserviceownerunrankedraw.source
+ operator: isNotEmpty
+ label: unranked raw
continueonerrortype: ""
view: |-
{
@@ -410,17 +426,17 @@ tasks:
isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: 6b8a094c-a54f-4678-86c6-6baf66e29721
+ taskid: d1039ef2-c791-4853-8367-d50fe11c8517
type: regular
task:
- id: 6b8a094c-a54f-4678-86c6-6baf66e29721
+ id: d1039ef2-c791-4853-8367-d50fe11c8517
version: -1
name: Look up project owners for service account
description: Parse a GCP service account email for the project name, then lookup project owners and add them to a list of potential service owners for ranking.
- scriptName: GetProjectOwners
type: regular
iscommand: false
brand: ""
+ script: GetProjectOwners
nexttasks:
'#none#':
- "6"
@@ -429,8 +445,7 @@ tasks:
complex:
root: ASM.ExternalService.externally_detected_providers
owners:
- complex:
- root: alert.asmserviceownerunrankedraw
+ simple: ${alert.asmserviceownerunrankedraw}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -453,7 +468,7 @@ view: |-
"15_10_#default#": 0.2,
"17_10_#default#": 0.2,
"9_10_#default#": 0.33,
- "9_7_yes": 0.55
+ "9_7_service owner": 0.55
},
"paper": {
"dimensions": {
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_30.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_30.md
new file mode 100644
index 000000000000..00d330aaa882
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_30.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Cortex ASM - Service Ownership
+
+- Fixed an issue where the *service owner* field wasn't always being set.
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png
index 05793efbe6ea..fe42024acfe9 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/pack_metadata.json b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
index 38dfed7651f2..4179d8714350 100644
--- a/Packs/CortexAttackSurfaceManagement/pack_metadata.json
+++ b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Attack Surface Management",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.7.29",
+ "currentVersion": "1.7.30",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexXDR/.pack-ignore b/Packs/CortexXDR/.pack-ignore
index 4a6926eec1dc..3dd8e5cabfcf 100644
--- a/Packs/CortexXDR/.pack-ignore
+++ b/Packs/CortexXDR/.pack-ignore
@@ -156,4 +156,7 @@ ignore=IF100
ignore=PB105
[file:playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.yml]
-ignore=PB105
\ No newline at end of file
+ignore=PB105
+
+[file:Cortex_XDR_-_Large_Upload.yml]
+ignore=PB121
\ No newline at end of file
diff --git a/Packs/CortexXDR/Classifiers/classifier-mapper-incoming-PaloAltoNetworks_CortexXDR.json b/Packs/CortexXDR/Classifiers/classifier-mapper-incoming-PaloAltoNetworks_CortexXDR.json
index a6951e64af54..dbe5ff373713 100644
--- a/Packs/CortexXDR/Classifiers/classifier-mapper-incoming-PaloAltoNetworks_CortexXDR.json
+++ b/Packs/CortexXDR/Classifiers/classifier-mapper-incoming-PaloAltoNetworks_CortexXDR.json
@@ -500,6 +500,18 @@
"Cortex XDR Incident": {
"dontMapEventToLabels": true,
"internalMapping": {
+ "Alert tags": {
+ "complex": {
+ "accessor": "tags",
+ "filters": [],
+ "root": "alerts",
+ "transformers": [
+ {
+ "operator": "uniq"
+ }
+ ]
+ }
+ },
"Hostnames": {
"complex": {
"accessor": "host_name",
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
index 36d26776c43e..9a005235eb84 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
@@ -44,6 +44,9 @@
'Both': 'Both'
}
+XSOAR_TO_XDR = "XSOAR -> XDR"
+XDR_TO_XSOAR = "XDR -> XSOAR"
+
def convert_epoch_to_milli(timestamp):
if timestamp is None:
@@ -122,7 +125,6 @@ def filter_and_save_unseen_incident(incidents: List, limit: int, number_of_alrea
class Client(CoreClient):
-
def __init__(self, base_url, proxy, verify, timeout, params=None):
if not params:
params = {}
@@ -148,6 +150,56 @@ def test_module(self, first_fetch_time):
else:
raise
+ # XSOAR -> XDR
+ self.validate_custom_mapping(mapping=self._params.get("custom_xsoar_to_xdr_close_reason_mapping"),
+ direction=XSOAR_TO_XDR)
+
+ # XDR -> XSOAR
+ self.validate_custom_mapping(mapping=self._params.get("custom_xdr_to_xsoar_close_reason_mapping"),
+ direction=XDR_TO_XSOAR)
+
+ def validate_custom_mapping(self, mapping: str, direction: str):
+ """ Check validity of provided custom close-reason mappings. """
+
+ xdr_statuses_to_xsoar = [status.replace("resolved_", "").replace("_", " ").title()
+ for status in XDR_RESOLVED_STATUS_TO_XSOAR]
+ xsoar_statuses_to_xdr = list(XSOAR_RESOLVED_STATUS_TO_XDR.keys())
+
+ exception_message = ('Improper custom mapping ({direction}) provided: "{key_or_value}" is not a valid Cortex '
+ '{xsoar_or_xdr} close-reason. Valid Cortex {xsoar_or_xdr} close-reasons are: {statuses}')
+
+ def to_xdr_status(status):
+ return "resolved_" + "_".join(status.lower().split(" "))
+
+ custom_mapping = comma_separated_mapping_to_dict(mapping)
+
+ valid_key = valid_value = True # If no mapping was provided.
+
+ for key, value in custom_mapping.items():
+ if direction == XSOAR_TO_XDR:
+ xdr_close_reason = to_xdr_status(value)
+ valid_key = key in XSOAR_RESOLVED_STATUS_TO_XDR
+ valid_value = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
+ elif direction == XDR_TO_XSOAR:
+ xdr_close_reason = to_xdr_status(key)
+ valid_key = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
+ valid_value = value in XSOAR_RESOLVED_STATUS_TO_XDR
+
+ if not valid_key:
+ raise DemistoException(
+ exception_message.format(direction=direction,
+ key_or_value=key,
+ xsoar_or_xdr="XSOAR" if direction == XSOAR_TO_XDR else "XDR",
+ statuses=xsoar_statuses_to_xdr
+ if direction == XSOAR_TO_XDR else xdr_statuses_to_xsoar))
+ elif not valid_value:
+ raise DemistoException(
+ exception_message.format(direction=direction,
+ key_or_value=value,
+ xsoar_or_xdr="XDR" if direction == XSOAR_TO_XDR else "XSOAR",
+ statuses=xdr_statuses_to_xsoar
+ if direction == XSOAR_TO_XDR else xsoar_statuses_to_xdr))
+
def handle_fetch_starred_incidents(self, limit: int, page_number: int, request_data: dict) -> List:
"""
handles pagination and filter of starred incidents that were fetched.
@@ -626,32 +678,73 @@ def handle_incoming_user_unassignment(incident_data):
incident_data['owner'] = ''
-def handle_incoming_closing_incident(incident_data):
- incident_id = incident_data.get('incident_id')
- demisto.debug(f'handle_incoming_closing_incident {incident_data=} {incident_id=}')
+def resolve_xsoar_close_reason(xdr_close_reason: str):
+ """
+ Resolving XSOAR close reason from possible custom XDR->XSOAR close-reason mapping or default mapping.
+ :param xdr_close_reason: XDR raw status/close reason e.g. 'resolved_false_positive'.
+ :return: XSOAR close reason.
+ """
+
+ # Check if incoming XDR close-reason has a non-default mapping to XSOAR close-reason.
+ if demisto.params().get("custom_xdr_to_xsoar_close_reason_mapping"):
+ custom_xdr_to_xsoar_close_reason_mapping = comma_separated_mapping_to_dict(
+ demisto.params().get("custom_xdr_to_xsoar_close_reason_mapping")
+ )
+ # XDR raw status/close-reason is prefixed with 'resolved_' and is given in snake_case format,
+ # e.g. 'resolved_false_positive', whilst custom XDR->XSOAR close-reason mapping
+ # is using title case format e.g. 'False Positive', therefore we need to adapt it accordingly.
+ title_cased_xdr_close_reason = (
+ xdr_close_reason.replace("resolved_", "").replace("_", " ").title()
+ )
+ xsoar_close_reason = custom_xdr_to_xsoar_close_reason_mapping.get(title_cased_xdr_close_reason)
+ if xsoar_close_reason in XSOAR_RESOLVED_STATUS_TO_XDR:
+ demisto.debug(
+ f"XDR->XSOAR custom close-reason exists, using {xdr_close_reason}={xsoar_close_reason}"
+ )
+ return xsoar_close_reason
+
+ # Otherwise, we use default mapping.
+ xsoar_close_reason = XDR_RESOLVED_STATUS_TO_XSOAR.get(xdr_close_reason)
+ demisto.debug(
+ f"XDR->XSOAR custom close-reason does not exists, using default mapping {xdr_close_reason}={xsoar_close_reason}"
+ )
+ return xsoar_close_reason
+
+
+def handle_incoming_closing_incident(incident_data) -> dict:
+ incident_id = incident_data.get("incident_id")
+ demisto.debug(f"handle_incoming_closing_incident {incident_data=} {incident_id=}")
closing_entry = {} # type: Dict
- if incident_data.get('status') in XDR_RESOLVED_STATUS_TO_XSOAR:
- demisto.debug(f"handle_incoming_closing_incident {incident_data.get('status')=} {incident_id=}")
+
+ if incident_data.get("status") in XDR_RESOLVED_STATUS_TO_XSOAR:
+ demisto.debug(
+ f"handle_incoming_closing_incident {incident_data.get('status')=} {incident_id=}"
+ )
demisto.debug(f"Closing XDR issue {incident_id=}")
+ xsoar_close_reason = resolve_xsoar_close_reason(incident_data.get("status"))
closing_entry = {
- 'Type': EntryType.NOTE,
- 'Contents': {
- 'dbotIncidentClose': True,
- 'closeReason': XDR_RESOLVED_STATUS_TO_XSOAR.get(incident_data.get("status")),
- 'closeNotes': incident_data.get('resolve_comment', '')
+ "Type": EntryType.NOTE,
+ "Contents": {
+ "dbotIncidentClose": True,
+ "closeReason": xsoar_close_reason,
+ "closeNotes": incident_data.get("resolve_comment", ""),
},
- 'ContentsFormat': EntryFormat.JSON
+ "ContentsFormat": EntryFormat.JSON,
}
- incident_data['closeReason'] = closing_entry['Contents']['closeReason']
- incident_data['closeNotes'] = closing_entry['Contents']['closeNotes']
- demisto.debug(f"handle_incoming_closing_incident {incident_id=} {incident_data['closeReason']=} "
- f"{incident_data['closeNotes']=}")
+ incident_data["closeReason"] = closing_entry["Contents"]["closeReason"]
+ incident_data["closeNotes"] = closing_entry["Contents"]["closeNotes"]
+ demisto.debug(
+ f"handle_incoming_closing_incident {incident_id=} {incident_data['closeReason']=} "
+ f"{incident_data['closeNotes']=}"
+ )
- if incident_data.get('status') == 'resolved_known_issue':
+ if incident_data.get("status") == "resolved_known_issue":
close_notes = f'Known Issue.\n{incident_data.get("closeNotes", "")}'
- closing_entry['Contents']['closeNotes'] = close_notes
- incident_data['closeNotes'] = close_notes
- demisto.debug(f"handle_incoming_closing_incident {incident_id=} {close_notes=}")
+ closing_entry["Contents"]["closeNotes"] = close_notes
+ incident_data["closeNotes"] = close_notes
+ demisto.debug(
+ f"handle_incoming_closing_incident {incident_id=} {close_notes=}"
+ )
return closing_entry
@@ -930,7 +1023,6 @@ def file_details_results(client: Client, args: Dict, add_to_context: bool) -> No
def get_contributing_event_command(client: Client, args: Dict) -> CommandResults:
-
if alert_ids := argToList(args.get('alert_ids')):
alerts = []
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
index f39c91278026..3132393f3017 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
@@ -36,6 +36,20 @@ configuration:
- Incoming
- Outgoing
- Both
+- display: Custom close-reason mapping (XSOAR -> XDR mirrored incident. Overwrites default close-reason mapping defined by Cortex XSOAR)
+ section: Collect
+ additionalinfo: 'Define how to close the mirrored incidents from Cortex XSOAR into Cortex XDR with a custom close reason mapping. Enter a comma-separated list of close reasons (acceptable format {Cortex XSOAR close reason}={Cortex XDR close reason}) to override the default close reason mapping defined by Cortex XSOAR. Note that the mapping must be configured accordingly with the existing close reasons in Cortex XSOAR and Cortex XDR. Not following this format will result in closing the incident with a default close reason. Example: "Resolved=Other,Duplicate=Other". Refer to integration documentation for possible close-reasons - `XDR Incident Mirroring`.'
+ name: custom_xsoar_to_xdr_close_reason_mapping
+ defaultvalue: ''
+ type: 0
+ required: false
+- display: Custom close-reason mapping (XDR -> XSOAR mirrored incident. Overwrites default close-reason mapping defined by Cortex XSOAR)
+ section: Collect
+ additionalinfo: 'Define how to close the mirrored incidents from Cortex XDR into Cortex XSOAR with a custom close reason mapping. Enter a comma-separated list of close reasons (acceptable format {Cortex XDR close reason}={Cortex XSOAR close reason}) to override the default close reason mapping defined by Cortex XSOAR. Note that the mapping must be configured accordingly with the existing close reasons in Cortex XSOAR and Cortex XDR. Not following this format will result in closing the incident with a default close reason. Example: “Known Issue=Resolved, Duplicate Incident=Other". Refer to integration documentation for possible close-reasons - `XDR Incident Mirroring`.'
+ name: custom_xdr_to_xsoar_close_reason_mapping
+ defaultvalue: ''
+ type: 0
+ required: false
- name: url
type: 0
display: 'Server URL (copy URL from XDR)'
@@ -241,7 +255,7 @@ script:
type: String
- contextPath: PaloAltoNetworksXDR.Incident.status
description: |
- Current status of the incident. Valid values are: "new","under_investigation","resolved_known_issue","resolved_duplicate","resolved_false_positive","resolved_true_positive","resolved_security_testing" or "resolved_other".
+ Current status of the incident. Valid values are: "new","under_investigation","resolved_known_issue","resolved_duplicate_incident","resolved_false_positive","resolved_true_positive","resolved_security_testing" or "resolved_other".
type: String
- contextPath: PaloAltoNetworksXDR.Incident.description
description: Dynamic calculated description of the incident.
@@ -3471,7 +3485,7 @@ script:
isArray: true
name: xdr-remove-user-role
description: Remove one or more users from a role.
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.89009
isfetch: true
isfetch:xpanse: false
script: ''
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_description.md b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_description.md
index 7f0c436a6a4b..9bd6257b4199 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_description.md
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_description.md
@@ -13,4 +13,11 @@
2. Click the **Copy URL** button in the top right corner.
---
+
+### Mirroring
+
+**Close-reason default mapping XSOAR -> XDR**: _Other=Other, Duplicate=Duplicate Incident, False Positive=False Positive, Resolved=True Positive_
+
+**Close-reason default mapping XDR -> XSOAR**: _Known Issue=Other, Duplicate Incident=Duplicate, False Positive=False Positive, True Positive=Resolved, Other=Other, Auto=Resolved_
+
[View Integration Documentation](https://xsoar.pan.dev/docs/reference/integrations/cortex-xdr---ir)
\ No newline at end of file
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
index 768cb6220203..38444fdf98ac 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
@@ -5,9 +5,9 @@
from freezegun import freeze_time
import demistomock as demisto
-from CommonServerPython import Common
-from CortexXDRIR import XDR_RESOLVED_STATUS_TO_XSOAR
-
+from CommonServerPython import Common, urljoin, DemistoException
+from CoreIRApiModule import XDR_RESOLVED_STATUS_TO_XSOAR
+from CortexXDRIR import XSOAR_TO_XDR, XDR_TO_XSOAR
XDR_URL = 'https://api.xdrurl.com'
''' HELPER FUNCTIONS '''
@@ -499,7 +499,7 @@ def test_get_remote_data_command_should_update(requests_mock, mocker):
def test_get_remote_data_command_with_rate_limit_exception(mocker):
"""
Given:
- - an XDR client
+ - an XDR client
- arguments (id and lastUpdate time set to a lower than incident modification time)
- a Rate limit exception is raises from get_extra_data_command method
When
@@ -792,3 +792,120 @@ def test_update_remote_system_command(incident_changed, delta):
}
actual_remote_id = update_remote_system_command(client, args)
assert actual_remote_id == expected_remote_id
+
+
+@pytest.mark.parametrize('custom_mapping, expected_resolved_status',
+ [
+ ("Known Issue=Other,Duplicate Incident=Duplicate,False Positive=False Positive,"
+ "True Positive=Resolved,Security Testing=Other,Other=Other",
+ ["Other", "Duplicate", "False Positive", "Resolved", "Other", "Other", "Resolved"]),
+
+ ("Known Issue=Other,Duplicate Incident=Other,False Positive=False Positive,"
+ "True Positive=Resolved,Security Testing=Other,Other=Other",
+ ["Other", "Other", "False Positive", "Resolved", "Other", "Other", "Resolved"]),
+
+ ("Duplicate Incident=Other,Security Testing=Other,Other=Other",
+ ["Other", "Other", "False Positive", "Resolved", "Other", "Other", "Resolved"]),
+
+ # Expecting default mapping to be used when no mapping provided.
+ ("", list(XDR_RESOLVED_STATUS_TO_XSOAR.values())),
+
+ # Expecting default mapping to be used when improper mapping is provided.
+ ("Duplicate=RANDOM1, Other=Random2", list(XDR_RESOLVED_STATUS_TO_XSOAR.values())),
+
+ ("Duplicate Incident=Random3", list(XDR_RESOLVED_STATUS_TO_XSOAR.values())),
+
+ # Expecting default mapping to be used when improper mapping *format* is provided.
+ ("Duplicate Incident=Other False Positive=Other", list(XDR_RESOLVED_STATUS_TO_XSOAR.values())),
+
+ # Expecting default mapping to be used for when improper key-value pair *format* is provided.
+ ("Duplicate Incident=Other, False Positive=Other True Positive=Other",
+ ["Other", "Other", "False Positive", "Resolved", "Security Testing", "Other",
+ "Resolved"]),
+
+ ],
+ ids=["case-1", "case-2", "case-3", "empty-case", "improper-input-case-1", "improper-input-case-2",
+ "improper-input-case-3", "improper-input-case-4"]
+ )
+def test_xdr_to_xsoar_flexible_close_reason_mapping(capfd, mocker, custom_mapping, expected_resolved_status):
+ """
+ Given:
+ - A custom XDR->XSOAR close-reason mapping
+ - Expected resolved XSOAR status according to the custom mapping.
+ When
+ - Handling incoming closing-incident (handle_incoming_closing_incident(...) executed).
+ Then
+ - The resolved XSOAR statuses match the expected statuses for all possible XDR close-reasons.
+ """
+ from CortexXDRIR import handle_incoming_closing_incident
+ mocker.patch.object(demisto, 'params', return_value={"mirror_direction": "Both",
+ "custom_xdr_to_xsoar_close_reason_mapping": custom_mapping})
+
+ all_xdr_close_reasons = XDR_RESOLVED_STATUS_TO_XSOAR.keys()
+
+ for i, xdr_close_reason in enumerate(all_xdr_close_reasons):
+ # Mock an xdr incident with "resolved" status.
+ incident_data = load_test_data('./test_data/resolved_incident_data.json')
+ # Set incident status to be tested close-reason.
+ incident_data["status"] = xdr_close_reason
+
+ # Overcoming expected non-empty stderr test failures (Errors are submitted to stderr when improper mapping is provided).
+ with capfd.disabled():
+ close_entry = handle_incoming_closing_incident(incident_data)
+ assert close_entry["Contents"]["closeReason"] == expected_resolved_status[i]
+
+
+@pytest.mark.parametrize('custom_mapping, direction, should_raise_error',
+ [
+ ("Other=Other,Duplicate=Other,False Positive=False Positive,Resolved=True Positive",
+ XSOAR_TO_XDR, False),
+
+ ("Known Issue=Other,Duplicate Incident=Duplicate,False Positive=False Positive",
+ XDR_TO_XSOAR, False),
+
+ ("Duplicate Incident=Random", XSOAR_TO_XDR, True),
+
+ ("Duplicate=RANDOM1, Other=Random2", XDR_TO_XSOAR, True),
+ # Inverted map provided
+ ("Duplicate=Duplicate Incident", XDR_TO_XSOAR, True),
+ ("Duplicate Incident=Duplicate", XSOAR_TO_XDR, True),
+ # Improper mapping
+ ("Random1, Random2", XDR_TO_XSOAR, True),
+ ("Random1, Random2", XSOAR_TO_XDR, True),
+
+ ],
+ ids=["case-1", "case-2", "case-3", "case-4", "case-5", "case-6", "case-7", "case-8"]
+ )
+def test_test_module(capfd, custom_mapping, direction, should_raise_error):
+ """
+ Given:
+ - mock client with username and api_key (basic auth)
+ When:
+ - run `test_module` function
+ Then:
+    - Ensure a DemistoException is raised only for invalid custom mappings; valid mappings raise no error.
+ """
+ from CortexXDRIR import Client
+
+ # using two different credentials object as they both fields need to be encrypted
+ base_url = urljoin("dummy_url", '/public_api/v1')
+ proxy = demisto.params().get('proxy')
+ verify_cert = not demisto.params().get('insecure', False)
+
+ client = Client(
+ base_url=base_url,
+ proxy=proxy,
+ verify=verify_cert,
+ timeout=120,
+ params=demisto.params()
+ )
+ # Overcoming expected non-empty stderr test failures (Errors are submitted to stderr when improper mapping is provided).
+ with capfd.disabled():
+ if should_raise_error:
+ with pytest.raises(DemistoException):
+ client.validate_custom_mapping(mapping=custom_mapping, direction=direction)
+ else:
+ try:
+ client.validate_custom_mapping(mapping=custom_mapping, direction=direction)
+ except DemistoException as e:
+ pytest.fail(f"Unexpected exception raised for input {input}: {e}")
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
index 5b062d7d731e..f874137a1a2a 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
@@ -7,26 +7,28 @@ This integration was integrated and tested with version 2.6.5 of Cortex XDR - IR
2. Search for Palo Alto Networks Cortex XDR - Investigation and Response.
3. Click **Add instance** to create and configure a new integration instance.
- | **Parameter** | **Description** | **Required** |
- | - | --- | --- |
- | Fetch incidents | | False |
- | Incident type | | False |
- | Remove legacy incident fields | Unchecked for backwards compatibility, recommended to check. This will remove duplicated incident fields under file_artifacts, network_artifacts, and alerts (like client_id, clientid.) | False |
- | Incident Mirroring Direction | | False |
- | Server URL (copy URL from XDR - click ? to see more info.) | | True |
- | API Key ID | | False |
- | API Key | | False |
- | HTTP Timeout | The timeout of the HTTP requests sent to Cortex XDR API \(in seconds\). | False |
- | Maximum number of incidents per fetch | The maximum number of incidents per fetch. Cannot exceed 100. | False |
- | Only fetch starred incidents | | False |
- | Starred incidents fetch window | Starred fetch window timestamp \(<number> <time unit>, e.g., 12 hours, 7 days\). Fetches only starred incidents within the specified time range. | False |
- | First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days) | | False |
- | Sync Incident Owners | For Cortex XSOAR version 6.0.0 and above. If selected, for every incident fetched from Cortex XDR to Cortex XSOAR, the incident owners will be synced. Note that once this value is changed and synchronized between the systems, additional changes will not be reflected. For example, if you change the owner in Cortex XSOAR, the new owner will also be changed in Cortex XDR. However, if you now change the owner back in Cortex XDR, this additional change will not be reflected in Cortex XSOAR. In addition, for this change to be reflected, the owners must exist in both Cortex XSOAR and Cortex XDR. | False |
- | Trust any certificate (not secure) | | False |
- | Use system proxy settings | | False |
- | Prevent Only Mode | Whether the XDR tenant Mode is prevent only | False |
- | Incident Statuses to Fetch | The statuses of the incidents that will be fetched. If no status is provided then incidents of all the statuses will be fetched. Note: An incident whose status was changed to a filtered status after its creation time will not be fetched. | False |
- | Incidents Fetch Interval | | False |
+ | **Parameter** | **Description** | **Required** |
+ |----------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+ | Fetch incidents | | False |
+ | Incident type | | False |
+ | Remove legacy incident fields | Unchecked for backwards compatibility, recommended to check. This will remove duplicated incident fields under file_artifacts, network_artifacts, and alerts (like client_id, clientid.) | False |
+ | Incident Mirroring Direction | | False |
+ | Custom close-reason mapping for mirrored **XSOAR -> XDR** incidents. | Define how to close the mirrored incidents from Cortex XSOAR into XDR with a custom close reason mapping. Enter a comma-separated close-reason mapping (acceptable format {XSOAR close reason}={XDR close reason}) to override the default close reason mapping defined by XSOAR. Note that the mapping must be configured accordingly with the existing close reasons in Cortex XSOAR and XDR. Not following this format will result in closing the incident with a default close reason. Example: "Resolved=Other". Default: "Other=Other,Duplicate=Duplicate Incident,False Positive=False Positive,Resolved=True Positive”. Refer to integration documentation for possible close-reasons (`XDR Incident Mirroring, sec. 7`). | False |
+ | Custom close-reason mapping for mirrored **XDR -> XSOAR** incidents. | Define how to close the mirrored incidents from Cortex XDR into XSOAR with a custom close reason mapping. Enter a comma-separated list of close reasons (acceptable format {XDR close reason}={XSOAR close reason}) to override the default close reason mapping defined by XSOAR. Note that the mapping must be configured accordingly with the existing close reasons in Cortex XSOAR and XDR. Not following this format will result in closing the incident with a default close reason. Example: “Known Issue=Resolved". Default: “Known Issue=Other,Duplicate Incident=Duplicate,False Positive=False Positive,True Positive=Resolved,Security Testing=Other,Other=Other,Auto=Resolved". Refer to integration documentation for possible close-reasons (`XDR Incident Mirroring, sec. 7`). | False |
+ | Server URL (copy URL from XDR - click ? to see more info.) | | True |
+ | API Key ID | | False |
+ | API Key | | False |
+ | HTTP Timeout | The timeout of the HTTP requests sent to Cortex XDR API \(in seconds\). | False |
+ | Maximum number of incidents per fetch | The maximum number of incidents per fetch. Cannot exceed 100. | False |
+ | Only fetch starred incidents | | False |
+ | Starred incidents fetch window | Starred fetch window timestamp \(<number> <time unit>, e.g., 12 hours, 7 days\). Fetches only starred incidents within the specified time range. | False |
+ | First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days) | | False |
+ | Sync Incident Owners | For Cortex XSOAR version 6.0.0 and above. If selected, for every incident fetched from Cortex XDR to Cortex XSOAR, the incident owners will be synced. Note that once this value is changed and synchronized between the systems, additional changes will not be reflected. For example, if you change the owner in Cortex XSOAR, the new owner will also be changed in Cortex XDR. However, if you now change the owner back in Cortex XDR, this additional change will not be reflected in Cortex XSOAR. In addition, for this change to be reflected, the owners must exist in both Cortex XSOAR and Cortex XDR. | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+ | Prevent Only Mode | Whether the XDR tenant Mode is prevent only | False |
+ | Incident Statuses to Fetch | The statuses of the incidents that will be fetched. If no status is provided then incidents of all the statuses will be fetched. Note: An incident whose status was changed to a filtered status after its creation time will not be fetched. | False |
+ | Incidents Fetch Interval | | False |
4. Click **Test** to validate the URLs, token, and connection.
@@ -52,6 +54,8 @@ For builtin role with less permission but maximum command running abilities, use
1. In your Cortex XDR platform, go to **Settings** > **Configurations** > **API key** page > **API Keys**.
2. Click the **Copy URL** button in the top right corner.
+#### XDR & XSOAR
+
## Playbooks
---
@@ -137,19 +141,44 @@ To setup the mirroring follow these instructions:
4. Under **Mapper (incoming)**, select `XDR - Incoming Mapper`.
5. Under **Mapper (outgoing)**, select `Cortex XDR - Outgoing Mapper`.
6. In the *Incident Mirroring Direction* integration parameter, select in which direction the incidents should be mirrored:
-
-- Incoming - Any changes in XDR incidents will be reflected in XSOAR incidents.
-- Outgoing - Any changes in XSOAR incidents will be reflected in XDR incidents.
-- Both - Changes in XSOAR and XDR incidents will be reflected in both directions.
-- None - Choose this to turn off incident mirroring.
-
-7. Optional: Check the *Sync Incident Owners* integration parameter to sync the incident owners in both XDR and XSOAR.
-
-- Note: This feature will only work if the same users are registered in both Cortex XSOAR and Cortex XDR.
-
-8. Newly fetched incidents will be mirrored in the chosen direction.
-
-- Note: This will not effect existing incidents.
+ - Incoming - Any changes in Cortex XDR incidents will be reflected in Cortex XSOAR incidents.
+ - Outgoing - Any changes in Cortex XSOAR incidents will be reflected in Cortex XDR incidents.
+ - Both - Changes in Cortex XSOAR and Cortex XDR incidents will be reflected in both directions.
+ - None - Choose this to turn off incident mirroring.
+
+7. Optional: Provide a custom close-reason mapping for mirrored XDR <-> XSOAR incidents. Please use only possible close-reasons to map:
+
+ | Possible Closure Reasons for Cortex XSOAR Incident |
+ |----------------------------------------------------|
+ | Resolved |
+ | False Positive |
+ | Duplicate |
+ | Security Testing |
+ | Other |
+
+   |Possible Closure Reasons for Cortex XDR Incident|
+ |-----------------------------------|
+ | True Positive |
+ | False Positive |
+ | Duplicate Incident |
+ | Security Testing |
+ | Known Issue |
+ | Other |
+ | Auto |
+
+ Failing to use only available values will result in using default mapping of closure reasons within the mirroring process.
+
+ **Close-reason default mapping XSOAR -> XDR**: _Other=Other, Duplicate=Duplicate Incident, False Positive=False Positive, Resolved=True Positive_
+
+ **Close-reason default mapping XDR -> XSOAR**: _Known Issue=Other, Duplicate Incident=Duplicate, False Positive=False Positive, True Positive=Resolved, Other=Other, Auto=Resolved_
+
+8. Optional: Check the *Sync Incident Owners* integration parameter to sync the incident owners in both XDR and XSOAR.
+
+ - Note: This feature will only work if the same users are registered in both Cortex XSOAR and Cortex XDR.
+
+9. Newly fetched incidents will be mirrored in the chosen direction.
+
+    - Note: This will not affect existing incidents.
### XDR Mirroring Notes, limitations and Troubleshooting
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/test_data/resolved_incident_data.json b/Packs/CortexXDR/Integrations/CortexXDRIR/test_data/resolved_incident_data.json
new file mode 100644
index 000000000000..169723ceac0e
--- /dev/null
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/test_data/resolved_incident_data.json
@@ -0,0 +1,360 @@
+{
+ "incident_id": "1",
+ "is_blocked": false,
+ "incident_name": null,
+ "modification_time": 1708940581625,
+ "detection_time": null,
+ "status": "resolved_duplicate_incident",
+ "severity": "medium",
+ "description": " --- ",
+ "assigned_user_mail": "",
+ "assigned_user_pretty_name": "",
+ "alert_count": 1,
+ "low_severity_alert_count": 0,
+ "med_severity_alert_count": 1,
+ "high_severity_alert_count": 0,
+ "critical_severity_alert_count": 0,
+ "user_count": 0,
+ "host_count": 0,
+ "notes": null,
+ "resolve_comment": null,
+ "resolved_timestamp": 1708940581625,
+ "manual_severity": null,
+ "manual_description": null,
+ "xdr_url": "https://demisto.hello.com/incident-view/1",
+ "starred": false,
+ "starred_manually": false,
+ "hosts": null,
+ "users": [],
+ "incident_sources": [
+ "Correlation"
+ ],
+ "rule_based_score": null,
+ "predicted_score": null,
+ "manual_score": null,
+ "aggregated_score": null,
+ "wildfire_hits": 0,
+ "alerts_grouping_status": "Disabled",
+ "mitre_tactics_ids_and_names": null,
+ "mitre_techniques_ids_and_names": null,
+ "alert_categories": [
+ "Other"
+ ],
+ "original_tags": [
+ "DS:Microsoft Graph"
+ ],
+ "tags": [
+ "DS:Microsoft Graph"
+ ],
+ "alerts": [
+ {
+ "agent_os_sub_type": null,
+ "fw_app_category": null,
+ "fw_app_id": null,
+ "fw_app_subcategory": null,
+ "fw_app_technology": null,
+ "category": "Other",
+ "causality_actor_process_command_line": null,
+ "causality_actor_process_image_md5": null,
+ "causality_actor_process_image_name": null,
+ "causality_actor_process_image_path": null,
+ "causality_actor_process_image_sha256": null,
+ "causality_actor_process_signature_status": "N/A",
+ "causality_actor_process_signature_vendor": null,
+ "causality_actor_causality_id": null,
+ "identity_sub_type": null,
+ "identity_type": null,
+ "operation_name": null,
+ "project": null,
+ "cloud_provider": null,
+ "referenced_resource": null,
+ "resource_sub_type": null,
+ "resource_type": null,
+ "cluster_name": null,
+ "container_id": null,
+ "contains_featured_host": "NO",
+ "contains_featured_ip": "NO",
+ "contains_featured_user": "NO",
+ "action_country": "UNKNOWN",
+ "fw_interface_to": null,
+ "dns_query_name": null,
+ "agent_device_domain": null,
+ "fw_email_recipient": null,
+ "fw_email_sender": null,
+ "fw_email_subject": null,
+ "event_type": null,
+ "is_whitelisted": false,
+ "action_file_macro_sha256": null,
+ "action_file_md5": null,
+ "action_file_name": null,
+ "action_file_path": null,
+ "action_file_sha256": null,
+ "fw_device_name": null,
+ "fw_rule_id": null,
+ "fw_rule": null,
+ "fw_serial_number": null,
+ "agent_fqdn": null,
+ "mac": null,
+ "agent_os_type": "NO_HOST",
+ "image_name": null,
+ "actor_process_image_name": null,
+ "actor_process_command_line": null,
+ "actor_process_image_md5": null,
+ "actor_process_image_path": null,
+ "actor_process_os_pid": null,
+ "actor_process_image_sha256": null,
+ "actor_process_signature_status": "N/A",
+ "actor_process_signature_vendor": null,
+ "actor_thread_thread_id": null,
+ "fw_is_phishing": "N/A",
+ "action_local_ip": null,
+ "action_local_port": null,
+ "fw_misc": null,
+ "mitre_tactic_id_and_name": null,
+ "mitre_technique_id_and_name": null,
+ "module_id": null,
+ "fw_vsys": null,
+ "os_actor_process_command_line": null,
+ "os_actor_thread_thread_id": null,
+ "os_actor_process_image_name": null,
+ "os_actor_process_os_pid": null,
+ "os_actor_process_image_sha256": null,
+ "os_actor_process_signature_status": "N/A",
+ "os_actor_process_signature_vendor": null,
+ "os_actor_effective_username": null,
+ "action_process_signature_status": "N/A",
+ "action_process_signature_vendor": null,
+ "action_registry_data": null,
+ "action_registry_full_key": null,
+ "action_external_hostname": null,
+ "action_remote_ip": null,
+ "action_remote_port": null,
+ "matching_service_rule_id": "16",
+ "fw_interface_from": null,
+ "starred": false,
+ "action_process_image_command_line": null,
+ "action_process_image_name": null,
+ "action_process_image_sha256": null,
+ "fw_url_domain": null,
+ "user_agent": null,
+ "fw_xff": null,
+ "external_id": "0dc55b4f-3e15-4380-9efd-706a67b16c34",
+ "severity": "medium",
+ "matching_status": "UNMATCHABLE",
+ "end_match_attempt_ts": null,
+ "local_insert_ts": 1708939170883,
+ "last_modified_ts": null,
+ "bioc_indicator": null,
+ "attempt_counter": 0,
+ "bioc_category_enum_key": null,
+ "case_id": 1,
+ "deduplicate_tokens": null,
+ "filter_rule_id": null,
+ "agent_version": null,
+ "agent_ip_addresses_v6": null,
+ "agent_data_collection_status": null,
+ "agent_is_vdi": null,
+ "agent_install_type": "NA",
+ "agent_host_boot_time": null,
+ "event_sub_type": null,
+ "association_strength": null,
+ "dst_association_strength": null,
+ "story_id": null,
+ "event_id": null,
+ "event_timestamp": null,
+ "actor_process_instance_id": null,
+ "actor_process_causality_id": null,
+ "actor_causality_id": null,
+ "causality_actor_process_execution_time": null,
+ "action_registry_key_name": null,
+ "action_registry_value_name": null,
+ "action_local_ip_v6": null,
+ "action_remote_ip_v6": null,
+ "action_process_instance_id": null,
+ "action_process_causality_id": null,
+ "os_actor_process_instance_id": null,
+ "os_actor_process_image_path": null,
+ "os_actor_process_causality_id": null,
+ "os_actor_causality_id": null,
+ "dst_agent_id": null,
+ "dst_causality_actor_process_execution_time": null,
+ "dst_action_external_hostname": null,
+ "dst_action_country": null,
+ "dst_action_external_port": null,
+ "is_pcap": false,
+ "image_id": null,
+ "container_name": null,
+ "namespace": null,
+ "alert_type": "Unclassified",
+ "resolution_status": "STATUS_010_NEW",
+ "resolution_comment": null,
+ "dynamic_fields": null,
+ "tags": "DS:Microsoft Graph",
+ "malicious_urls": null,
+ "alert_id": "1",
+ "detection_timestamp": 1708939168584,
+ "name": "DLP policy (Custom policy) matched for email with subject (Splunk Report: High Or Critical Priority Host With Malware - 15 min) - dlcc6c6fab-0202-be70-c800-08dc36ab8c3e",
+ "endpoint_id": null,
+ "description": "EMPTY",
+ "host_ip": null,
+ "host_name": null,
+ "source": "Correlation",
+ "action": "DETECTED",
+ "action_pretty": "Detected",
+ "user_name": null,
+ "events_length": 1,
+ "original_tags": "DS:Microsoft Graph",
+ "host_ip_list": [],
+ "agentossubtype": null,
+ "fwappcategory": null,
+ "fwappid": null,
+ "fwappsubcategory": null,
+ "fwapptechnology": null,
+ "causalityactorprocesscommandline": null,
+ "causalityactorprocessimagemd5": null,
+ "causalityactorprocessimagename": null,
+ "causalityactorprocessimagepath": null,
+ "causalityactorprocessimagesha256": null,
+ "causalityactorprocesssignaturestatus": "N/A",
+ "causalityactorprocesssignaturevendor": null,
+ "causalityactorcausalityid": null,
+ "identitysubtype": null,
+ "identitytype": null,
+ "operationname": null,
+ "cloudprovider": null,
+ "referencedresource": null,
+ "resourcesubtype": null,
+ "resourcetype": null,
+ "clustername": null,
+ "containerid": null,
+ "containsfeaturedhost": "NO",
+ "containsfeaturedip": "NO",
+ "containsfeatureduser": "NO",
+ "actioncountry": "UNKNOWN",
+ "fwinterfaceto": null,
+ "dnsqueryname": null,
+ "agentdevicedomain": null,
+ "fwemailrecipient": null,
+ "fwemailsender": null,
+ "fwemailsubject": null,
+ "eventtype": null,
+ "iswhitelisted": false,
+ "actionfilemacrosha256": null,
+ "actionfilemd5": null,
+ "actionfilename": null,
+ "actionfilepath": null,
+ "actionfilesha256": null,
+ "fwdevicename": null,
+ "fwruleid": null,
+ "fwrule": null,
+ "fwserialnumber": null,
+ "agentfqdn": null,
+ "agentostype": "NO_HOST",
+ "imagename": null,
+ "actorprocessimagename": null,
+ "actorprocesscommandline": null,
+ "actorprocessimagemd5": null,
+ "actorprocessimagepath": null,
+ "actorprocessospid": null,
+ "actorprocessimagesha256": null,
+ "actorprocesssignaturestatus": "N/A",
+ "actorprocesssignaturevendor": null,
+ "actorthreadthreadid": null,
+ "fwisphishing": "N/A",
+ "actionlocalip": null,
+ "actionlocalport": null,
+ "fwmisc": null,
+ "mitretacticidandname": null,
+ "mitretechniqueidandname": null,
+ "moduleid": null,
+ "fwvsys": null,
+ "osactorprocesscommandline": null,
+ "osactorthreadthreadid": null,
+ "osactorprocessimagename": null,
+ "osactorprocessospid": null,
+ "osactorprocessimagesha256": null,
+ "osactorprocesssignaturestatus": "N/A",
+ "osactorprocesssignaturevendor": null,
+ "osactoreffectiveusername": null,
+ "actionprocesssignaturestatus": "N/A",
+ "actionprocesssignaturevendor": null,
+ "actionregistrydata": null,
+ "actionregistryfullkey": null,
+ "actionexternalhostname": null,
+ "actionremoteip": null,
+ "actionremoteport": null,
+ "matchingserviceruleid": "16",
+ "fwinterfacefrom": null,
+ "actionprocessimagecommandline": null,
+ "actionprocessimagename": null,
+ "actionprocessimagesha256": null,
+ "fwurldomain": null,
+ "useragent": null,
+ "fwxff": null,
+ "externalid": "0dc55b4f-3e15-4380-9efd-706a67b16c34",
+ "matchingstatus": "UNMATCHABLE",
+ "endmatchattemptts": null,
+ "localinsertts": 1708939170883,
+ "lastmodifiedts": null,
+ "biocindicator": null,
+ "attemptcounter": 0,
+ "bioccategoryenumkey": null,
+ "caseid": 1,
+ "deduplicatetokens": null,
+ "filterruleid": null,
+ "agentversion": null,
+ "agentipaddressesv6": null,
+ "agentdatacollectionstatus": null,
+ "agentisvdi": null,
+ "agentinstalltype": "NA",
+ "agenthostboottime": null,
+ "eventsubtype": null,
+ "associationstrength": null,
+ "dstassociationstrength": null,
+ "storyid": null,
+ "eventid": null,
+ "eventtimestamp": null,
+ "actorprocessinstanceid": null,
+ "actorprocesscausalityid": null,
+ "actorcausalityid": null,
+ "causalityactorprocessexecutiontime": null,
+ "actionregistrykeyname": null,
+ "actionregistryvaluename": null,
+ "actionlocalipv6": null,
+ "actionremoteipv6": null,
+ "actionprocessinstanceid": null,
+ "actionprocesscausalityid": null,
+ "osactorprocessinstanceid": null,
+ "osactorprocessimagepath": null,
+ "osactorprocesscausalityid": null,
+ "osactorcausalityid": null,
+ "dstagentid": null,
+ "dstcausalityactorprocessexecutiontime": null,
+ "dstactionexternalhostname": null,
+ "dstactioncountry": null,
+ "dstactionexternalport": null,
+ "ispcap": false,
+ "imageid": null,
+ "containername": null,
+ "alerttype": "Unclassified",
+ "resolutionstatus": "STATUS_010_NEW",
+ "resolutioncomment": null,
+ "dynamicfields": null,
+ "maliciousurls": null,
+ "alertid": "1",
+ "detectiontimestamp": 1708939168584,
+ "endpointid": null,
+ "hostip": null,
+ "hostname": null,
+ "actionpretty": "Detected",
+ "username": null,
+ "eventslength": 1,
+ "originaltags": "DS:Microsoft Graph",
+ "hostiplist": []
+ }
+ ],
+ "file_artifacts": [],
+ "network_artifacts": [],
+ "id": "1",
+ "owner": ""
+}
\ No newline at end of file
diff --git a/Packs/CortexXDR/Integrations/XDR_iocs/README.md b/Packs/CortexXDR/Integrations/XDR_iocs/README.md
index 58eba1c28ce8..8240a5943995 100644
--- a/Packs/CortexXDR/Integrations/XDR_iocs/README.md
+++ b/Packs/CortexXDR/Integrations/XDR_iocs/README.md
@@ -165,7 +165,10 @@ Creates the sync file for the manual process. Run this command when instructed b
`xdr-iocs-create-sync-file`
#### Input
-There are no input arguments for this command.
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| zip | Whether to zip the output file. | Required |
+| set_time | Whether to modify the sync time locally. | Required |
#### Context Output
diff --git a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.py b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.py
index bad8a24d0f72..f3d2b5a7345a 100644
--- a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.py
+++ b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.py
@@ -253,7 +253,44 @@ def demisto_types_to_xdr(_type: str) -> str:
return xdr_type
-def _parse_demisto_comments(ioc: dict, comment_field_name: str, comments_as_tags: bool) -> list[str] | None:
+def _parse_demisto_comments(ioc: dict, comment_field_name: list[str] | str, comments_as_tags: bool) -> list[Any] | None:
+ """"
+ Parsing xsoar fields to xdr from multiple fields value or a single value.
+ Args:
+ ioc (dict): the IOC dict.
+ comment_field_name (list[str] | str): the name of the comment field(s) to parse.
+ comments_as_tags (bool): whether to return comments as XDR tags rather than notes.
+
+ Returns:
+ A list with the parsed comment(s) joined by commas if multiple comment fields were provided,
+ otherwise the parsed comment from the single provided field.
+ Returns None if no comments were found.
+ """
+ # parse comments from multiple fields if specified as list
+ if isinstance(comment_field_name, list):
+ comments = []
+ for field in comment_field_name:
+ parsing = parse_demisto_single_comments(ioc, field, comments_as_tags)
+ if parsing:
+ comments.extend(parsing)
+ return [', '.join(comments)]
+
+ # else return single field
+ return parse_demisto_single_comments(ioc, comment_field_name, comments_as_tags)
+
+
+def parse_demisto_single_comments(ioc: dict, comment_field_name: list[str] | str, comments_as_tags: bool) -> list[str] | None:
+ """"
+ Parsing xsoar fields to xdr from a single value.
+ Args:
+ ioc (dict): the IOC dict.
+ comment_field_name (list[str] | str): the name of the comment field(s) to parse.
+ comments_as_tags (bool): whether to return comments as XDR tags rather than notes.
+
+ Returns:
+ The parsed comment from the single provided field.
+ Returns None if no comments were found.
+ """
if comment_field_name == 'comments':
if comments_as_tags:
raise DemistoException("When specifying comments_as_tags=True, the xsoar_comment_field cannot be `comments`)."
@@ -267,6 +304,12 @@ def _parse_demisto_comments(ioc: dict, comment_field_name: str, comments_as_tags
return None
return [comment]
+ elif comment_field_name == 'indicator_link':
+ # parse indicator link into comments field
+ if is_xsoar_saas():
+ return [f'{demisto.demistoUrls().get("server")}/indicator/{ioc.get("id")}']
+ return [f'{demisto.demistoUrls().get("server")}/#/indicator/{ioc.get("id")}']
+
else: # custom comments field
if not (raw_comment := ioc.get('CustomFields', {}).get(comment_field_name)):
return None
@@ -291,8 +334,9 @@ def demisto_ioc_to_xdr(ioc: dict) -> dict:
xdr_ioc['reliability'] = aggregated_reliability[0]
if vendors := demisto_vendors_to_xdr(ioc.get('moduleToFeedMap', {})):
xdr_ioc['vendors'] = vendors
- if (comment := _parse_demisto_comments(ioc=ioc, comment_field_name=Client.xsoar_comments_field,
- comments_as_tags=Client.comments_as_tags)):
+
+ if comment := _parse_demisto_comments(ioc=ioc, comment_field_name=Client.xsoar_comments_field,
+ comments_as_tags=Client.comments_as_tags):
xdr_ioc['comment'] = comment
custom_fields = ioc.get('CustomFields', {})
diff --git a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.yml b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.yml
index 2558c2572afb..984e2e0ebd3b 100644
--- a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.yml
+++ b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs.yml
@@ -135,10 +135,15 @@ configuration:
defaultvalue: 'sourceoriginalseverity'
required: false
- defaultvalue: 'comments'
- additionalinfo: The Cortex XSOAR field where comments are stored. Default is `comments`. Expecting a Cortex XSOAR IOC format of a comment (nested dictionary). Specifying a different field name assumes the field has a list of strings to be used as comments, or a comma-separated string.
- display: XSOAR Comment Field
+ additionalinfo: 'The Cortex XSOAR field where comments are stored and being export to XDR. Default is `comments`. Expecting a Cortex XSOAR IOC format of a comment (nested dictionary).'
+ display: XSOAR Comment Field Exporting To XDR
name: xsoar_comments_field
- type: 0
+ options:
+ - indicator_link
+ - comments
+ type: 16
+ section: Collect
+ advanced: true
required: false
- defaultvalue: 'false'
additionalinfo: Whether to consider the value at `xsoar_comments_field` as CSV. Requires specifying a xsoar_comments_field value different than the default `comments`.
@@ -215,7 +220,7 @@ script:
required: true
description: Disables IOCs in the XDR server.
name: xdr-iocs-disable
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.89009
feed: true
runonce: false
script: '-'
diff --git a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs_test.py b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs_test.py
index 2627308a1cc1..c4afc860e5ec 100644
--- a/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs_test.py
+++ b/Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs_test.py
@@ -1040,3 +1040,61 @@ def test_set_new_iocs_to_keep_time(random_int, expected_next_time, mocker):
set_integration_context_mock = mocker.patch.object(demisto, 'setIntegrationContext')
set_new_iocs_to_keep_time()
set_integration_context_mock.assert_called_once_with({'next_iocs_to_keep_time': expected_next_time})
+
+
+def test_parse_demisto_comments_url_xsoar_6_default(mocker):
+ """
+ Given:
+ - xsoar version 6, a custom field name, and comma-separated comments in it
+ When:
+ - parsing a comment of the url indicator field
+ Then:
+ - check the output values
+ """
+ from XDR_iocs import _parse_demisto_comments
+ inc_id = '111111'
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'url'})
+ assert _parse_demisto_comments(
+ ioc={'id': inc_id},
+ comment_field_name='indicator_link',
+ comments_as_tags=False
+ ) == [f'url/#/indicator/{inc_id}']
+
+
+def test_parse_demisto_comments_url_xsoar_8_default(mocker):
+ """
+ Given:
+ - xsoar version that is greater than 8, a custom field name, and comma-separated comments in it
+ When:
+ - parsing a comment of the url indicator field
+ Then:
+ - check the output values
+ """
+ import XDR_iocs
+ os.environ['CRTX_HTTP_PROXY'] = 'xsoar_8_proxy'
+ inc_id = '111111'
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'url'})
+ mocker.patch.object(XDR_iocs, 'is_xsoar_saas', return_value=True)
+ assert XDR_iocs._parse_demisto_comments(
+ ioc={'id': inc_id},
+ comment_field_name='indicator_link',
+ comments_as_tags=False
+ ) == [f'url/indicator/{inc_id}']
+
+
+def test_parse_demisto_list_of_comments_default(mocker):
+ """
+ Given a custom field name, and comma-separated comments in it
+ When parsing a comment of the url indicator field
+ Then check the output values
+ """
+ from XDR_iocs import _parse_demisto_comments
+ inc_id = '111111'
+ comment_value = 'here be comment'
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'url'})
+ assert _parse_demisto_comments(
+ ioc={Client.xsoar_comments_field: [{'type': 'IndicatorCommentRegular', 'content': comment_value}],
+ 'id': inc_id},
+ comment_field_name=['indicator_link', Client.xsoar_comments_field],
+ comments_as_tags=False
+ ) == [f'url/#/indicator/{inc_id}, {comment_value}']
diff --git a/Packs/CortexXDR/Layouts/layoutscontainer-Cortex_XDR_Incident.json b/Packs/CortexXDR/Layouts/layoutscontainer-Cortex_XDR_Incident.json
index 444b57b68205..5ba21044621f 100644
--- a/Packs/CortexXDR/Layouts/layoutscontainer-Cortex_XDR_Incident.json
+++ b/Packs/CortexXDR/Layouts/layoutscontainer-Cortex_XDR_Incident.json
@@ -1343,6 +1343,400 @@
],
"type": "custom"
},
+{
+ "filters": [
+ [
+ {
+ "id": 0,
+ "ignoreCase": false,
+ "left": {
+ "isContext": true,
+ "value": {
+ "simple": "alerttags"
+ }
+ },
+ "operator": "containsString",
+ "right": {
+ "isContext": false,
+ "value": {
+ "simple": "DT:Identity Analytics"
+ }
+ },
+ "type": "shortText"
+ }
+ ]
+ ],
+ "hidden": false,
+ "id": "wmtowidpmp",
+ "name": "Identity Analytics",
+ "sections": [
+ {
+ "displayType": "CARD",
+ "h": 2,
+ "hideName": false,
+ "i": "wmtowidpmp-bf27ef60-bdc0-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "numberoffoundrelatedalerts",
+ "height": 53,
+ "id": "12f10ff0-bdc1-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "failedlogonevents",
+ "height": 53,
+ "id": "18745900-bdc1-11ee-a451-7b39f6b1cb95",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "userrisklevel",
+ "height": 53,
+ "id": "3d79a2a0-bdc6-11ee-81b8-1dd092d8ebaf",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Investigation Results",
+ "static": false,
+ "w": 1,
+ "x": 2,
+ "y": 0
+ },
+ {
+ "h": 2,
+ "i": "wmtowidpmp-c1842ee0-bdc0-11ee-a451-7b39f6b1cb95",
+ "items": [],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Verdict",
+ "query": "VerdictResult",
+ "queryType": "script",
+ "static": false,
+ "type": "dynamic",
+ "w": 1,
+ "x": 1,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "hideName": false,
+ "i": "wmtowidpmp-452c1a00-bdc1-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "endCol": 6,
+ "fieldId": "xdralertsearchresults",
+ "height": 106,
+ "id": "4b98ffc0-bdc1-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "FOUND RELATED ALERTS",
+ "static": false,
+ "w": 3,
+ "x": 0,
+ "y": 4
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "wmtowidpmp-79b11410-bdc1-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "args": {},
+ "buttonClass": "error",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "hexColor": null,
+ "id": "99d3be40-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "name": "Clear User's Sessions - Okta ",
+ "scriptId": "Okta v2|||okta-clear-user-sessions",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {},
+ "buttonClass": "error",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "hexColor": null,
+ "id": "9d567c60-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 1,
+ "name": "Revoke User's Sessions - Azure",
+ "scriptId": "Microsoft Graph User|||msgraph-user-session-revoke",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {},
+ "buttonClass": "error",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "hexColor": null,
+ "id": "ddbdbf80-d4a0-11ee-a1be-156b316886a1",
+ "index": 2,
+ "name": "Reset Password - Active Directory",
+ "scriptId": "Active Directory Query v2|||ad-set-new-password",
+ "sectionItemType": "button",
+ "startCol": 0
+ },
+ {
+ "args": {},
+ "buttonClass": "error",
+ "endCol": 2,
+ "fieldId": "",
+ "height": 44,
+ "hexColor": null,
+ "id": "9eb80830-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 3,
+ "name": "Disable Account",
+ "scriptId": "DisableUserWrapper",
+ "sectionItemType": "button",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Response Actions",
+ "static": false,
+ "w": 1,
+ "x": 2,
+ "y": 2
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "wmtowidpmp-a767f590-bdc1-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "username",
+ "height": 26,
+ "id": "30f34b20-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "accountstatus",
+ "height": 26,
+ "id": "37d783c0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "email",
+ "height": 26,
+ "id": "3d0580e0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "accountmemberof",
+ "height": 26,
+ "id": "41c8ae40-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "cloudaccountid",
+ "height": 26,
+ "id": "4738bd70-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "accountid",
+ "height": 26,
+ "id": "4c778370-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 5,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "manageremailaddress",
+ "height": 26,
+ "id": "535e5420-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Identity Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 2
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "wmtowidpmp-a9ad51b0-bdc1-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "sourceip",
+ "height": 26,
+ "id": "0dc9ad10-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "asn",
+ "height": 26,
+ "id": "120ef7e0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "countrycode",
+ "height": 26,
+ "id": "1b4efad0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "ipreputation",
+ "height": 26,
+ "id": "2020f540-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Connection Details",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 2
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "wmtowidpmp-ac89e4c0-bdc1-11ee-a451-7b39f6b1cb95",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "alertname",
+ "height": 26,
+ "id": "6334e4e0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xdrdescription",
+ "height": 26,
+ "id": "67cae7c0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "detecteduser",
+ "height": 26,
+ "id": "27211850-bdd8-11ee-81b8-1dd092d8ebaf",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "detectedips",
+ "height": 26,
+ "id": "2b5e73e0-bdd8-11ee-81b8-1dd092d8ebaf",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "occurred",
+ "height": 26,
+ "id": "8cfc63c0-bdc2-11ee-a451-7b39f6b1cb95",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "alerttags",
+ "height": 26,
+ "id": "8e352020-bde9-11ee-b6e4-23c51981518e",
+ "index": 5,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Alert Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ }
+ ],
+ "type": "custom"
+ },
{
"filters": [
[
@@ -2085,6 +2479,238 @@
],
"type": "custom"
},
+ {
+ "filters": [
+ [
+ {
+ "ignoreCase": false,
+ "left": {
+ "isContext": true,
+ "value": {
+ "simple": "xdralertname"
+ }
+ },
+ "operator": "containsGeneral",
+ "right": {
+ "isContext": false,
+ "value": {
+ "simple": "Large Upload"
+ }
+ },
+ "type": "multiSelect"
+ }
+ ]
+ ],
+ "hidden": false,
+ "id": "vycnvdgk6l",
+ "name": "Large Upload",
+ "sections": [
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "1vduzkpmlh-6774ac10-dfdd-11ee-a718-2d25454b6746",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "alertname",
+ "height": 22,
+ "id": "7fc88c00-dfdd-11ee-a718-2d25454b6746",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xdrdescription",
+ "height": 22,
+ "id": "8640e370-dfdd-11ee-a718-2d25454b6746",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "occurred",
+ "height": 22,
+ "id": "bb2f26a0-dfdd-11ee-a718-2d25454b6746",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "hostname",
+ "height": 22,
+ "id": "f9cac5d0-dfe3-11ee-a718-2d25454b6746",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "xdrusers",
+ "height": 22,
+ "id": "17b59de0-dfe4-11ee-a718-2d25454b6746",
+ "index": 4,
+ "listId": "1vduzkpmlh-6774ac10-dfdd-11ee-a718-2d25454b6746",
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Alert Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "1vduzkpmlh-469b1ee0-dfe5-11ee-a718-2d25454b6746",
+ "items": [
+ {
+ "endCol": 6,
+ "fieldId": "xdrnetworkartifacts",
+ "height": 106,
+ "id": "11883550-dfe8-11ee-a718-2d25454b6746",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Network Artifacts",
+ "static": false,
+ "w": 3,
+ "x": 0,
+ "y": 2
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "1vduzkpmlh-8a36af70-e071-11ee-9f1d-c7e880f68802",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "verdict",
+ "height": 22,
+ "id": "534d3880-e071-11ee-9f1d-c7e880f68802",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "partofcampaign",
+ "height": 22,
+ "id": "a2f3d880-e071-11ee-9f1d-c7e880f68802",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "relatedreport",
+ "height": 22,
+ "id": "ab7c7250-e071-11ee-9f1d-c7e880f68802",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "commandline",
+ "height": 22,
+ "id": "7ae9b4c0-dfe7-11ee-a718-2d25454b6746",
+ "index": 3,
+ "listId": "1vduzkpmlh-fc683030-dfe7-11ee-a718-2d25454b6746",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "commandlineverdict",
+ "height": 22,
+ "id": "c51aa040-dfe7-11ee-a718-2d25454b6746",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "threathuntingdetectedhostnames",
+ "height": 22,
+ "id": "f14e00a0-e148-11ee-9a3d-4f29cba1e42e",
+ "index": 5,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "threathuntingdetectedip",
+ "height": 22,
+ "id": "f4749500-e148-11ee-9a3d-4f29cba1e42e",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Investigation Details",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "1vduzkpmlh-ea6fb620-e071-11ee-9f1d-c7e880f68802",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "containmentsla",
+ "height": 22,
+ "id": "955e68b0-e072-11ee-9f1d-c7e880f68802",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "remediationsla",
+ "height": 22,
+ "id": "a8baebe0-e072-11ee-9f1d-c7e880f68802",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Containment and Remediation Status",
+ "static": false,
+ "w": 1,
+ "x": 2,
+ "y": 0
+ }
+ ],
+ "type": "custom"
+ },
{
"hidden": false,
"id": "lthdv5gwt9",
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload.yml b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload.yml
new file mode 100644
index 000000000000..c297ca567bda
--- /dev/null
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload.yml
@@ -0,0 +1,4016 @@
+id: Cortex XDR - Large Upload
+version: -1
+name: Cortex XDR - Large Upload
+description: "The playbook investigates Cortex XDR incidents involving large upload alerts. The playbook is designed to run as a sub-playbook of ‘Cortex XDR Alerts Handling v2’. \n\nThe playbook consists of the following procedures:\n- Searches for similar previous incidents that were closed as false positives.\n- Enrichment and investigation of the initiator and destination hostname and IP address.\n- Enrichment and investigation of the initiator user, process, file, or command if it exists.\n- Detection of related indicators and analysis of the relationship between the detected indicators.\n- Utilize the detected indicators to conduct threat hunting.\n- Blocks detected malicious indicators.\n- Endpoint isolation.\n\nThis playbook supports the following Cortex XDR alert names:\n- Large Upload (Generic)\n- Large Upload (SMTP)\n- Large Upload (FTP)\n- Large Upload (HTTPS)"
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: b5bf1dbd-3948-4df9-81a7-71ae2fc80581
+ type: start
+ task:
+ id: b5bf1dbd-3948-4df9-81a7-71ae2fc80581
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 6581026b-5e6e-46cb-836a-1d850269f0ed
+ type: title
+ task:
+ id: 6581026b-5e6e-46cb-836a-1d850269f0ed
+ version: -1
+ name: Check Previous Similar Incidents
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "4"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": 100
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 2864f683-d804-43bb-8946-4d90039fc751
+ type: condition
+ task:
+ id: 2864f683-d804-43bb-8946-4d90039fc751
+ version: -1
+ name: Found Results?
+ description: Determine if previous false positive incidents have been detected with similar characteristics.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "113"
+ "yes":
+ - "5"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: DBotFindSimilarIncidents
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - - operator: containsGeneral
+ left:
+ value:
+ complex:
+ root: DBotFindSimilarIncidents.similarIncident
+ accessor: closeReason
+ transformers:
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+              simple: False Positive
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": 390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: d44093f6-2027-42d7-8815-ffe1ca89dbce
+ type: regular
+ task:
+ id: d44093f6-2027-42d7-8815-ffe1ca89dbce
+ version: -1
+ name: Get Similar False-Positives Closed Incidents
+ description: Find past similar incidents based on incident fields' similarity. Includes an option to also display indicators similarity.
+ scriptName: DBotFindSimilarIncidents
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ fieldExactMatch:
+ simple: incident.type
+ fieldsToDisplay:
+ simple: closeReason,closeNotes
+ fromDate:
+ simple: 2 months ago
+ minimunIncidentSimilarity:
+ simple: "0.8"
+ query:
+ simple: incident.closeReason:"False Positive"
+ similarCategoricalField:
+ simple: incident.xdrfileartifacts.filesha256,incident.xdralerts.hostip
+ similarTextField:
+ simple: incident.xdralerts.osactorprocesscommandline,incident.xdralerts.actorprocesscommandline,incident.xdralerts.actionprocessimagecommandline,incident.xdralerts.causalityactorprocesscommandline,incident.xdralerts.host_name,incident.xdralerts.user_name
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": 230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 18898ae5-33f6-44cf-8618-e93112dcb2b3
+ type: condition
+ task:
+ id: 18898ae5-33f6-44cf-8618-e93112dcb2b3
+ version: -1
+ name: Investigation Should be Continued?
+ description: Determines whether an alert requires further investigation if similar previous false positive incidents were found.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "58"
+ "yes":
+ - "7"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: inputs.FurtherInvestigation
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3040,
+ "y": 560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: fa5be6a6-a64a-4e1e-8a29-02ffbf4a3920
+ type: condition
+ task:
+ id: fa5be6a6-a64a-4e1e-8a29-02ffbf4a3920
+ version: -1
+ name: Alert Investigation Should Continue?
+ description: |-
+ The playbook detected similar incidents that were closed as false positives previously.
+
+ Review the incident details and similar closed false positive incidents to determine whether this Cortex XDR alert should be further investigated.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "58"
+ "Yes":
+ - "113"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2830,
+ "y": 730
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 7dd46caa-5159-4e41-8c8c-2bbd79f540ab
+ type: title
+ task:
+ id: 7dd46caa-5159-4e41-8c8c-2bbd79f540ab
+ version: -1
+ name: Analysis
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "84"
+ - "85"
+ - "107"
+ - "82"
+ - "88"
+ - "109"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 1515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: db25c15f-7de2-4500-8a6f-0e6059d52255
+ type: title
+ task:
+ id: db25c15f-7de2-4500-8a6f-0e6059d52255
+ version: -1
+ name: Containment
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "15"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 2010
+ }
+ }
+ note: false
+ timertriggers:
+ - fieldname: containmentsla
+ action: start
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 54decb4b-024a-43f6-81de-8e9ffc1da41f
+ type: condition
+ task:
+ id: 54decb4b-024a-43f6-81de-8e9ffc1da41f
+ version: -1
+ name: Early containment enabled?
+ description: Checks whether early containment is enabled for this playbook before executing containment.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "17"
+ "yes":
+ - "16"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: inputs.EarlyContainment
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1360,
+ "y": 2310
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "15":
+ id: "15"
+ taskid: a9ae6eb8-74ee-4c68-8126-f911e37bd370
+ type: condition
+ task:
+ id: a9ae6eb8-74ee-4c68-8126-f911e37bd370
+ version: -1
+ name: Malicious Indicators Found?
+ description: Checks whether there are any malicious indicators to block.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "17"
+ "yes":
+ - "14"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: Domain
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: File
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: Hash
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 2140
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "16":
+ id: "16"
+ taskid: aadafb00-7daa-4149-82de-c0b89edd8faf
+ type: playbook
+ task:
+ id: aadafb00-7daa-4149-82de-c0b89edd8faf
+ version: -1
+ name: Block Indicators - Generic v3
+ description: |-
+ This playbook blocks malicious indicators using all integrations that are enabled, using the following sub-playbooks:
+
+ - Block URL - Generic v2
+ - Block Account - Generic v2
+ - Block IP - Generic v3
+ - Block File - Generic v2
+ - Block Email - Generic v2
+ - Block Domain - Generic v2.
+ playbookName: Block Indicators - Generic v3
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "17"
+ scriptarguments:
+ AutoBlockIndicators:
+ complex:
+ root: inputs.AutoBlockIndicators
+ AutoCommit:
+ simple: "No"
+ CustomBlockRule:
+ simple: "True"
+ CustomURLCategory:
+ simple: XSOAR Remediation - Malicious URLs
+ DomainToBlock:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: domain
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ InputEnrichment:
+ simple: "False"
+ RuleDirection:
+ simple: outbound
+ RuleName:
+ simple: XSOAR - Block Indicators playbook - ${incident.id}
+ SHA256:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "64"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ UserVerification:
+ complex:
+ root: inputs.BlockIndicators_UserVerification
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1360,
+ "y": 2480
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: 47ae1efd-7b1b-4044-885e-172be32b8fab
+ type: title
+ task:
+ id: 47ae1efd-7b1b-4044-885e-172be32b8fab
+ version: -1
+ name: Investigation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "22"
+ - "24"
+ - "18"
+ - "19"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 2655
+ }
+ }
+ note: false
+ timertriggers:
+ - fieldname: containmentsla
+ action: stop
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: fb72c0dc-c54b-4097-8a82-608e23e1eb46
+ type: playbook
+ task:
+ id: fb72c0dc-c54b-4097-8a82-608e23e1eb46
+ version: -1
+ name: Cortex XDR - Endpoint Investigation
+ description: "This playbook is part of the 'Malware Investigation And Response' pack. For more information, refer to https://xsoar.pan.dev/docs/reference/packs/malware-investigation-and-response. This playbook handles all the endpoint investigation actions available with Cortex XSOAR, including the following tasks:\n * Pre-defined MITRE Tactics\n * Host fields (Host ID)\n * Attacker fields (Attacker IP, External host)\n * MITRE techniques\n * File hash (currently, the playbook supports only SHA256) \n\n Note: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
+ playbookName: Cortex XDR - Endpoint Investigation
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ FileSHA256:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: os_actor_process_image_sha256
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_sha256
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_sha256
+ iscontext: true
+ - operator: uniq
+ HuntAttacker:
+ simple: "True"
+ HuntByFile:
+ simple: "True"
+ HuntByHost:
+ simple: "True"
+ HuntCnCTechniques:
+ simple: "True"
+ HuntCollectionTechniques:
+ simple: "True"
+ HuntDefenseEvasionTechniques:
+ simple: "True"
+ HuntDiscoveryTechniques:
+ simple: "True"
+ HuntExecutionTechniques:
+ simple: "True"
+ HuntImpactTechniques:
+ simple: "True"
+ HuntInitialAccessTechniques:
+ simple: "True"
+ HuntLateralMovementTechniques:
+ simple: "True"
+ HuntPersistenceTechniques:
+ simple: "True"
+ HuntPrivilegeEscalationTechniques:
+ simple: "True"
+ HuntReconnaissanceTechniques:
+ simple: "True"
+ RunAll:
+ simple: "True"
+ agentID:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: agent_id
+ transformers:
+ - operator: uniq
+ attackerExternalHost:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: Domain
+ ignorecase: true
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "2"
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ attackerRemoteIP:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: IP
+ ignorecase: true
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ timeRange:
+ simple: 2 hours ago
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1820,
+ "y": 2800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: 98db4730-e7e9-4d8e-8722-1b2fe9704f3d
+ type: playbook
+ task:
+ id: 98db4730-e7e9-4d8e-8722-1b2fe9704f3d
+ version: -1
+ name: TIM - Indicator Relationships Analysis
+ description: |-
+ This playbook is designed to assist with a security investigation by providing an analysis of indicator relationships. The following information is included:
+ - Indicators of compromise (IOCs) related to the investigation.
+ - Attack patterns related to the investigation.
+ - Campaigns related to the investigation.
+ - IOCs associated with the identified campaigns.
+ - Reports containing details on the identified campaigns.
+ playbookName: TIM - Indicator Relationships Analysis
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "115"
+ scriptarguments:
+ Indicator:
+ complex:
+ root: DBotScore
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ LimitResults:
+ simple: "200"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 2800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: 09d0928b-9634-43c9-8b72-377241818a67
+ type: playbook
+ task:
+ id: 09d0928b-9634-43c9-8b72-377241818a67
+ version: -1
+ name: User Investigation - Generic
+ description: |-
+ This playbook performs an investigation on a specific user, using queries and logs from SIEM, Identity management systems, XDR, and firewalls.
+
+ Supported Integrations:
+ -Okta
+ -Splunk
+ -QRadar
+ -Azure Log Analytics
+ -PAN-OS
+ -XDR / Core By Palo Alto Networks.
+ playbookName: User Investigation - Generic
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ AzureSearchTime:
+ simple: ago(1d)
+ OktaSearch:
+ simple: "True"
+ QRadarSearchTime:
+ simple: Last 1 days
+ SIEMFailedLogonSearch:
+ simple: "True"
+ SplunkEarliestTime:
+ simple: -1d
+ SplunkIndex:
+ simple: '*'
+ SplunkLatestTime:
+ simple: now
+ ThreatLogSearch:
+ simple: "True"
+ Username:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_primary_normalized_user
+ accessor: identity
+ transformers:
+ - operator: uniq
+ XDRAlertSearch:
+ simple: "True"
+ XDRUsernameField:
+ simple: actor_effective_username
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1390,
+ "y": 2975
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: 020df797-a905-4c06-8b81-7cd6479666cf
+ type: playbook
+ task:
+ id: 020df797-a905-4c06-8b81-7cd6479666cf
+ version: -1
+ name: Command-Line Analysis
+ description: "This playbook takes a command line from the alert and performs the following actions:\n- Checks for base64 string and decodes if exists\n- Extracts and enriches indicators from the command line\n- Checks specific arguments for malicious usage \n\nAt the end of the playbook, it sets a possible verdict for the command line, based on the finding:\n1. Indicators found in the command line\n2. Found AMSI techniques\n3. Found suspicious parameters\n4. Usage of malicious tools\n5. Indication of network activity\n6. Indication of suspicious LOLBIN execution\n\nNote: In case you are wishing to run this playbook with a list of command lines, set this playbook to be running in a loop. To do so, navigate to the 'Loop' and check \"For Each Input\"."
+ playbookName: Command-Line Analysis
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ Commandline:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: os_actor_process_command_line
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_command_line
+ iscontext: true
+ - operator: uniq
+ StringSimilarityThreshold:
+ simple: "0.5"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ forEach: true
+ view: |-
+ {
+ "position": {
+ "x": 2640,
+ "y": 2975
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: ea9acbd4-6456-49e2-895f-a2622b8bdcd2
+ type: condition
+ task:
+ id: ea9acbd4-6456-49e2-895f-a2622b8bdcd2
+ version: -1
+ name: Has User to Investigate?
+ description: |
+ Checks whether an initiator username is available for investigation.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "32"
+ "yes":
+ - "20"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_primary_normalized_user
+ accessor: identity
+ transformers:
+ - operator: uniq
+ iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1390,
+ "y": 2800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "24":
+ id: "24"
+ taskid: ca264388-0ba3-4fa7-8595-11b0997bb461
+ type: condition
+ task:
+ id: ca264388-0ba3-4fa7-8595-11b0997bb461
+ version: -1
+ name: Has Command-Line to Investigate?
+ description: |
+ Checks whether an initiator command-line is available for investigation.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "32"
+ "yes":
+ - "21"
+ - "70"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: actor_process_command_line
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_command_line
+ iscontext: true
+ iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 2800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "25":
+ id: "25"
+ taskid: 9e426f5e-e46a-4d43-8ce9-ae2db84c200f
+ type: condition
+ task:
+ id: 9e426f5e-e46a-4d43-8ce9-ae2db84c200f
+ version: -1
+ name: Has Command-Line Investigation Results?
+ description: Checks whether the command-line investigation produced any results.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "32"
+ "yes":
+ - "27"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: Findings
+ iscontext: true
+ right:
+ value: {}
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: suspiciousParameters
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: AMSI
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: foundIndicators
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: maliciousTools
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: networkActivity
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CommandlineVerdict
+ accessor: SuspiciousLolbinExecution
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2440,
+ "y": 3150
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "26":
+ id: "26"
+ taskid: 356a8af0-8b4b-4621-8ad8-3879d083cc0c
+ type: title
+ task:
+ id: 356a8af0-8b4b-4621-8ad8-3879d083cc0c
+ version: -1
+ name: Investigation - Related Indicators Hunt
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "28"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1190,
+ "y": 4100
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: 8b072891-87d4-4174-850f-885bc502da8e
+ type: regular
+ task:
+ id: 8b072891-87d4-4174-850f-885bc502da8e
+ version: -1
+ name: Set Malicious Command-Line
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ key:
+ simple: Investigation.CMDReputation
+ value:
+ simple: Malicious
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2440,
+ "y": 3330
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: Command Line
+ output:
+ simple: ${commandline.original}
+ - incidentfield: Command Line Verdict
+ output:
+ simple: Malicious
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "28":
+ id: "28"
+ taskid: 1f86ac51-6b86-4e74-8906-17a8b741d724
+ type: condition
+ task:
+ id: 1f86ac51-6b86-4e74-8906-17a8b741d724
+ version: -1
+ name: Found Related Indicators to Hunt?
+ description: Checks whether there are any indicators to hunt for.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "54"
+ "yes":
+ - "29"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: RelatedFiles
+ iscontext: true
+ right:
+ value: {}
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: RelatedDomains
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: RelatedIPs
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: RelatedURLs
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1190,
+ "y": 4230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "29":
+ id: "29"
+ taskid: db9478b5-07bc-4c3a-81ba-8dbcabdd41a3
+ type: playbook
+ task:
+ id: db9478b5-07bc-4c3a-81ba-8dbcabdd41a3
+ version: -1
+ name: Threat Hunting - Generic
+ description: "This playbook enables threat hunting for IOCs in your enterprise. It currently supports the following integrations: \n- Splunk\n- Qradar\n- Pan-os \n- Cortex data lake \n- Autofocus\n- Microsoft 365 Defender"
+ playbookName: Threat Hunting - Generic
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "30"
+ scriptarguments:
+ IPAddress:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: IP
+ ignorecase: true
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedIPs
+ iscontext: true
+ - operator: uniq
+ InternalRange:
+ complex:
+ root: inputs.InternalIPRanges
+
+ MD5:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ ignorecase: true
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "32"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedFiles
+ iscontext: true
+ - operator: uniq
+ QRadarTimeFrame:
+ simple: LAST 7 DAYS
+ SHA1:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ ignorecase: true
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "40"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ SHA256:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ ignorecase: true
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "64"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedFiles
+ iscontext: true
+ - operator: uniq
+ SplunkEarliestTime:
+ simple: -7d@d
+ SplunkLatestTime:
+ simple: now
+ URLDomain:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: domain
+ ignorecase: true
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedDomains
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedURLs
+ iscontext: true
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 4400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "30":
+ id: "30"
+ taskid: 9175d975-efa8-4e09-81db-71841a30971d
+ type: condition
+ task:
+ id: 9175d975-efa8-4e09-81db-71841a30971d
+ version: -1
+ name: Has Hunt Results?
+ description: Verifies whether there are any threat hunting results.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "54"
+ "yes":
+ - "31"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: Splunk
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PANWHunting
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: QRadar
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: Microsoft365Defender
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 4560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "31":
+ id: "31"
+ taskid: 2e63f433-ae84-44b4-8841-d88df9f0cc34
+ type: regular
+ task:
+ id: 2e63f433-ae84-44b4-8841-d88df9f0cc34
+ version: -1
+ name: Set Hunt Results True
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "54"
+ scriptarguments:
+ key:
+ simple: Investigation.HasHuntResults
+ value:
+ simple: "True"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 4720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: Threat Hunting Detected IP
+ output:
+ complex:
+ root: PANWHunting
+ accessor: DetectedExternalIPs
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: QRadar.DetectedExternalIPs
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: Splunk.DetectedExternalIPs
+ iscontext: true
+ - operator: uniq
+ - incidentfield: Threat Hunting Detected Hostnames
+ output:
+ complex:
+ root: Splunk
+ accessor: DetectedExternalHosts
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PANWHunting.DetectedExternalHosts
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: QRadar.DetectedExternalHosts
+ iscontext: true
+ - operator: uniq
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "32":
+ id: "32"
+ taskid: 71311a37-ae3f-4522-8429-e68eec247c05
+ type: title
+ task:
+ id: 71311a37-ae3f-4522-8429-e68eec247c05
+ version: -1
+ name: Set Verdict
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "112"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 3500
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "47":
+ id: "47"
+ taskid: 502ceed3-151e-4d5b-8087-cc7c2be1e586
+ type: regular
+ task:
+ id: 502ceed3-151e-4d5b-8087-cc7c2be1e586
+ version: -1
+ name: Set Incident Verdict - Malicious
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setIncident
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "26"
+ scriptarguments:
+ verdict:
+ simple: Malicious
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1190,
+ "y": 3940
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "54":
+ id: "54"
+ taskid: d03b3054-01fb-4e5f-852c-a9629b377f21
+ type: title
+ task:
+ id: d03b3054-01fb-4e5f-852c-a9629b377f21
+ version: -1
+ name: Remediation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "57"
+ - "55"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1190,
+ "y": 4890
+ }
+ }
+ note: false
+ timertriggers:
+ - fieldname: remediationsla
+ action: start
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "55":
+ id: "55"
+ taskid: 58c8104c-b6a6-43bc-86aa-bc717bac9e3f
+ type: condition
+ task:
+ id: 58c8104c-b6a6-43bc-86aa-bc717bac9e3f
+ version: -1
+ name: Endpoint auto-isolation enabled?
+ description: Determine whether the endpoint should be isolated automatically.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "58"
+ "yes":
+ - "56"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: inputs.AutoIsolateEndpoint
+ iscontext: true
+ right:
+ value:
+ simple: "true"
+ ignorecase: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: agent_id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1390,
+ "y": 5035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "56":
+ id: "56"
+ taskid: 6171569a-753b-4669-840f-1929e5f5ad53
+ type: playbook
+ task:
+ id: 6171569a-753b-4669-840f-1929e5f5ad53
+ version: -1
+ name: Cortex XDR - Isolate Endpoint
+ description: This playbook accepts an XDR endpoint ID and isolates it using the 'Palo Alto Networks Cortex XDR - Investigation and Response' integration.
+ playbookName: Cortex XDR - Isolate Endpoint
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "58"
+ scriptarguments:
+ endpoint_id:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: agent_id
+ transformers:
+ - operator: uniq
+ - operator: SetIfEmpty
+ args:
+ applyIfEmpty:
+ value:
+ simple: "true"
+ defaultValue:
+ value:
+ simple: Missing endpoint ID.Answers.0
+ iscontext: true
+ hostname:
+ complex:
+ root: inputs.SrcHostname
+ transformers:
+ - operator: uniq
+ ip_list:
+ complex:
+ root: inputs.SrcIPAddress
+ transformers:
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1390,
+ "y": 5210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "57":
+ id: "57"
+ taskid: 79d0e021-14ff-46d0-8b86-7cbec40eedcb
+ type: playbook
+ task:
+ id: 79d0e021-14ff-46d0-8b86-7cbec40eedcb
+ version: -1
+ name: Block Indicators - Generic v3
+ description: |-
+ This playbook blocks malicious indicators using all integrations that are enabled, using the following sub-playbooks:
+
+ - Block URL - Generic v2
+ - Block Account - Generic v2
+ - Block IP - Generic v3
+ - Block File - Generic v2
+ - Block Email - Generic v2
+ - Block Domain - Generic v2.
+ playbookName: Block Indicators - Generic v3
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "58"
+ scriptarguments:
+ AutoBlockIndicators:
+ complex:
+ root: inputs.AutoBlockIndicators
+ AutoCommit:
+ simple: "No"
+ CustomBlockRule:
+ simple: "True"
+ CustomURLCategory:
+ simple: XSOAR Remediation - Malicious URLs
+ DomainToBlock:
+ complex:
+ root: DBotScore.Indicator
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: domain
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - - operator: notIn
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: IndicatorsToBlock
+ iscontext: true
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedDomains
+ iscontext: true
+ - operator: uniq
+ FilesToBlock:
+ complex:
+ root: DBotScore.Indicator
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - - operator: notIn
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: IndicatorsToBlock
+ iscontext: true
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedFiles
+ iscontext: true
+ - operator: uniq
+ InputEnrichment:
+ simple: "False"
+ MD5:
+ complex:
+ root: DBotScore.Indicator
+ filters:
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "32"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ - - operator: notIn
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: IndicatorsToBlock
+ iscontext: true
+ transformers:
+ - operator: uniq
+ RuleDirection:
+ simple: outbound
+ RuleName:
+ simple: XSOAR - Block Indicators playbook - ${incident.id}
+ SHA256:
+ complex:
+ root: DBotScore.Indicator
+ filters:
+ - - operator: stringHasLength
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: "64"
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: file
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: hash
+ - - operator: notIn
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: IndicatorsToBlock
+ iscontext: true
+ transformers:
+ - operator: uniq
+ URL:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: url
+ ignorecase: true
+ - - operator: greaterThanOrEqual
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ accessor: Indicator
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedURLs
+ iscontext: true
+ - operator: uniq
+ UserVerification:
+ complex:
+ root: inputs.BlockIndicators_UserVerification
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 990,
+ "y": 5035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "58":
+ id: "58"
+ taskid: 4b5714fa-0f03-4f05-8365-26ce106482cd
+ type: title
+ task:
+ id: 4b5714fa-0f03-4f05-8365-26ce106482cd
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3040,
+ "y": 5390
+ }
+ }
+ note: false
+ timertriggers:
+ - fieldname: remediationsla
+ action: stop
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "67":
+ id: "67"
+ taskid: 7b676cac-c8f7-4624-85d1-947158cbb4c8
+ type: title
+ task:
+ id: 7b676cac-c8f7-4624-85d1-947158cbb4c8
+ version: -1
+ name: Suspicious
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "76"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 3810
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "68":
+ id: "68"
+ taskid: 6ec36124-7137-4ad0-8235-3ca3e923c8b0
+ type: title
+ task:
+ id: 6ec36124-7137-4ad0-8235-3ca3e923c8b0
+ version: -1
+ name: Malicious
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "47"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1190,
+ "y": 3810
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "69":
+ id: "69"
+ taskid: d0ce00b0-ef5e-4e11-810e-616da8500cd8
+ type: regular
+ task:
+ id: d0ce00b0-ef5e-4e11-810e-616da8500cd8
+ version: -1
+ name: Get Alert Extra Data
+ description: Returns information about each alert ID.
+ script: '|||xdr-get-cloud-original-alerts'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "114"
+ scriptarguments:
+ alert_ids:
+ complex:
+ root: inputs.Alert_ID
+ transformers:
+ - operator: uniq
+ filter_alert_fields:
+ simple: "false"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 1190
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "70":
+ id: "70"
+ taskid: f5330d0d-0165-4d04-814c-241a7cc190fd
+ type: playbook
+ task:
+ id: f5330d0d-0165-4d04-814c-241a7cc190fd
+ version: -1
+ name: Cortex XDR - Search and Compare Process Executions - XDR Alerts
+ description: |-
+ This playbook is a generic playbook that receives a process name and command-line argument. It uses the "Cortex XDR IR" integration to search for the given process executions inside XDR alerts and compares the command-line argument from the results to the command-line argument received from the playbook input.
+
+ Note: Under the "Processes" input, the playbook should receive an array that contains the following keys:
+ - value: *process name*
+ - commands: *command-line arguments*.
+ playbookName: Cortex XDR - Search and Compare Process Executions - XDR Alerts
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ HuntingTimeFrame:
+ simple: 7 Days
+ Processes:
+ simple: |-
+ - value: ${PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_name},${PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_image_name},${PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_name}
+ - commands: ${PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_command_line},${PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_command_line}
+ SearchXDRAlerts:
+ simple: "True"
+ StringSimilarityThreshold:
+ simple: "0.5"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 2975
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "76":
+ id: "76"
+ taskid: afadc78f-a5bb-4f1c-83e5-fdd043431cca
+ type: condition
+ task:
+ id: afadc78f-a5bb-4f1c-83e5-fdd043431cca
+ version: -1
+ name: Manual Investigation
+ description: "Upon review, the playbook did not identify any obvious malicious indicators associated with the alert. However, there are some noteworthy observations that require manual investigation. \n\n**Review the following alert details and determine whether remediation is necessary:**\n1. Verify if previous false positive incidents with similar characteristics have been detected.\n2. Review the additional alerts within the incident, if any exist.\n3. Examine the additional alerts retrieved by the sub-playbooks associated with the initiator account, similar command line, or initiator host.\n4. Check the FW application ID to ensure it is a known and authorized application within the organization (if the activity and application are valid, add the ID to the 'FWApps_Processes_Whitlist' playbook input to avoid false positives).\n5. Check the initiator process to ensure it is a known and authorized process within the organization (if the activity and process are valid, add the process name to the 'FWApps_Processes_Whitlist' playbook input to avoid false positives).\n6. Analyze the command line analysis results and any additional alerts with similar initiator command lines retrieved from the 'Cortex XDR - Search and Compare Process Executions - XDR Alerts' sub-playbook.\n7. Determine whether the user is aware of the upload activity and what caused the massive upload that triggered the alert.\n8. Verify whether the uploaded files contain sensitive information."
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "58"
+ Remediation:
+ - "54"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 3940
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "78":
+ id: "78"
+ taskid: aeec3291-f750-4743-8442-77286d075ff8
+ type: condition
+ task:
+ id: aeec3291-f750-4743-8442-77286d075ff8
+ version: -1
+ name: Check Uploaded Data Volume
+ description: Determines if the amount of data uploaded to an external host exceeds the defined threshold amount.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "58"
+ More Than Threshold:
+ - "69"
+ separatecontext: false
+ conditions:
+ - label: More Than Threshold
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: description
+ transformers:
+ - operator: RegexGroups
+ args:
+ flags: {}
+ groups:
+ value:
+ simple: "2"
+ keys: {}
+ regex:
+ value:
+ simple: uploaded\s(.*(MB|GB|TB))
+ iscontext: true
+ right:
+ value:
+ simple: GB
+ ignorecase: true
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: description
+ transformers:
+ - operator: RegexGroups
+ args:
+ flags: {}
+ groups:
+ value:
+ simple: "1"
+ keys: {}
+ regex:
+ value:
+ simple: uploaded\s(.*(MB|GB|TB))
+ - operator: StripChars
+ args:
+ chars:
+ value:
+ simple: MB
+ - operator: SumList
+ iscontext: true
+ right:
+ value:
+ complex:
+                          root: inputs.Transferred_Data_Threshold
+ iscontext: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: description
+ transformers:
+ - operator: RegexGroups
+ args:
+ flags: {}
+ groups:
+ value:
+ simple: "2"
+ keys: {}
+ regex:
+ value:
+ simple: uploaded\s(.*(MB|GB|TB))
+ iscontext: true
+ right:
+ value:
+ simple: TB
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 1030
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "82":
+ id: "82"
+ taskid: 3e44628e-0e46-4b23-8d7a-a71a8d1f10ae
+ type: regular
+ task:
+ id: 3e44628e-0e46-4b23-8d7a-a71a8d1f10ae
+ version: -1
+ name: Set Additional Alerts to Context
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.AdditionalAlerts
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.product
+ iscontext: true
+ right:
+ value:
+ simple: XDR Agent
+ ignorecase: true
+ - - operator: notContainsGeneral
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload
+ ignorecase: true
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 810,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "84":
+ id: "84"
+ taskid: 92baac59-9fa0-4f3e-8cd4-66c2a8888c5a
+ type: condition
+ task:
+ id: 92baac59-9fa0-4f3e-8cd4-66c2a8888c5a
+ version: -1
+ name: An Unusual FW App ID was used?
+ description: |
+ Checks whether an unusual FW App ID was used.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "13"
+ "yes":
+ - "86"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: notIn
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: fw_app_id
+ transformers:
+ - operator: split
+ args:
+ delimiter:
+ value:
+ simple: ','
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.FWApps_Processes_Whitlist
+ transformers:
+ - operator: split
+ args:
+ delimiter:
+ value:
+ simple: ','
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2010,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "85":
+ id: "85"
+ taskid: b90f9ff3-e42c-46d5-8a6b-e1821aef60ea
+ type: condition
+ task:
+ id: b90f9ff3-e42c-46d5-8a6b-e1821aef60ea
+ version: -1
+ name: An Unusual Process was used?
+ description: |
+ Checks whether an unusual process was used.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "13"
+ "yes":
+ - "87"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: notIn
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: causality_actor_process_image_name
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_image_name
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_name
+ iscontext: true
+ - operator: split
+ args:
+ delimiter:
+ value:
+ simple: ','
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.FWApps_Processes_Whitlist
+ transformers:
+ - operator: split
+ args:
+ delimiter:
+ value:
+ simple: ','
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "86":
+ id: "86"
+ taskid: 33eb43af-a049-47ff-8e56-1889a4989955
+ type: regular
+ task:
+ id: 33eb43af-a049-47ff-8e56-1889a4989955
+ version: -1
+ name: Set Unusual FW App ID
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.Unusual_FW_App_ID
+ value:
+ simple: "True"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2010,
+ "y": 1835
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "87":
+ id: "87"
+ taskid: d77a3b93-6f7d-4846-8bf0-f6573602a4e1
+ type: regular
+ task:
+ id: d77a3b93-6f7d-4846-8bf0-f6573602a4e1
+ version: -1
+ name: Set Unusual Process
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.Unusual_Process
+ value:
+ simple: "True"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2410,
+ "y": 1835
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "88":
+ id: "88"
+ taskid: e7cb2076-4c50-44e7-81d5-51cc712011e6
+ type: condition
+ task:
+ id: e7cb2076-4c50-44e7-81d5-51cc712011e6
+ version: -1
+ name: Unsigned Initiator Process?
+ description: Checks whether the initiator process is unsigned.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "13"
+ "yes":
+ - "89"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: causality_actor_process_signature_status
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_signature_status
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_signature_status
+ iscontext: true
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ simple: SIGNATURE_SIGNED
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2810,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "89":
+ id: "89"
+ taskid: 0a482c04-e216-4e2f-8ed5-311b81f56264
+ type: regular
+ task:
+ id: 0a482c04-e216-4e2f-8ed5-311b81f56264
+ version: -1
+ name: Set Unsigned Process
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.Unsigned_Process
+ value:
+ simple: "True"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2810,
+ "y": 1835
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "103":
+ id: "103"
+ taskid: b6812984-343e-4a30-8382-b711218f9a6b
+ type: title
+ task:
+ id: b6812984-343e-4a30-8382-b711218f9a6b
+ version: -1
+ name: Benign
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "58"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2040,
+ "y": 3810
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "107":
+ id: "107"
+ taskid: 2bc52b3e-5e56-44c6-8f9e-b32015f195ab
+ type: regular
+ task:
+ id: 2bc52b3e-5e56-44c6-8f9e-b32015f195ab
+ version: -1
+ name: Check Whether The Source Host Is an FTP Server
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.FTP_Server
+ stringify:
+ simple: "true"
+ value:
+ complex:
+ root: ActiveDirectory.Computers
+ filters:
+ - - operator: match
+ left:
+ value:
+ simple: ActiveDirectory.Computers.memberOf
+ iscontext: true
+ right:
+ value:
+ simple: (?i)CN=.*(?:FTP|File).*,
+ accessor: name
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1210,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "109":
+ id: "109"
+ taskid: 96de7dc0-b937-4303-8c9d-9ce3ef043a53
+ type: regular
+ task:
+ id: 96de7dc0-b937-4303-8c9d-9ce3ef043a53
+ version: -1
+ name: Check Whether The Source Host Is an SMTP Server
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ key:
+ simple: Analysis.SMTP_Server
+ stringify:
+ simple: "true"
+ value:
+ complex:
+ root: ActiveDirectory.Computers
+ filters:
+ - - operator: match
+ left:
+ value:
+ simple: ActiveDirectory.Computers.memberOf
+ iscontext: true
+ right:
+ value:
+ simple: (?i)CN=.*(?:SMTP|Mail).*,
+ accessor: name
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 1660
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "112":
+ id: "112"
+ taskid: 2e32afa9-757a-4aa0-83b1-2736da8d3bc0
+ type: condition
+ task:
+ id: 2e32afa9-757a-4aa0-83b1-2736da8d3bc0
+ version: -1
+ name: Calculate Verdict
+ description: Estimate the verdict for the 'large upload HTTPS' Cortex XDR alerts.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "67"
+ Benign:
+ - "103"
+ Malicious:
+ - "68"
+ separatecontext: false
+ conditions:
+ - label: Malicious
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: Investigation
+ accessor: CMDReputation
+ iscontext: true
+ right:
+ value:
+ simple: Malicious
+ ignorecase: true
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.dst_action_external_hostname
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.ssl_req_chello_sni_sample
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_address
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.domain
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_external_hostname
+ iscontext: true
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: RelatedAttackPatterns
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedCampaign
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedFiles
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedDomains
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedIPs
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: RelatedURLs
+ iscontext: true
+ iscontext: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.RiskyUser
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.RiskyUser.id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.Username
+ iscontext: true
+ ignorecase: true
+ accessor: risk_level
+ iscontext: true
+ right:
+ value:
+ simple: High
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.RiskyHost
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.RiskyHost.id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.SrcHostname
+ iscontext: true
+ ignorecase: true
+              accessor: risk_level
+ iscontext: true
+ right:
+ value:
+ simple: High
+ ignorecase: true
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: NumOfSiemFailedLogon
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR
+ accessor: Alert
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: Splunk.Result
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: QRadar.Search.Result
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: SuspiciousUserAgent
+ iscontext: true
+ iscontext: true
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: NumOfThreatLogs
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - operator: greaterThanOrEqual
+ left:
+ value:
+ complex:
+ root: NumOfOktaSuspiciousActivities
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ - label: Benign
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (HTTPS)
+ ignorecase: true
+ accessor: raw_abioc.event.is_src_host_dns_server
+ transformers:
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (HTTPS)
+ ignorecase: true
+ accessor: raw_abioc.event.is_src_host_internet_facing_server
+ iscontext: true
+ right:
+ value:
+ simple: "true"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (HTTPS)
+ ignorecase: true
+ accessor: raw_abioc.event.is_src_host_http_server
+ iscontext: true
+ right:
+ value:
+ simple: "true"
+ ignorecase: true
+ - operator: IsInCidrRanges
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert
+ filters:
+ - - operator: isEqualNumber
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_port
+ iscontext: true
+ right:
+ value:
+ simple: "22"
+ - operator: isEqualNumber
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_remote_port
+ iscontext: true
+ right:
+ value:
+ simple: "22"
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert.alert_name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (Generic)
+ ignorecase: true
+ accessor: _all_events.target_ip
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_remote_ip
+ iscontext: true
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.InternalIPRanges
+ iscontext: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: inList
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.host_name
+ iscontext: true
+ right:
+ value:
+ simple: Analysis.FTP_Server
+ iscontext: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (FTP)
+ ignorecase: true
+ accessor: host_name
+ transformers:
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Endpoint
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Endpoint.endpoint_type
+ iscontext: true
+ right:
+ value:
+ simple: AGENT_TYPE_SERVER
+ ignorecase: true
+ accessor: endpoint_name
+ transformers:
+ - operator: uniq
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: inList
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.host_name
+ iscontext: true
+ right:
+ value:
+ simple: Analysis.SMTP_Server
+ iscontext: true
+ ignorecase: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload (SMTP)
+ ignorecase: true
+ accessor: host_name
+ transformers:
+ - operator: uniq
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Endpoint
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Endpoint.endpoint_type
+ iscontext: true
+ right:
+ value:
+ simple: AGENT_TYPE_SERVER
+ ignorecase: true
+ accessor: endpoint_name
+ transformers:
+ - operator: uniq
+ iscontext: true
+ ignorecase: true
+ - - operator: lessThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.dst_action_external_hostname
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.ssl_req_chello_sni_sample
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_address
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.domain
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_external_hostname
+ iscontext: true
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "1"
+ - operator: isEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: dst_action_external_hostname
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.ssl_req_chello_sni_sample
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_address
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.domain
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_external_hostname
+ iscontext: true
+ iscontext: true
+ - - operator: lessThanOrEqual
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.os_actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_sha256
+ iscontext: true
+ ignorecase: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "1"
+ - operator: isEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: os_actor_process_image_sha256
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_sha256
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_sha256
+ iscontext: true
+ iscontext: true
+ - - operator: isNotEqualString
+ left:
+ value:
+ complex:
+ root: Investigation
+ accessor: CMDReputation
+ iscontext: true
+ right:
+ value:
+ simple: Malicious
+ ignorecase: true
+ - - operator: isNotExists
+ left:
+ value:
+ complex:
+ root: Analysis
+ accessor: AdditionalAlerts
+ iscontext: true
+ - - operator: isNotExists
+ left:
+ value:
+ complex:
+ root: Analysis
+ accessor: Unusual_FW_App_ID
+ iscontext: true
+ - - operator: isNotExists
+ left:
+ value:
+ complex:
+ root: Analysis
+ accessor: Unusual_Process
+ iscontext: true
+ - - operator: isNotExists
+ left:
+ value:
+ complex:
+ root: Analysis
+ accessor: Unsigned_Process
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 3630
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "113":
+ id: "113"
+ taskid: 35fcb41b-ade3-48c0-83b2-6192904cb323
+ type: title
+ task:
+ id: 35fcb41b-ade3-48c0-83b2-6192904cb323
+ version: -1
+ name: Enrichment
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "78"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 900
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "114":
+ id: "114"
+ taskid: 1a69ce4f-fcbe-40e7-8164-c4d8dc304ac4
+ type: playbook
+ task:
+ id: 1a69ce4f-fcbe-40e7-8164-c4d8dc304ac4
+ version: -1
+ name: Entity Enrichment - Generic v3
+ description: Enrich entities using one or more integrations.
+ playbookName: Entity Enrichment - Generic v3
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ AccountDomain:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_primary_normalized_user
+ accessor: domain
+ transformers:
+ - operator: uniq
+ Domain:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: dst_action_external_hostname
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.ssl_req_chello_sni_sample
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_address
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.domain
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_external_hostname
+ iscontext: true
+ - operator: uniq
+ Hostname:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: host_name
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.agent_hostname
+ iscontext: true
+ - operator: uniq
+ IP:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: action_local_ip
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.target_ip
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.action_remote_ip
+ iscontext: true
+ - operator: uniq
+ InternalRange:
+ complex:
+ root: inputs.InternalIPRanges
+ ResolveIP:
+ simple: "True"
+ SHA256:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events
+ accessor: os_actor_process_image_sha256
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.actor_process_image_sha256
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_process_image_sha256
+ iscontext: true
+ - operator: uniq
+ URLSSLVerification:
+ simple: "False"
+ UseReputationCommand:
+ simple: "True"
+ Username:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert._all_events.causality_actor_primary_normalized_user
+ accessor: username
+ transformers:
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 0
+ view: |-
+ {
+ "position": {
+ "x": 1610,
+ "y": 1350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "115":
+ id: "115"
+ taskid: 4c79791f-f37d-4426-8ad4-92be82ab9ae0
+ type: regular
+ task:
+ id: 4c79791f-f37d-4426-8ad4-92be82ab9ae0
+ version: -1
+ name: Set Indicator Relationships Analysis Results
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setIncident
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ partofcampaign:
+ complex:
+ root: RelatedCampaign
+ transformers:
+ - operator: uniq
+ relatedreport:
+ complex:
+ root: RelatedReport
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 2975
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: Command Line
+ output:
+ simple: ${commandline.original}
+ - incidentfield: Command Line Verdict
+ output:
+ simple: Malicious
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "14_16_yes": 0.7,
+ "14_17_#default#": 0.52,
+ "15_17_#default#": 0.17,
+ "22_32_#default#": 0.23,
+ "24_32_#default#": 0.23,
+ "25_32_#default#": 0.17,
+ "28_54_#default#": 0.33,
+ "30_54_#default#": 0.47,
+ "3_113_#default#": 0.11,
+ "55_58_#default#": 0.14,
+ "76_54_Remediation": 0.11,
+ "76_58_#default#": 0.1,
+ "78_58_#default#": 0.1,
+ "7_113_Yes": 0.1,
+ "7_58_#default#": 0.1,
+ "84_13_#default#": 0.41,
+ "85_13_#default#": 0.11,
+ "88_13_#default#": 0.1
+ },
+ "paper": {
+ "dimensions": {
+ "height": 5485,
+ "width": 2610,
+ "x": 810,
+ "y": -30
+ }
+ }
+ }
+inputs:
+- key: InternalIPRanges
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexReplace
+ args:
+ action_dt: {}
+ ignore_case: {}
+ multi_line: {}
+ output_format: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: IANA_Private_Address
+ required: false
+ description: 'A list of IP ranges to check the IP against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ playbookInputQuery:
+- key: FurtherInvestigation
+ value:
+ simple: "False"
+ required: true
+ description: |-
+ Determines whether an incident should be further investigated if similar previous false positive incidents were found.
+    Possible values: True/False. Default: False.
+ playbookInputQuery:
+- key: AutoBlockIndicators
+ value:
+ simple: "True"
+ required: true
+ description: |-
+    Determine whether the given indicators should be automatically blocked, or if the user should be given the option to choose.
+    Possible values: True/False. Default: True.
+ If set to False - no prompt will appear, and all provided indicators will be blocked automatically.
+ If set to True - the user will be prompted to select which indicators to block.
+ playbookInputQuery:
+- key: BlockIndicators_UserVerification
+ value:
+ simple: "False"
+ required: false
+ description: |-
+ Determine whether the blocking of any indicator requires the verification of the user.
+    Possible values: True/False. Default: False.
+ playbookInputQuery:
+- key: EarlyContainment
+ value:
+ simple: "True"
+ required: true
+ description: |-
+ Whether early containment should be allowed when the IP address is known to be malicious.
+    Possible values: True/False. Default: True.
+ playbookInputQuery:
+- key: AutoIsolateEndpoint
+ value:
+ simple: "False"
+ required: true
+ description: |-
+ Whether to isolate the initiating endpoint automatically if the investigation verdict is malicious.
+    Possible values: True/False. Default: False.
+ playbookInputQuery:
+- key: Alert_ID
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload
+ ignorecase: true
+ accessor: alert_id
+ transformers:
+ - operator: uniq
+ required: false
+ description: The Cortex XDR alert ID.
+ playbookInputQuery:
+- key: Transferred_Data _Threshold
+ value:
+ simple: "150"
+ required: true
+ description: |-
+ Specify the uploaded data threshold volume (in MB) from which large upload alerts should be investigated.
+ By setting a threshold, you will be able to determine which large upload alerts require investigation.
+ Default value: 150 (MB).
+ playbookInputQuery:
+- key: FWApps_Processes_Whitlist
+ value:
+ simple: ip,tcp,udp,ssl,syslog,quic,Chrome.exe,Firefox.exe,Opera.exe,Safari.exe,iexplore.exe,msedge.exe,brave.exe
+ required: false
+ description: A list of known and authorized FW application IDs and processes used in the organization.
+ playbookInputQuery:
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
+marketplaces:
+- xsoar
\ No newline at end of file
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload_README.md b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload_README.md
new file mode 100644
index 000000000000..7c7f4f299e94
--- /dev/null
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Large_Upload_README.md
@@ -0,0 +1,73 @@
+The playbook investigates Cortex XDR incidents involving large upload alerts. The playbook is designed to run as a sub-playbook of ‘Cortex XDR Alerts Handling v2’.
+
+The playbook consists of the following procedures:
+- Searches for similar previous incidents that were closed as false positives.
+- Enrichment and investigation of the initiator and destination hostname and IP address.
+- Enrichment and investigation of the initiator user, process, file, or command if it exists.
+- Detection of related indicators and analysis of the relationship between the detected indicators.
+- Utilizes the detected indicators to conduct threat hunting.
+- Blocks detected malicious indicators.
+- Endpoint isolation.
+
+This playbook supports the following Cortex XDR alert names:
+- Large Upload (Generic)
+- Large Upload (SMTP)
+- Large Upload (FTP)
+- Large Upload (HTTPS)
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+* Block Indicators - Generic v3
+* Entity Enrichment - Generic v3
+* TIM - Indicator Relationships Analysis
+* Cortex XDR - Isolate Endpoint
+* Cortex XDR - Endpoint Investigation
+* Cortex XDR - Search and Compare Process Executions - XDR Alerts
+* User Investigation - Generic
+* Threat Hunting - Generic
+* Command-Line Analysis
+
+### Integrations
+
+* CortexXDRIR
+
+### Scripts
+
+* Set
+* DBotFindSimilarIncidents
+
+### Commands
+
+* setIncident
+* xdr-get-cloud-original-alerts
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| InternalIPRanges | A list of IP ranges to check the IP against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | lists.PrivateIPs | Optional |
+| FurtherInvestigation | Determines whether an incident should be further investigated if similar previous false positive incidents were found. Possible values: True/False. Default: False. | False | Required |
+| AutoBlockIndicators | Determine whether the given indicators should be automatically blocked, or if the user should be given the option to choose. Possible values: True/False. Default: True. If set to False - no prompt will appear, and all provided indicators will be blocked automatically. If set to True - the user will be prompted to select which indicators to block. | True | Required |
+| BlockIndicators_UserVerification | Determine whether the blocking of any indicator requires the verification of the user. Possible values: True/False. Default: False. | False | Optional |
+| EarlyContainment | Whether early containment should be allowed when the IP address is known to be malicious. Possible values: True/False. Default: True. | True | Required |
+| AutoIsolateEndpoint | Whether to isolate the initiating endpoint automatically if the investigation verdict is malicious. Possible values: True/False. Default: False. | False | Required |
+| Alert_ID | The Cortex XDR alert ID. | PaloAltoNetworksXDR.Incident.alerts.alert_id | Optional |
+| Transferred_Data _Threshold | Specify the uploaded data threshold volume \(in MB\) from which large upload alerts should be investigated. By setting a threshold, you will be able to determine which large upload alerts require investigation. Default value: 150 \(MB\). | 150 | Required |
+| FWApps_Processes_Whitlist | A list of known and authorized FW application IDs and processes used in the organization. | ip,tcp,udp,ssl,syslog,quic,Chrome.exe,Firefox.exe,Opera.exe,Safari.exe,iexplore.exe,msedge.exe,brave.exe | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex XDR - Large Upload](../doc_files/Cortex_XDR_-_Large_Upload.png)
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted.yml b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted.yml
index eb4589ebe6bb..a6b5cfcb9a38 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted.yml
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted.yml
@@ -2644,18 +2644,23 @@ inputs:
root: lists
accessor: PrivateIPs
transformers:
- - operator: RegexReplace
+ - operator: RegexExtractAll
args:
- action_dt: {}
+ error_if_no_match: {}
ignore_case: {}
multi_line: {}
- output_format: {}
period_matches_newline: {}
regex:
value:
- simple: ' IANA_Private_Address'
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: 'A list of IP ranges to check the IP against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ description: 'A list of IP ranges to check the IP against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).'
playbookInputQuery:
- key: Username
value:
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted_README.md b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted_README.md
index b1c2ef2aa0a1..6befe6cd3610 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted_README.md
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_-_Port_Scan_-_Adjusted_README.md
@@ -12,7 +12,7 @@ This playbook supports the following Cortex XDR alert names:
- Suspicious port scan
- Port scan by suspicious process
- Highly suspicious port scan
-- Port scan
+- Port scan.
## Dependencies
@@ -20,16 +20,16 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Block Indicators - Generic v3
+* Cortex XDR - Endpoint Investigation
* Account Enrichment - Generic v2.1
-* File Enrichment - Generic v2
-* Threat Hunting - Generic
* Cortex XDR - Isolate Endpoint
-* Cortex XDR - Endpoint Investigation
-* User Investigation - Generic
-* IP Enrichment - Generic v2
* TIM - Indicator Relationships Analysis
+* User Investigation - Generic
* Command-Line Analysis
+* Block Indicators - Generic v3
+* File Enrichment - Generic v2
+* IP Enrichment - Generic v2
+* Threat Hunting - Generic
### Integrations
@@ -37,9 +37,9 @@ This playbook does not use any integrations.
### Scripts
-* Set
* IsIPInRanges
* GetTime
+* Set
### Commands
@@ -51,7 +51,7 @@ This playbook does not use any integrations.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
-| InternalIPRanges | A list of IP ranges to check the IP against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | | Required |
+| InternalIPRanges | A list of IP ranges to check the IP against. The comma-separated list should be provided in CIDR notation. For example, a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| Username | The user name used for port scanning. | PaloAltoNetworksXDR.Incident.alerts.user_name | Optional |
| SrcIPAddress | The source IP address from which the port scanning was initiated. | PaloAltoNetworksXDR.Incident.alerts.action_local_ip | Optional |
| DstIPAddress | Scanned destination IP address. | PaloAltoNetworksXDR.Incident.alerts.action_remote_ip | Optional |
@@ -76,7 +76,7 @@ This playbook does not use any integrations.
| PortScan.AttackerUsername | Attacker username from the port scan alert. | unknown |
| PortScan.FileArtifacts | File artifacts from the port scan alert. | unknown |
| PortScan.LateralMovementFirstDatetime | Lateral Movement First Date time from the port scan alert. | unknown |
-| PortScan.PortScanFirstDatetime | Port Scan First Date time | unknown |
+| PortScan.PortScanFirstDatetime | Port Scan First Date time. | unknown |
## Playbook Image
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
index 63feff3e700d..39519fabe269 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
@@ -3,7 +3,7 @@ version: -1
contentitemexportablefields:
contentitemfields: {}
name: Cortex XDR Alerts Handling v2
-description: "This playbook is used to loop over every alert in a Cortex XDR incident. \nSupported alert categories:\n- Malware\n- Port Scan\n- Cloud Cryptojacking\n- Cloud Token Theft\n- RDP Brute-Force\n- First SSO Access\n- Cloud IAM User Access Investigation."
+description: "This playbook is used to loop over every alert in a Cortex XDR incident. \nSupported alert categories:\n- Malware\n- Port Scan\n- Cloud Cryptojacking\n- Cloud Token Theft\n- RDP Brute-Force\n- First SSO Access\n- Cloud IAM User Access Investigation\n- Identity Analytics"
starttaskid: "0"
tasks:
"0":
@@ -38,10 +38,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: fd96111f-f463-49f6-86ec-a05763d4cc99
+ taskid: 3779395c-e33b-45d4-8084-c8658dfc1c7e
type: condition
task:
- id: fd96111f-f463-49f6-86ec-a05763d4cc99
+ id: 3779395c-e33b-45d4-8084-c8658dfc1c7e
version: -1
name: Choose playbook by category
description: Choose the playbook to run by the alert category.
@@ -57,6 +57,10 @@ tasks:
- "11"
First SSO Access:
- "14"
+ Identity Analytics:
+ - "19"
+ Large Upload:
+ - "20"
Malware:
- "9"
Port Scan:
@@ -110,6 +114,29 @@ tasks:
value:
simple: Port Scan
ignorecase: true
+ - label: Identity Analytics
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ accessor: tags
+ iscontext: true
+ right:
+ value:
+ simple: DT:Identity Analytics
+ ignorecase: true
- label: Cloud
condition:
- - operator: containsGeneral
@@ -211,6 +238,30 @@ tasks:
value:
simple: Remote PsExec-like LOLBIN command execution from an unsigned non-standard PsExec service
ignorecase: true
+ - label: Large Upload
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ ignorecase: true
+ accessor: name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload
+ ignorecase: true
continueonerrortype: ""
view: |-
{
@@ -268,13 +319,13 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "5"
+ - "21"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 1880,
+ "x": 2230,
"y": 405
}
}
@@ -397,14 +448,14 @@ tasks:
view: |-
{
"position": {
- "x": 920,
+ "x": 480,
"y": 405
}
}
note: false
timertriggers: []
ignoreworker: false
- skipunavailable: false
+ skipunavailable: true
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
@@ -429,7 +480,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "5"
+ - "21"
scriptarguments:
endpoint_id:
complex:
@@ -534,7 +585,7 @@ tasks:
view: |-
{
"position": {
- "x": 480,
+ "x": 1800,
"y": 405
}
}
@@ -624,7 +675,7 @@ tasks:
view: |-
{
"position": {
- "x": -940,
+ "x": -1580,
"y": 750
}
}
@@ -681,7 +732,7 @@ tasks:
view: |-
{
"position": {
- "x": -940,
+ "x": -1580,
"y": 405
}
}
@@ -706,7 +757,7 @@ tasks:
brand: ""
nexttasks:
'#default#':
- - "5"
+ - "22"
Cryptojacking:
- "10"
Data Exfiltration:
@@ -933,7 +984,7 @@ tasks:
view: |-
{
"position": {
- "x": -940,
+ "x": -1580,
"y": 560
}
}
@@ -1210,7 +1261,7 @@ tasks:
view: |-
{
"position": {
- "x": -430,
+ "x": -400,
"y": 405
}
}
@@ -1275,7 +1326,7 @@ tasks:
view: |-
{
"position": {
- "x": -1780,
+ "x": -2420,
"y": 750
}
}
@@ -1370,14 +1421,14 @@ tasks:
view: |-
{
"position": {
- "x": -1360,
+ "x": -2000,
"y": 750
}
}
note: false
timertriggers: []
ignoreworker: false
- skipunavailable: false
+ skipunavailable: true
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
@@ -1421,7 +1472,7 @@ tasks:
view: |-
{
"position": {
- "x": -2200,
+ "x": -2840,
"y": 750
}
}
@@ -1501,7 +1552,7 @@ tasks:
view: |-
{
"position": {
- "x": 1390,
+ "x": 920,
"y": 405
}
}
@@ -1512,22 +1563,286 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
-system: true
+ "19":
+ id: "19"
+ taskid: 85150d2a-010a-4720-8926-c0f9f711a68d
+ type: playbook
+ task:
+ id: 85150d2a-010a-4720-8926-c0f9f711a68d
+ version: -1
+ name: Cortex XDR - Identity Analytics
+ description: |
+ The `Cortex XDR - Identity Analytics` playbook is designed to handle Cortex XDR Identity Analytics alerts and executes the following:
+
+ Analysis:
+ - Enriches the IP address and the account, providing additional context and information about these indicators.
+
+ Verdict:
+ - Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+ Investigation:
+ - Checks for related Cortex XDR alerts to the user by Mitre tactics to identify malicious activity.
+ - Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+ - Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+ Verdict Handling:
+ - Handles malicious alerts by initiating appropriate response actions, including blocking malicious IP addresses and revoking or clearing user's sessions.
+ - Handles non-malicious alerts identified during the investigation.
+
+ The playbook is used as a sub-playbook in ‘Cortex XDR Alerts Handling v2’.
+ playbookName: Cortex XDR - Identity Analytics
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ AlertName:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ ignorecase: true
+ accessor: name
+ AutoRemediation:
+ simple: "False"
+ FailedLogonThreshold:
+ simple: "30"
+ IAMRemediationType:
+ simple: Revoke
+ IPAddress:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ ignorecase: true
+ accessor: host_ip_list
+ OktaSuspiciousEventsThreshold:
+ simple: "5"
+ RelatedAlertsThreshold:
+ simple: "5"
+ Username:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ ignorecase: true
+ accessor: user_name
+ alert_id:
+ simple: ${inputs.alert_id}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": -840,
+ "y": 405
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: 031e700e-2cbd-474c-8d78-6a7364d2e8f1
+ type: playbook
+ task:
+ id: 031e700e-2cbd-474c-8d78-6a7364d2e8f1
+ version: -1
+ name: Cortex XDR - Large Upload
+ description: "The playbook investigates Cortex XDR incidents involving large upload alerts. The playbook is designed to run as a sub-playbook of ‘Cortex XDR Alerts Handling v2’. \n\nThe playbook consists of the following procedures:\n- Searches for similar previous incidents that were closed as false positives.\n- Enrichment and investigation of the initiator and destination hostname and IP address.\n- Enrichment and investigation of the initiator user, process, file, or command if it exists.\n- Detection of related indicators and analysis of the relationship between the detected indicators.\n- Utilize the detected indicators to conduct threat hunting.\n- Blocks detected malicious indicators.\n- Endpoint isolation.\n\nThis playbook supports the following Cortex XDR alert names:\n- Large Upload (Generic)\n- Large Upload (SMTP)\n- Large Upload (FTP)\n- Large Upload (HTTPS)"
+ playbookName: Cortex XDR - Large Upload
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ Alert_ID:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Large Upload
+ ignorecase: true
+ accessor: alert_id
+ transformers:
+ - operator: uniq
+ AutoBlockIndicators:
+ simple: "True"
+ AutoIsolateEndpoint:
+ simple: "False"
+ BlockIndicators_UserVerification:
+ simple: "False"
+ EarlyContainment:
+ simple: "True"
+ FWApps_Processes_Whitlist:
+ simple: ip,tcp,udp,ssl,syslog,quic,Chrome.exe,Firefox.exe,Opera.exe,Safari.exe,iexplore.exe,msedge.exe,brave.exe
+ FurtherInvestigation:
+ simple: "False"
+ InternalIPRanges:
+ complex:
+ root: inputs.InternalIPRanges
+        Transferred_Data_Threshold:
+ simple: "150"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1360,
+ "y": 405
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: 173fd476-12f1-403c-839d-8087028dbf73
+ type: regular
+ task:
+ id: 173fd476-12f1-403c-839d-8087028dbf73
+ version: -1
+ name: Set Alert ID to continue with the investigation and response
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: ContinueResponseForAlerts
+ value:
+ simple: ${inputs.alert_id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 580
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: b9efc9da-9971-4e94-885f-ad668b030f3c
+ type: regular
+ task:
+ id: b9efc9da-9971-4e94-885f-ad668b030f3c
+ version: -1
+ name: Set Alert ID to continue with the investigation and response
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: ContinueResponseForAlerts
+ value:
+ simple: ${inputs.alert_id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1160,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
"12_15_IAM User Access": 0.74,
"12_16_Token Theft": 0.58,
- "1_11_Cloud": 0.81,
- "1_14_First SSO Access": 0.82,
- "1_7_#default#": 0.63,
- "1_9_Malware": 0.65
+ "1_11_Cloud": 0.9,
+ "1_14_First SSO Access": 0.86,
+ "1_18_ Remote PsExec with LOLBIN command": 0.67,
+ "1_19_Identity Analytics": 0.9,
+ "1_20_Large Upload": 0.89,
+ "1_7_#default#": 0.9,
+ "1_9_Malware": 0.9
},
"paper": {
"dimensions": {
"height": 925,
- "width": 4460,
- "x": -2200,
+ "width": 5450,
+ "x": -2840,
"y": 70
}
}
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
index 440116d2dabe..a726cfee582a 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
@@ -6,7 +6,8 @@ Supported alert categories:
- Cloud Token Theft
- RDP Brute-Force
- First SSO Access
-- Cloud IAM User Access Investigation.
+- Cloud IAM User Access Investigation
+- Identity Analytics
## Dependencies
@@ -14,15 +15,18 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Cortex XDR - Malware Investigation
-* Cortex XDR - XCloud Token Theft Response
-* Cortex XDR - Possible External RDP Brute-Force
* GenericPolling
+* Cortex XDR - Possible External RDP Brute-Force
* Cortex XDR - XCloud Cryptojacking
-* Cortex XDR - Cloud Data Exfiltration Response
-* Cortex XDR - Cloud IAM User Access Investigation
* Cortex XDR - Port Scan - Adjusted
* Cortex XDR - First SSO Access
+* Cortex XDR - XCloud Token Theft Response
+* Cortex XDR Remote PsExec with LOLBIN command execution alert
+* Cortex XDR - Cloud IAM User Access Investigation
+* Cortex XDR - Large Upload
+* Cortex XDR - Cloud Data Exfiltration Response
+* Cortex XDR - Malware Investigation
+* Cortex XDR - Identity Analytics
### Integrations
@@ -44,6 +48,7 @@ This playbook does not use any commands.
| --- | --- | --- | --- |
| incident_id | Incident ID. | PaloAltoNetworksXDR.Incident.incident_id | Optional |
| alert_id | Alert ID. | PaloAltoNetworksXDR.Incident.alerts.alert_id | Optional |
+| InternalIPRanges | A list of IP ranges to check the IP against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | lists.PrivateIPs | Optional |
## Playbook Outputs
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5.yml b/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5.yml
index 89322d9bd8d8..1e348f912cd2 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5.yml
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5.yml
@@ -1648,30 +1648,28 @@ tasks:
task:
id: 3ba84f9e-aaf6-4bd3-8d22-cfc00499e075
version: -1
- name: Should continue with the playbook's investigation and response?
- description: Checks if the playbook should continue or exit.
+      name: Are there any unhandled alerts?
+      description: Checks whether there are any unhandled alerts to determine if the playbook should continue with the investigation and response or exit.
type: condition
iscommand: false
brand: ""
nexttasks:
'#default#':
- - "20"
- "yes":
- "87"
+ "yes":
+ - "20"
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: inList
+ - - operator: isExists
left:
value:
complex:
- root: PaloAltoNetworksXDR.Incident.alerts
- accessor: name
+ root: ContinueResponseForAlerts
iscontext: true
right:
- value:
- simple: Unusual allocation of multiple cloud compute resources
+ value: {}
continueonerrortype: ""
view: |-
{
@@ -1981,9 +1979,7 @@ view: |-
"44_46_yes": 0.41,
"52_11_#default#": 0.43,
"52_53_yes": 0.63,
- "53_11_#default#": 0.31,
- "79_20_#default#": 0.35,
- "79_87_yes": 0.89
+ "53_11_#default#": 0.31
},
"paper": {
"dimensions": {
@@ -2050,18 +2046,23 @@ inputs:
root: lists
accessor: PrivateIPs
transformers:
- - operator: RegexReplace
+ - operator: RegexExtractAll
args:
- action_dt: {}
+ error_if_no_match: {}
ignore_case: {}
multi_line: {}
- output_format: {}
period_matches_newline: {}
regex:
value:
- simple: IANA_Private_Address
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
- description: "A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. An example of a list \n\"172.16.0.0/12,10.0.0.0/8,192.168.0.0/16\" (without quotes). \nIf a list is not provided, will use the default list provided in the IsIPInRanges."
+ description: "A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. For example, \n\"172.16.0.0/12,10.0.0.0/8,192.168.0.0/16\" (without quotes)."
playbookInputQuery:
- key: InternalDomainName
value: {}
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5_README.md b/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5_README.md
index 3c8b97ebbe7b..5374d6f4e275 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5_README.md
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_incident_handling_v3_6_5_README.md
@@ -48,7 +48,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| CriticalADGroups | A comma-separated list of DN names of critical Active Directory groups. This will affect the severity calculated for this incident. | | Optional |
| incident_id | Incident ID. | incident.xdrincidentid | Optional |
| XDRDomain | XDR instance domain | incident.xdrurl | Optional |
-| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. An example of a list "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use the default list provided in the IsIPInRanges. | lists.PrivateIPs | Optional |
+| InternalRange | A comma-separated list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation. For example: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| InternalDomainName | The organizations internal domain name. This is provided for the IsInternalHostName script that checks if the detected host names are internal or external if the hosts contain the internal domains suffix. For example, paloaltonetworks.com. If there is more than one domain, use the \| character to separate values such as \(paloaltonetworks.com\|test.com\). | | Optional |
| InternalHostRegex | This is provided for the IsInternalHostName script that checks if the detected host names are internal or external if the hosts match the organization's naming convention. For example, the host testpc1 will have the following regex \\w\{6\}\\d\{1\}. | | Optional |
| Hunting | This input indicates whether the playbook will hunt for related IOCs. Specify Yes/No. | Yes | Optional |
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access.yml
index 80650e931308..e7b811256cb5 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access.yml
@@ -2,6 +2,7 @@ id: Cortex XDR - First SSO Access
version: -1
name: Cortex XDR - First SSO Access
description: |-
+ Deprecated. Use `Cortex XDR - Identity Analytics` instead.
Investigates a Cortex XDR incident containing First SSO access from ASN in organization
or First successful SSO connection from a country in organization.
@@ -12,6 +13,7 @@ description: |-
- Response based on the verdict.
The playbook is used as a sub-playbook in ‘Cortex XDR Incident Handling - v3’.
+deprecated: true
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict.yml
index d1612736e8d1..dc8295920345 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict.yml
@@ -2,11 +2,13 @@ id: Cortex XDR - First SSO Access - Set Verdict
version: -1
name: Cortex XDR - First SSO Access - Set Verdict
description: |-
+ Deprecated. Use `Cortex XDR - Identity Analytics` instead.
This playbook determines the alert’s verdict based on the results of multiple checks.
By default, if at least two of the checks' results are true, the verdict is set to malicious.
else if only one check's results are true, the verdict is set to suspicious.
If none of the conditions is true, the verdict is set to non-malicious.
It is possible to change the threshold value of the inputs to change the sensitivity of the verdict.
+deprecated: true
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict_README.md b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict_README.md
index 78b599bcdb52..50d5e2b57926 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict_README.md
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_-_Set_Verdict_README.md
@@ -1,3 +1,4 @@
+Deprecated. Use `Cortex XDR - Identity Analytics` instead.
This playbook determines the alert’s verdict based on the results of multiple checks.
By default, if at least two of the checks' results are true, the verdict is set to malicious.
else if only one check's results are true, the verdict is set to suspicious.
@@ -18,9 +19,9 @@ This playbook does not use any integrations.
### Scripts
+* SetGridField
* SetMultipleValues
* Set
-* SetGridField
### Commands
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_README.md b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_README.md
index a39b007149e5..772ebd6d9e8b 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_README.md
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_First_SSO_Access_README.md
@@ -1,3 +1,4 @@
+Deprecated. Use `Cortex XDR - Identity Analytics` instead.
Investigates a Cortex XDR incident containing First SSO access from ASN in organization
or First successful SSO connection from a country in organization.
@@ -15,18 +16,18 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* Cortex XDR - First SSO Access - Set Verdict
+* User Investigation - Generic
* TIM - Indicator Relationships Analysis
-* Endpoint Enrichment - Generic v2.1
* Block Account - Generic v2
-* User Investigation - Generic
* Account Enrichment - Generic v2.1
-* Cortex XDR - First SSO Access - Set Verdict
+* Endpoint Enrichment - Generic v2.1
### Integrations
* CortexXDRIR
-* XDR_iocs
* XQLQueryingEngine
+* XDR_iocs
### Scripts
@@ -34,10 +35,10 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Commands
-* setIncident
+* okta-clear-user-sessions
* xdr-endpoint-isolate
* ip
-* okta-clear-user-sessions
+* setIncident
## Playbook Inputs
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics.yml
new file mode 100644
index 000000000000..c65500ba2211
--- /dev/null
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics.yml
@@ -0,0 +1,1842 @@
+id: Cortex XDR - Identity Analytics
+version: -1
+name: Cortex XDR - Identity Analytics
+description: |
+ The `Cortex XDR - Identity Analytics` playbook is designed to handle Cortex XDR Identity Analytics alerts and executes the following:
+
+ Analysis:
+ - Enriches the IP address and the account, providing additional context and information about these indicators.
+
+ Verdict:
+ - Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+ Investigation:
+ - Checks for related Cortex XDR alerts to the user by Mitre tactics to identify malicious activity.
+ - Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+ - Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+ Verdict Handling:
+ - Handles malicious alerts by initiating appropriate response actions, including blocking malicious IP addresses and revoking or clearing user's sessions.
+ - Handles non-malicious alerts identified during the investigation.
+
+ The playbook is used as a sub-playbook in ‘Cortex XDR Alerts Handling v2’.
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: ef865a2c-4ee4-42dd-8efe-be6e3b5cc202
+ type: start
+ task:
+ id: ef865a2c-4ee4-42dd-8efe-be6e3b5cc202
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -240
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: addb2b17-14c9-46e4-8677-8989f55ad66a
+ type: title
+ task:
+ id: addb2b17-14c9-46e4-8677-8989f55ad66a
+ version: -1
+ name: Analysis
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "2"
+ - "3"
+ - "4"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -100
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: e51ee96a-1c2c-4cb8-8697-a406c1b38dda
+ type: condition
+ task:
+ id: e51ee96a-1c2c-4cb8-8697-a406c1b38dda
+ version: -1
+      name: Is the resource log Azure?
+ description: Checks if the resource log is Azure.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "5"
+ "yes":
+ - "29"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ iscontext: true
+ right:
+ value:
+ simple: inputs.alert_id
+ iscontext: true
+ accessor: tags
+ iscontext: true
+ right:
+ value:
+ simple: Azure
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 40
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: d4f82d0a-07b5-45b9-869f-8c8ec6c3ed14
+ type: regular
+ task:
+ id: d4f82d0a-07b5-45b9-869f-8c8ec6c3ed14
+ version: -1
+ name: IP Enrichment
+ description: Checks the reputation of an IP address.
+ script: '|||ip'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ ip:
+ complex:
+ root: inputs.IPAddress
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 40
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: Source IP
+ output:
+ simple: ${inputs.IPAddress}
+ - incidentfield: ASN
+ output:
+ simple: ${IP.ASN}
+ - incidentfield: Country Code
+ output:
+ simple: ${IP.Geo.Country}
+ - incidentfield: IP Reputation
+ output:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: IP
+ ignorecase: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: inputs.IPAddress
+ iscontext: true
+ accessor: Score
+ - incidentfield: Detected IPs
+ output:
+ simple: ${inputs.IPAddress}
+ - incidentfield: Alert tags
+ output:
+ complex:
+ root: incident.xdralerts.tags
+ filters:
+ - - operator: containsString
+ left:
+ value:
+ simple: incident.xdralerts.tags
+ iscontext: true
+ right:
+ value:
+ simple: DT:Identity Analytics
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: ce0268ab-fcbf-4352-81bf-e30101dee854
+ type: playbook
+ task:
+ id: ce0268ab-fcbf-4352-81bf-e30101dee854
+ version: -1
+ name: Account Enrichment - Generic v2.1
+ description: |-
+ Enrich accounts using one or more integrations.
+ Supported integrations:
+ - Active Directory
+ - Microsoft Graph User
+ - SailPoint IdentityNow
+ - SailPoint IdentityIQ
+ - PingOne
+ - Okta
+ - AWS IAM
+ - Cortex XDR (account enrichment and reputation)
+
+        Also, the playbook supports the generic command 'iam-get-user' (implemented in IAM integrations). For more information, visit https://xsoar.pan.dev/docs/integrations/iam-integrations.
+ playbookName: Account Enrichment - Generic v2.1
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ Domain:
+ complex:
+ root: inputs.Username
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: \
+ fields:
+ value:
+ simple: "1"
+ - operator: uniq
+ Username:
+ complex:
+ root: inputs.Username
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: \
+ fields:
+ value:
+ simple: "2"
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": -80,
+ "y": 40
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 135b1582-a41f-45bd-8197-2424a96f309b
+ type: title
+ task:
+ id: 135b1582-a41f-45bd-8197-2424a96f309b
+ version: -1
+ name: Verdict
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "7"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 550
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: a4b86401-f970-4fd5-88b0-6cdac93953c4
+ type: playbook
+ task:
+ id: a4b86401-f970-4fd5-88b0-6cdac93953c4
+ version: -1
+ name: Cloud IAM Enrichment - Generic
+ description: This playbook is responsible for collecting and enriching data on Identity Access Management (IAM) in cloud environments (AWS, Azure, and GCP).
+ playbookName: Cloud IAM Enrichment - Generic
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ cloudProvider:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert.raw_abioc.event
+ accessor: auth_server
+ transformers:
+ - operator: uniq
+ username:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert.raw_abioc.event
+ accessor: auth_identity
+ transformers:
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 380
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: 4757a9a5-de0e-42e6-8abf-f232e773de2a
+ type: condition
+ task:
+ id: 4757a9a5-de0e-42e6-8abf-f232e773de2a
+ version: -1
+ name: Found malicious evidence based on enrichment data?
+ description: Checks if malicious evidence is found based on enrichment data.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "8"
+ "yes":
+ - "16"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: IP
+ ignorecase: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ simple: inputs.IPAddress
+ iscontext: true
+ accessor: Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ ignorecase: true
+ - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.RiskyUser.risk_level
+ iscontext: true
+ right:
+ value:
+ simple: HIGH
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: c1d72706-a600-4e93-86aa-6c1272e4ff0d
+ type: title
+ task:
+ id: c1d72706-a600-4e93-86aa-6c1272e4ff0d
+ version: -1
+ name: Investigation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "11"
+ - "33"
+ - "32"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1120,
+ "y": 870
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: a192dde2-2de2-42d6-8584-2bb4069088ea
+ type: playbook
+ task:
+ id: a192dde2-2de2-42d6-8584-2bb4069088ea
+ version: -1
+ name: Azure - User Investigation
+ description: |-
+ This playbook performs an investigation on a specific user in Azure environments, using queries and logs from Azure Log Analytics to locate the following activities performed by the user:
+ - Script-based user agent usage
+ - Administrative user activities
+ - Security rules and policies changes
+ - Failed login attempt
+ - MFA failed login attempt
+ - Login attempt from an uncommon country
+ - Anomalies activities
+ - Risky users
+ - Uncommon high volume of actions
+ - Action uncommonly performed by the user
+ playbookName: Azure - User Investigation
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ AzureSearchTime:
+ simple: ago(7d)
+ MfaAttemptThreshold:
+ simple: "10"
+ Username:
+ complex:
+ root: PaloAltoNetworksXDR.OriginalAlert.raw_abioc.event
+ accessor: auth_identity
+ transformers:
+ - operator: uniq
+ failedLogonThreshold:
+ simple: "20"
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 710,
+ "y": 1010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: e889fef1-f79e-4d52-8059-6447eb6ed7e1
+ type: condition
+ task:
+ id: e889fef1-f79e-4d52-8059-6447eb6ed7e1
+ version: -1
+ name: Found any malicious user activity?
+ description: Determine if the activity is malicious based on the investigation findings.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "13"
+ "yes":
+ - "16"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: AzureScriptBasedUserAgentEvents
+ iscontext: true
+ right:
+ value: {}
+ - operator: greaterThan
+ left:
+ value:
+ complex:
+ root: AzureFailLoginCount
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.FailedLogonThreshold
+ iscontext: true
+ - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: SuspiciousUserAgent
+ iscontext: true
+ - operator: greaterThan
+ left:
+ value:
+ complex:
+ root: NumOfOktaSuspiciousActivities
+ iscontext: true
+ right:
+ value:
+ simple: inputs.OktaSuspiciousActivitiesThreshold
+ iscontext: true
+ - operator: greaterThan
+ left:
+ value:
+ complex:
+ root: NumOfFailedLogon
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.FailedLogonThreshold
+ iscontext: true
+ - operator: greaterThan
+ left:
+ value:
+ simple: NumOfRelatedAlerts
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.RelatedAlertsThreshold
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1120,
+ "y": 1340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 0e16a93b-530b-4839-80d3-6a918fcc3e34
+ type: condition
+ task:
+ id: 0e16a93b-530b-4839-80d3-6a918fcc3e34
+ version: -1
+ name: Analyst Decision
+ description: An analyst’s decision is required to determine whether it is a malicious or non-malicious activity.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ Malicious:
+ - "16"
+ Non-Malicious:
+ - "14"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1120,
+ "y": 1560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ simple: An analyst's decision is required to determine whether it is a malicious or non-malicious activity.
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - Malicious
+ - Non-Malicious
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 90ea8626-7183-4c9c-84c5-d490649668e6
+ type: title
+ task:
+ id: 90ea8626-7183-4c9c-84c5-d490649668e6
+ version: -1
+ name: No Malicious activity identified
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "15"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1530,
+ "y": 1740
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "15":
+ id: "15"
+ taskid: 31e31e62-739d-412e-8993-7ff2d9ba736c
+ type: regular
+ task:
+ id: 31e31e62-739d-412e-8993-7ff2d9ba736c
+ version: -1
+ name: Set incident Verdict
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setIncident
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "24"
+ scriptarguments:
+ verdict:
+ simple: Non-Malicious
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1530,
+ "y": 2320
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "16":
+ id: "16"
+ taskid: d2604f19-0e0d-4c0b-8f4e-46629aa84e28
+ type: title
+ task:
+ id: d2604f19-0e0d-4c0b-8f4e-46629aa84e28
+ version: -1
+ name: Malicious Activity identified
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "17"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1740
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: c3879797-d655-4256-8203-fb4fc80d5841
+ type: regular
+ task:
+ id: c3879797-d655-4256-8203-fb4fc80d5841
+ version: -1
+ name: Set incident Verdict
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setIncident
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "18"
+ scriptarguments:
+ verdict:
+ simple: Malicious
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1890
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: User Risk Level
+ output:
+ simple: ${PaloAltoNetworksXDR.RiskyUser.risk_level}
+ - incidentfield: Failed Logon Events
+ output:
+ complex:
+ root: AzureFailLoginCount
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: NumOfOktaFailedLogon
+ iscontext: true
+ - incidentfield: Email
+ output:
+ simple: ${ActiveDirectory.Users.mail}
+ - incidentfield: Account Member Of
+ output:
+ simple: ${ActiveDirectory.Users.memberOf}
+ - incidentfield: Account Status
+ output:
+ simple: ${Account.Status}
+ - incidentfield: Cloud Account ID
+ output:
+ simple: ${MSGraphUser.ID}
+ - incidentfield: Account ID
+ output:
+ complex:
+ root: Account
+ filters:
+ - - operator: notContainsGeneral
+ left:
+ value:
+ simple: Account.ID
+ iscontext: true
+ right:
+ value:
+ simple: "="
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Account.Type
+ iscontext: true
+ right:
+ value:
+ simple: Okta
+ ignorecase: true
+ accessor: ID
+ - incidentfield: Manager Email Address
+ output:
+ simple: ${UserManagerEmail}
+ - incidentfield: Alert Name
+ output:
+ simple: ${inputs.AlertName}
+ - incidentfield: Detected User
+ output:
+ simple: ${inputs.Username}
+ - incidentfield: Username
+ output:
+ simple: ${inputs.Username}
+ - incidentfield: XDR Alert Search Results
+ output:
+ simple: ${PaloAltoNetworksXDR.Alert}
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: 6bd47335-240d-4bba-88e9-0eefce8b8aea
+ type: title
+ task:
+ id: 6bd47335-240d-4bba-88e9-0eefce8b8aea
+ version: -1
+ name: Remediation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "19"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2050
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: 54cf6461-81ef-4d7f-805e-41f5082e6726
+ type: condition
+ task:
+ id: 54cf6461-81ef-4d7f-805e-41f5082e6726
+ version: -1
+ name: Should perform remediation actions automatically?
+ description: Whether to perform automatic remediation actions based on the input’s value. (AutoRemediation)
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "25"
+ "yes":
+ - "20"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: inputs.AutoRemediation
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: b39cf718-fb84-4dff-8c5e-fa0cb3e2ed05
+ type: title
+ task:
+ id: b39cf718-fb84-4dff-8c5e-fa0cb3e2ed05
+ version: -1
+ name: Auto Remediation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "34"
+ - "27"
+ - "22"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: f4cea7e3-11aa-4d81-89fd-919f33f6f1a9
+ type: regular
+ task:
+ id: f4cea7e3-11aa-4d81-89fd-919f33f6f1a9
+ version: -1
+ name: Okta - Clear user sessions
+ description: |-
+ Removes all active identity provider sessions. This forces the user to authenticate upon the next operation. Optionally revokes OpenID Connect and OAuth refresh and access tokens issued to the user.
+ For more information and examples:
+ https://developer.okta.com/docs/reference/api/users/#user-sessions
+ script: '|||okta-clear-user-sessions'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "24"
+ scriptarguments:
+ userId:
+ complex:
+ root: Account
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Account.Type
+ iscontext: true
+ right:
+ value:
+ simple: Okta
+ ignorecase: true
+ accessor: ID
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 860,
+ "y": 2730
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: 48e20d8a-2273-473e-8152-71ac1048deec
+ type: condition
+ task:
+ id: 48e20d8a-2273-473e-8152-71ac1048deec
+ version: -1
+ name: Should Perform Cloud Remediation?
+ description: Whether to perform cloud remediation actions.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "24"
+ "yes":
+ - "23"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: tags
+ iscontext: true
+ right:
+ value:
+ simple: Azure
+ ignorecase: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUser.ID
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 30,
+ "y": 2530
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: a21fdd8c-9e57-4a59-8bc3-5bfdce5d7a4b
+ type: playbook
+ task:
+ id: a21fdd8c-9e57-4a59-8bc3-5bfdce5d7a4b
+ version: -1
+ name: Cloud Credentials Rotation - Azure
+ description: |-
+ ## **Azure Credentials Rotation Playbook**
+
+ ### **IAM Remediation**
+ Protect your identity and access management:
+ - **Reset Password**: Resets the user password to halt any unauthorized access.
+
+ - **Revoke Session**: Terminates current active sessions to ensure the malicious actor is locked out.
+
+ - **Combo Action**: Resets the password and terminates all active sessions.
+
+ ### **Service Principal Remediation**
+ Guard your applications:
+ - **Password Regeneration**: Generate a new password for the service principal, making sure the old one becomes obsolete.
+ playbookName: Cloud Credentials Rotation - Azure
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "24"
+ scriptarguments:
+ IAMRemediationType:
+ simple: ${inputs.IAMRemediationType}
+ identityType:
+ simple: IAM
+ userID:
+ simple: ${MSGraphUser.ID}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 30,
+ "y": 2730
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "24":
+ id: "24"
+ taskid: fd48c7e6-3043-4d7f-8536-ca6b610eb4c2
+ type: title
+ task:
+ id: fd48c7e6-3043-4d7f-8536-ca6b610eb4c2
+ version: -1
+ name: Done
+ description: commands.local.cmd.close.inv
+ type: title
+ iscommand: false
+ brand: Builtin
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2930
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "25":
+ id: "25"
+ taskid: cbca2254-0769-443c-85e8-8b6b35eeb770
+ type: title
+ task:
+ id: cbca2254-0769-443c-85e8-8b6b35eeb770
+ version: -1
+ name: Manual Remediation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "24"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -410,
+ "y": 2545
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: bf0ca540-08c2-4b81-8cc8-68e83d308a0b
+ type: playbook
+ task:
+ id: bf0ca540-08c2-4b81-8cc8-68e83d308a0b
+ version: -1
+ name: Block IP - Generic v3
+ description: "This playbook blocks malicious IP addresses using all integrations that are enabled. The direction of the traffic that will be blocked is determined by the Cortex XSOAR user (and set by default to outgoing)\nNote the following:\n- some of those integrations require specific parameters to run, which are based on the playbook inputs. Also, certain integrations use FW rules or appended network objects.\n- Note that the appended network objects should be specified in blocking rules inside the system later on. \n\n\nSupported integrations for this playbook [Network security products such as FW/WAF/IPs/etc.]: \n\n* Check Point Firewall\n* Palo Alto Networks PAN-OS\n* Zscaler\n* FortiGate\n* Aria Packet Intelligence\n* Cisco Firepower \n* Cisco Secure Cloud Analytics\n* Cisco ASA\n* Akamai WAF\n* F5 SilverLine\n* ThreatX\n* Signal Sciences WAF\n* Sophos Firewall\n\n"
+ playbookName: Block IP - Generic v3
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "24"
+ scriptarguments:
+ AutoCommit:
+ simple: ${inputs.FWAutoCommit}
+ CustomBlockRule:
+ simple: "True"
+ Folder:
+ simple: Shared
+ IP:
+ complex:
+ root: DBotScore
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value:
+ simple: IP
+ ignorecase: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value:
+ simple: "3"
+ ignorecase: true
+ accessor: Indicator
+ transformers:
+ - operator: uniq
+ InputEnrichment:
+ simple: "False"
+ InternalRange:
+ simple: ${inputs.InternalRange}
+ RuleDirection:
+ simple: outbound
+ RuleName:
+ simple: XSOAR - Block IP playbook - ${incident.id}
+ UserVerification:
+ simple: ${inputs.UserVerification}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2530
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "29":
+ id: "29"
+ taskid: 63352686-7a00-4344-8fda-800c7c8e0dd2
+ type: regular
+ task:
+ id: 63352686-7a00-4344-8fda-800c7c8e0dd2
+ version: -1
+ name: Fetch cloud alert extra data
+ description: Returns information about each alert ID.
+ script: '|||xdr-get-cloud-original-alerts'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "6"
+ scriptarguments:
+ alert_ids:
+ complex:
+ root: inputs.alert_id
+ filter_alert_fields:
+ simple: "false"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 220
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "32":
+ id: "32"
+ taskid: 040a6334-1211-4a14-80b0-3630b5cc6a8a
+ type: playbook
+ task:
+ id: 040a6334-1211-4a14-80b0-3630b5cc6a8a
+ version: -1
+ name: Okta - User Investigation
+ description: This playbook performs an investigation on a specific user, using queries and logs from Okta.
+ playbookName: Okta - User Investigation
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ ASN:
+ complex:
+ root: IP
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: IP.Address
+ iscontext: true
+ right:
+ value:
+ simple: inputs.IPAddress
+ iscontext: true
+ accessor: ASN
+ transformers:
+ - operator: uniq
+ LoginCountry:
+ complex:
+ root: IP
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: IP.Address
+ iscontext: true
+ right:
+ value:
+ simple: inputs.IPAddress
+ iscontext: true
+ accessor: Geo.Country
+ transformers:
+ - operator: uniq
+ UserEmail:
+ complex:
+ root: Account.Email
+ accessor: Address
+ transformers:
+ - operator: uniq
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1530,
+ "y": 1010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "33":
+ id: "33"
+ taskid: 8ecccbd5-8eeb-4ca7-8c26-5dbc8b10ad8a
+ type: playbook
+ task:
+ id: 8ecccbd5-8eeb-4ca7-8c26-5dbc8b10ad8a
+ version: -1
+ name: Cortex XDR - Get entity alerts by MITRE tactics
+ description: |-
+ This playbook is part of the Cortex XDR by Palo Alto Networks’ pack. This playbook searches alerts related to specific entities from Cortex XDR, on a given timeframe, based on MITRE tactics.
+ Note: The playbook's inputs enable manipulating the execution flow. Read the input descriptions for details.
+ playbookName: Cortex XDR - Get entity alerts by MITRE tactics
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ EntityType:
+ simple: actor_effective_username
+ HuntCnCTechniques:
+ simple: "False"
+ HuntCollectionTechniques:
+ simple: "False"
+ HuntCredentialAccessTechniques:
+ simple: "False"
+ HuntDefenseEvasionTechniques:
+ simple: "False"
+ HuntDiscoveryTechniques:
+ simple: "False"
+ HuntExecutionTechniques:
+ simple: "False"
+ HuntImpactTechniques:
+ simple: "False"
+ HuntInitialAccessTechniques:
+ simple: "False"
+ HuntLateralMovementTechniques:
+ simple: "False"
+ HuntPersistenceTechniques:
+ simple: "False"
+ HuntPrivilegeEscalationTechniques:
+ simple: "False"
+ HuntReconnaissanceTechniques:
+ simple: "False"
+ RunAll:
+ simple: "True"
+ entityID:
+ complex:
+ root: inputs.Username
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: \
+ fields:
+ value:
+ simple: "2"
+ timeRange:
+ simple: 1 day
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 1120,
+ "y": 1010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: 01943894-59fd-4c5b-8480-a37cbc91a7f5
+ type: condition
+ task:
+ id: 01943894-59fd-4c5b-8480-a37cbc91a7f5
+ version: -1
+ name: Is Okta integration available?
+ description: Returns 'yes' if integration brand is available. Otherwise returns 'no'.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "24"
+ "yes":
+ - "21"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: modules
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: modules.brand
+ iscontext: true
+ right:
+ value:
+ simple: Okta v2
+ ignorecase: true
+ accessor: state
+ iscontext: true
+ right:
+ value:
+ simple: active
+ ignorecase: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: Account
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Account.Type
+ iscontext: true
+ right:
+ value:
+ simple: Okta
+ ignorecase: true
+ accessor: ID
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 860,
+ "y": 2530
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "35":
+ id: "35"
+ taskid: c2870a26-eb1e-4de8-8557-76cb91e5da68
+ type: regular
+ task:
+ id: c2870a26-eb1e-4de8-8557-76cb91e5da68
+ version: -1
+ name: Set the Number of related alerts
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.12/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ key:
+ simple: NumOfRelatedAlerts
+ value:
+ complex:
+ root: ArraySize
+ transformers:
+ - operator: SetIfEmpty
+ args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: "0"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1120,
+ "y": 1180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ fieldMapping:
+ - incidentfield: User Risk Level
+ output:
+ simple: ${PaloAltoNetworksXDR.RiskyUser.risk_level}
+ - incidentfield: Failed Logon Events
+ output:
+ complex:
+ root: AzureFailLoginCount
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: NumOfOktaFailedLogon
+ iscontext: true
+ - incidentfield: Email
+ output:
+ simple: ${ActiveDirectory.Users.mail}
+ - incidentfield: Account Member Of
+ output:
+ simple: ${ActiveDirectory.Users.memberOf}
+ - incidentfield: Account Status
+ output:
+ simple: ${Account.Status}
+ - incidentfield: Cloud Account ID
+ output:
+ simple: ${MSGraphUser.ID}
+ - incidentfield: Account ID
+ output:
+ complex:
+ root: Account
+ filters:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Account.Type
+ iscontext: true
+ right:
+ value:
+ simple: Okta
+ ignorecase: true
+ - - operator: notContainsGeneral
+ left:
+ value:
+ simple: Account.ID
+ iscontext: true
+ right:
+ value:
+ simple: "="
+ accessor: ID
+ - incidentfield: Manager Email Address
+ output:
+ simple: ${UserManagerEmail}
+ - incidentfield: XDR Alert Search Results
+ output:
+ simple: ${PaloAltoNetworksXDR.Alert}
+ - incidentfield: Alert Name
+ output:
+ simple: ${inputs.AlertName}
+ - incidentfield: Detected User
+ output:
+ simple: ${inputs.Username}
+ - incidentfield: Username
+ output:
+ simple: ${inputs.Username}
+ - incidentfield: Number Of Found Related Alerts
+ output:
+ simple: ${NumOfRelatedAlerts}
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "12_13_#default#": 0.51,
+ "12_16_yes": 0.33,
+ "13_14_Non-Malicious": 0.61,
+ "13_16_Malicious": 0.35,
+ "22_23_yes": 0.44,
+ "22_24_#default#": 0.33,
+ "2_29_yes": 0.44,
+ "34_24_#default#": 0.31
+ },
+ "paper": {
+ "dimensions": {
+ "height": 3235,
+ "width": 2320,
+ "x": -410,
+ "y": -240
+ }
+ }
+ }
+inputs:
+- key: AlertName
+ value: {}
+ required: false
+ description: Alert name.
+ playbookInputQuery:
+- key: alert_id
+ value: {}
+ required: false
+ description: Alert ID.
+ playbookInputQuery:
+- key: IPAddress
+ value: {}
+ required: false
+ description: IP address from the XDR alert.
+ playbookInputQuery:
+- key: Username
+ value: {}
+ required: false
+ description: User name.
+ playbookInputQuery:
+- key: RelatedAlertsThreshold
+ value:
+ simple: "5"
+ required: false
+ description: |
+ This is the minimum threshold for Cortex XDR related alerts, based on MITRE tactics used to identify malicious activity by the user in the last 1 day.
+ playbookInputQuery:
+- key: FailedLogonThreshold
+ value:
+ simple: "30"
+ required: false
+ description: |-
+ This is the minimum threshold for user login failures within the last 1 day.
+ For example: If this input is set to '30', and the 'Okta - User Investigation' or the 'Azure - User Investigation' sub-playbooks have found 31 failed login attempts - It will classify this behavior as malicious activity.
+ The default value is '30'.
+ playbookInputQuery:
+- key: OktaSuspiciousActivitiesThreshold
+ value:
+ simple: "5"
+ required: false
+ description: |-
+ This is the minimum threshold for suspicious Okta activity events by the user in the last 1 day.
+ For example: If this input is set to '5', and the 'Okta - User Investigation' sub-playbooks have found 6 events of suspicious activity by the user - It will classify this behavior as malicious activity.
+ The default value is '5'.
+ playbookInputQuery:
+- key: AutoRemediation
+ value:
+ simple: "False"
+ required: false
+ description: |-
+ Whether to execute the remediation flow automatically.
+ Possible values are: "True" and "False".
+ playbookInputQuery:
+- key: IAMRemediationType
+ value:
+ simple: Revoke
+ required: false
+ description: |-
+ The response playbook provides the following remediation actions using MSGraph Users:
+
+ Reset: By entering "Reset" in the input, the playbook will execute password reset.
+
+ Revoke: By entering "Revoke" in the input, the playbook will revoke the user's session.
+
+ ALL: By entering "ALL" in the input, the playbook will execute the reset password and revoke session tasks.
+ playbookInputQuery:
+- key: FWAutoCommit
+ value:
+ simple: "Yes"
+ required: false
+ description: "This input determines whether to commit the configuration automatically on PAN-OS devices and other firewalls. \nYes - Commit automatically.\nNo - Commit manually."
+ playbookInputQuery:
+- key: UserVerification
+ value:
+ simple: "False"
+ required: false
+ description: "Whether to provide user verification for blocking those IPs. \nPossible values: True/False. Default: False. \n\nFalse - No prompt will be displayed to the user.\nTrue - The server will ask the user for blocking verification and will display the blocking list."
+ playbookInputQuery:
+- key: InternalRange
+ value:
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexReplace
+ args:
+ action_dt: {}
+ ignore_case: {}
+ multi_line: {}
+ output_format: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: IANA_Private_Address
+ required: false
+ description: 'A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, will use the default list provided in the IsIPInRanges script (the known IPv4 private address ranges).'
+ playbookInputQuery:
+inputSections:
+- inputs:
+ - AlertName
+ - alert_id
+ name: Incident Management
+ description: Incident management settings and data, including escalation processes, user engagements and ticketing methods.
+- inputs:
+ - IPAddress
+ - Username
+ name: Enrichment
+ description: Enrichment settings and data, including assets and indicators enrichment using third-party enrichers.
+- inputs:
+ - RelatedAlertsThreshold
+ - FailedLogonThreshold
+ - OktaSuspiciousActivitiesThreshold
+ name: Investigation
+ description: Investigation settings and data, including any deep dive incident investigation and verdict determination.
+- inputs:
+ - AutoRemediation
+ - IAMRemediationType
+ - FWAutoCommit
+ - UserVerification
+ - InternalRange
+ name: Remediation
+ description: Remediation settings and data, including containment, eradication, and recovery.
+outputSections:
+- outputs: []
+ name: General (Outputs group)
+ description: Generic group for outputs
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics_README.md b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics_README.md
new file mode 100644
index 000000000000..2843782af542
--- /dev/null
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Identity_Analytics_README.md
@@ -0,0 +1,80 @@
+The `Cortex XDR - Identity Analytics` playbook is designed to handle Cortex XDR Identity Analytics alerts and executes the following:
+
+Analysis:
+- Enriches the IP address and the account, providing additional context and information about these indicators.
+
+Verdict:
+- Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+Investigation:
+- Checks for related Cortex XDR alerts to the user by Mitre tactics to identify malicious activity.
+- Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+- Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+Verdict Handling:
+- Handles malicious alerts by initiating appropriate response actions, including blocking malicious IP addresses and revoking or clearing user's sessions.
+- Handles non-malicious alerts identified during the investigation.
+
+The playbook is used as a sub-playbook in ‘Cortex XDR Alerts Handling v2’.
+
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+* Azure - User Investigation
+* Cloud Credentials Rotation - Azure
+* Okta - User Investigation
+* Cortex XDR - Get entity alerts by MITRE tactics
+* Block IP - Generic v3
+* Cloud IAM Enrichment - Generic
+* Account Enrichment - Generic v2.1
+
+### Integrations
+
+* XDR_iocs
+* CortexXDRIR
+* XQLQueryingEngine
+
+### Scripts
+
+SetAndHandleEmpty
+
+### Commands
+
+* okta-clear-user-sessions
+* xdr-get-cloud-original-alerts
+* setIncident
+* ip
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| AlertName | Alert name. | | Optional |
+| alert_id | Alert ID. | | Optional |
+| IPAddress | IP address from the XDR alert. | | Optional |
+| Username | User name. | | Optional |
+| RelatedAlertsThreshold | This is the minimum threshold for Cortex XDR related alerts, based on MITRE tactics used to identify malicious activity by the user in the last 1 day. | 5 | Optional |
+| FailedLogonThreshold | This is the minimum threshold for user login failures within the last 1 day. For example: If this input is set to '30', and the 'Okta - User Investigation' or the 'Azure - User Investigation' sub-playbooks have found 31 failed login attempts - It will classify this behavior as malicious activity. The default value is '30'. | 30 | Optional |
+| OktaSuspiciousActivitiesThreshold | This is the minimum threshold for suspicious Okta activity events by the user in the last 1 day. For example: If this input is set to '5', and the 'Okta - User Investigation' sub-playbooks have found 6 events of suspicious activity by the user - It will classify this behavior as malicious activity. The default value is '5'. | 5 | Optional |
+| AutoRemediation | Whether to execute the remediation flow automatically. Possible values are: "True" and "False". | False | Optional |
+| IAMRemediationType | The response playbook provides the following remediation actions using MSGraph Users:
Reset: By entering "Reset" in the input, the playbook will execute password reset.
Revoke: By entering "Revoke" in the input, the playbook will revoke the user's session.
ALL: By entering "ALL" in the input, the playbook will execute the reset password and revoke session tasks. | Revoke | Optional |
+| FWAutoCommit | This input determines whether to commit the configuration automatically on PAN-OS devices and other FWs. Yes - Commit automatically. No - Commit manually. | Yes | Optional |
+| UserVerification | Possible values: True/False. Default: False. Whether to provide user verification for blocking those IPs.
False - No prompt will be displayed to the user. True - The server will ask the user for blocking verification and will display the blocking list. | False | Optional |
+| InternalRange | A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges would be: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, will use the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | lists.PrivateIPs | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex XDR - Identity Analytics](../doc_files/Cortex_XDR_-_Identity_Analytics.png)
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts.yml
index 39491a42e06b..e76811925028 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts.yml
@@ -2,7 +2,7 @@ id: Cortex XDR - Search and Compare Process Executions - XDR Alerts
version: -1
name: Cortex XDR - Search and Compare Process Executions - XDR Alerts
description: |-
- This playbook is a generic playbook that receives a process name and command-line argument. It uses the "Cortex XDR IR" integration to search for the given process executions inside XDR alerts and compares the command-line argument from the results to the command-line argument received from the playbook input.
+ This playbook is a generic playbook that receives a process name and command-line argument. It uses the "Cortex XDR IR" integration to search for the given process executions inside Cortex XDR alerts and compares the command-line argument from the results to the command-line argument received from the playbook input.
Note: Under the "Processes" input, the playbook should receive an array that contains the following keys:
- value: *process name*
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts_README.md b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts_README.md
index 89589635b650..23748aa6520a 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts_README.md
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_-_Search_and_Compare_Process_Executions_-_XDR_Alerts_README.md
@@ -1,4 +1,4 @@
-This playbook is a generic playbook that receives a process name and command-line argument. It uses the "Cortex XDR IR" integration to search for the given process executions inside XDR alerts and compares the command-line argument from the results to the command-line argument received from the playbook input.
+This playbook is a generic playbook that receives a process name and command-line argument. It uses the "Cortex XDR IR" integration to search for the given process executions inside Cortex XDR alerts and compares the command-line argument from the results to the command-line argument received from the playbook input.
Note: Under the "Processes" input, the playbook should receive an array that contains the following keys:
- value: *process name*
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling.yml
index 0e9483186303..b8bbad67d2fc 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling.yml
@@ -176,6 +176,8 @@ tasks:
- operator: uniq
URLSSLVerification:
simple: "False"
+ UseReputationCommand:
+ simple: "True"
Username:
complex:
root: inputs.Username
@@ -2142,11 +2144,29 @@ inputs:
playbookInputQuery:
- key: InternalRange
value:
- simple: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16
+ complex:
+ root: lists
+ accessor: PrivateIPs
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: (\b(?:\d{1,3}\.){3}\d{1,3}\b/\d{1,2})
+ unpack_matches: {}
+ - operator: join
+ args:
+ separator:
+ value:
+ simple: ','
required: false
description: |-
This input is used in the "Entity Enrichment - Generic v3" playbook.
- A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes). If a list is not provided, uses the default list provided in the IsIPInRanges script (the known IPv4 private address ranges).
+ A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" (without quotes).
playbookInputQuery:
- key: XDRRelatedAlertsThreshold
value:
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling_README.md b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling_README.md
index 155605857bec..d1f734d8c650 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling_README.md
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_Lite_-_Incident_Handling_README.md
@@ -23,11 +23,11 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Entity Enrichment - Generic v3
-* Cortex XDR - Get entity alerts by MITRE tactics
+* Cortex XDR - Isolate Endpoint
* Block Indicators - Generic v3
* Command-Line Analysis
-* Cortex XDR - Isolate Endpoint
+* Entity Enrichment - Generic v3
+* Cortex XDR - Get entity alerts by MITRE tactics
### Integrations
@@ -36,8 +36,8 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Scripts
-* SetAndHandleEmpty
* Set
+* SetAndHandleEmpty
### Commands
@@ -55,7 +55,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
| Hostname | Hostname. | PaloAltoNetworksXDR.Incident.alerts.host_name | Optional |
| EndpointID | XDR endpoint ID. | PaloAltoNetworksXDR.Incident.alerts.endpoint_id | Optional |
| XDRDomain | XDR instance domain. | incident.xdrurl | Optional |
-| InternalRange | This input is used in the "Entity Enrichment - Generic v3" playbook. A list of internal IP ranges to check IP addresses against. The list should be provided in CIDR notation, separated by commas. An example of a list of ranges is: "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). If a list is not provided, uses the default list provided in the IsIPInRanges script \(the known IPv4 private address ranges\). | 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 | Optional |
+| InternalRange | This input is used in the "Entity Enrichment - Generic v3" playbook. A list of internal IP ranges to check IP addresses against. The comma-separated list should be provided in CIDR notation. For example, "172.16.0.0/12,10.0.0.0/8,192.168.0.0/16" \(without quotes\). | lists.PrivateIPs | Optional |
| XDRRelatedAlertsThreshold | This is the minimum threshold for XDR-related alerts of medium severity or higher, based on MITRE tactics used to identify malicious activity on the endpoint and by the user. Example: If this input is set to '5' and it detects '6' XDR-related alerts, it will classify this check as indicating malicious activity. The default value is '5'. | 5 | Optional |
| AutoBlockIndicators | Possible values: True/False. Default: True. Should the given indicators be automatically blocked, or should the user be given the option to choose?
If set to False - no prompt will appear, and all provided indicators will be blocked automatically. If set to True - the user will be prompted to select which indicators to block. | False | Optional |
| AutoIsolateEndpoint | Whether to isolate the endpoint automatically. | False | Optional |
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_18.md b/Packs/CortexXDR/ReleaseNotes/6_1_18.md
new file mode 100644
index 000000000000..5393849fde26
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_18.md
@@ -0,0 +1,40 @@
+
+#### Layouts
+
+##### Cortex XDR Incident
+
+Added a dedicated tab for Identity Analytics alerts handling.
+
+#### Mappers
+
+##### XDR - Incoming Mapper
+Added a new mapping for the incident field `alert tags`.
+
+#### Playbooks
+
+##### Cortex XDR Alerts Handling v2
+Added a flow for the new *Identity Analytics* playbook.
+##### New: Cortex XDR - Identity Analytics
+New: The `Cortex XDR - Identity Analytics` playbook is designed to handle Cortex XDR Identity Analytics alerts and executes the following:
+
+Analysis:
+- Enriches the IP address and the account, providing additional context and information about these indicators.
+
+Verdict:
+- Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+Investigation:
+- Checks for related Cortex XDR alerts to the user by MITRE tactics to identify malicious activity.
+- Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+- Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+Verdict Handling:
+- Handles malicious alerts by initiating appropriate response actions, including blocking malicious IP addresses and revoking or clearing user's sessions.
+- Handles non-malicious alerts identified during the investigation.
+
+The playbook is used as a sub-playbook in ‘Cortex XDR Alerts Handling v2’.
+(Available from Cortex XSOAR 6.10.0).
+##### Cortex XDR - First SSO Access - Set Verdict
+Deprecated. Use `Cortex XDR - Identity Analytics` instead.
+##### Cortex XDR - First SSO Access
+Deprecated. Use `Cortex XDR - Identity Analytics` instead.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_19.md b/Packs/CortexXDR/ReleaseNotes/6_1_19.md
new file mode 100644
index 000000000000..141124a12071
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_19.md
@@ -0,0 +1,6 @@
+#### Integrations
+
+##### Cortex XDR - IOC
+
+- Added a functionality to display XSOAR indicator link in XDR when **indicator_link** is received as the comment.
+- Updated the Docker image to: *demisto/python3:3.10.13.89009*.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_20.md b/Packs/CortexXDR/ReleaseNotes/6_1_20.md
new file mode 100644
index 000000000000..5f714f1481f0
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+Added support for mirrored flexible close-reason mapping from Cortex XSOAR > Cortex XDR and vice-versa.
\ No newline at end of file
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_21.md b/Packs/CortexXDR/ReleaseNotes/6_1_21.md
new file mode 100644
index 000000000000..5267c27fb14b
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_21.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+Improved implementation.
\ No newline at end of file
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_22.md b/Packs/CortexXDR/ReleaseNotes/6_1_22.md
new file mode 100644
index 000000000000..1e08ba8c6baf
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_22.md
@@ -0,0 +1,33 @@
+
+#### Layouts
+
+##### Cortex XDR Incident
+
+- Added a dedicated tab for the "Large Upload" alert handling.
+
+#### Playbooks
+
+##### New: Cortex XDR - Large Upload
+
+- The playbook investigates Cortex XDR incidents involving large upload alerts. The playbook is designed to run as a sub-playbook of ‘Cortex XDR Alerts Handling v2’.
+
+The playbook consists of the following procedures:
+- Searches for similar previous incidents that were closed as false positives.
+- Enrichment and investigation of the initiator and destination hostname and IP address.
+- Enrichment and investigation of the initiator user, process, file, or command if it exists.
+- Detection of related indicators and analysis of the relationship between the detected indicators.
+- Utilizes the detected indicators to conduct threat hunting.
+- Blocks detected malicious indicators.
+- Endpoint isolation.
+
+This playbook supports the following Cortex XDR alert names:
+- Large Upload (Generic)
+- Large Upload (SMTP)
+- Large Upload (FTP)
+- Large Upload (HTTPS) (Available from Cortex XSOAR 6.10.0).
+##### Cortex XDR Alerts Handling v2
+
+- Added the 'Cortex XDR - Large Upload' sub-playbook.
+##### Cortex XDR - Search and Compare Process Executions - XDR Alerts
+
+- Updated the playbook description.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_23.md b/Packs/CortexXDR/ReleaseNotes/6_1_23.md
new file mode 100644
index 000000000000..f97132bb7a34
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_23.md
@@ -0,0 +1,14 @@
+
+#### Playbooks
+
+##### Cortex XDR incident handling v3
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Cortex XDR - Port Scan - Adjusted
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
+
+##### Cortex XDR Lite - Incident Handling
+
+Updated the input 'InternalRange' to use the 'PrivateIPs' list.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_24.md b/Packs/CortexXDR/ReleaseNotes/6_1_24.md
new file mode 100644
index 000000000000..e3a050be65b4
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_24.md
@@ -0,0 +1,10 @@
+
+#### Playbooks
+
+##### Cortex XDR incident handling v3
+
+Updated the conditional task that determines whether to continue with the investigation and response to any unhandled alerts if they are found.
+
+##### Cortex XDR Alerts Handling v2
+
+Added a new task to set context key for unhandled alerts.
diff --git a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
index a626f4321beb..d7605a42e700 100644
--- a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
+++ b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
@@ -111,7 +111,7 @@ tasks:
{
"position": {
"x": 2170,
- "y": -270
+ "y": -80
}
}
note: false
@@ -154,7 +154,7 @@ tasks:
{
"position": {
"x": 1765,
- "y": -270
+ "y": -80
}
}
note: false
@@ -196,7 +196,7 @@ tasks:
{
"position": {
"x": 3040,
- "y": -270
+ "y": -80
}
}
note: false
@@ -240,7 +240,7 @@ tasks:
{
"position": {
"x": 2605,
- "y": -270
+ "y": -80
}
}
note: false
@@ -317,7 +317,7 @@ tasks:
{
"position": {
"x": 1765,
- "y": -420
+ "y": -225
}
}
note: false
@@ -348,7 +348,7 @@ tasks:
{
"position": {
"x": 1360,
- "y": -80
+ "y": 110
}
}
note: false
@@ -380,7 +380,7 @@ tasks:
{
"position": {
"x": 2605,
- "y": -25
+ "y": 165
}
}
note: false
@@ -412,7 +412,7 @@ tasks:
{
"position": {
"x": 3040,
- "y": -25
+ "y": 165
}
}
note: false
@@ -444,7 +444,7 @@ tasks:
{
"position": {
"x": 1770,
- "y": -25
+ "y": 165
}
}
note: false
@@ -476,7 +476,7 @@ tasks:
{
"position": {
"x": 2170,
- "y": -25
+ "y": 165
}
}
note: false
@@ -843,7 +843,7 @@ tasks:
{
"position": {
"x": 3440,
- "y": -270
+ "y": -80
}
}
note: false
@@ -875,7 +875,7 @@ tasks:
{
"position": {
"x": 3440,
- "y": -25
+ "y": 165
}
}
note: false
@@ -917,7 +917,7 @@ tasks:
{
"position": {
"x": 3840,
- "y": -270
+ "y": -80
}
}
note: false
@@ -949,7 +949,7 @@ tasks:
{
"position": {
"x": 3840,
- "y": -25
+ "y": 165
}
}
note: false
@@ -991,7 +991,7 @@ tasks:
{
"position": {
"x": 4240,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1023,7 +1023,7 @@ tasks:
{
"position": {
"x": 4240,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1065,7 +1065,7 @@ tasks:
{
"position": {
"x": 4640,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1097,7 +1097,7 @@ tasks:
{
"position": {
"x": 4640,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1139,7 +1139,7 @@ tasks:
{
"position": {
"x": 5040,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1171,7 +1171,7 @@ tasks:
{
"position": {
"x": 5040,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1213,7 +1213,7 @@ tasks:
{
"position": {
"x": 5440,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1245,7 +1245,7 @@ tasks:
{
"position": {
"x": 5440,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1287,7 +1287,7 @@ tasks:
{
"position": {
"x": 5840,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1319,7 +1319,7 @@ tasks:
{
"position": {
"x": 5840,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1361,7 +1361,7 @@ tasks:
{
"position": {
"x": 6250,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1393,7 +1393,7 @@ tasks:
{
"position": {
"x": 6250,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1435,7 +1435,7 @@ tasks:
{
"position": {
"x": 6660,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1467,7 +1467,7 @@ tasks:
{
"position": {
"x": 6660,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1509,7 +1509,7 @@ tasks:
{
"position": {
"x": 7060,
- "y": -270
+ "y": -80
}
}
note: false
@@ -1541,7 +1541,7 @@ tasks:
{
"position": {
"x": 7060,
- "y": -25
+ "y": 165
}
}
note: false
@@ -1849,7 +1849,7 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "112"
+ - "366"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -3049,7 +3049,47 @@ tasks:
{
"position": {
"x": 540,
- "y": 150
+ "y": 250
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "366":
+ id: "366"
+ taskid: a6713fd8-827f-4339-845b-def430819831
+ type: condition
+ task:
+ id: a6713fd8-827f-4339-845b-def430819831
+ version: -1
+ name: Is the PaloAltoNetworksXDR Alert key defined?
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "113"
+ "yes":
+ - "112"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Alert
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1765,
+ "y": -420
}
}
note: false
@@ -3087,6 +3127,7 @@ view: |-
"359_338_Verified": 0.1,
"361_338_Verified": 0.1,
"363_338_Verified": 0.1,
+ "366_112_yes": 0.55,
"46_113_Verified": 0.1,
"73_113_Verified": 0.3,
"73_116_#default#": 0.7,
@@ -3095,7 +3136,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 1680,
+ "height": 1780,
"width": 13370,
"x": -5930,
"y": -1465
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_-_Identity_Analytics.png b/Packs/CortexXDR/doc_files/Cortex_XDR_-_Identity_Analytics.png
new file mode 100644
index 000000000000..356af7712c96
Binary files /dev/null and b/Packs/CortexXDR/doc_files/Cortex_XDR_-_Identity_Analytics.png differ
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_-_Large_Upload.png b/Packs/CortexXDR/doc_files/Cortex_XDR_-_Large_Upload.png
new file mode 100644
index 000000000000..c5688f9f813f
Binary files /dev/null and b/Packs/CortexXDR/doc_files/Cortex_XDR_-_Large_Upload.png differ
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png b/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png
index dc081b4e760f..c39c3ef84187 100644
Binary files a/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png and b/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png differ
diff --git a/Packs/CortexXDR/pack_metadata.json b/Packs/CortexXDR/pack_metadata.json
index eba560837ef6..6efe702e5898 100644
--- a/Packs/CortexXDR/pack_metadata.json
+++ b/Packs/CortexXDR/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex XDR by Palo Alto Networks",
"description": "Automates Cortex XDR incident response, and includes custom Cortex XDR incident views and layouts to aid analyst investigations.",
"support": "xsoar",
- "currentVersion": "6.1.17",
+ "currentVersion": "6.1.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
index dab56bcfb32f..a552d85e7e74 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
@@ -1,5 +1,7 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
+
+
from typing import Any, cast
import urllib3
@@ -7,9 +9,8 @@
# Disable insecure warnings
urllib3.disable_warnings()
-DEFAULT_SEARCH_LIMIT = 100
+DEFAULT_SEARCH_LIMIT = int(demisto.params().get('search_limit', 100))
MAX_ALERTS = 100 # max alerts per fetch
-ONE_HOUR = 3600
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
V1_URL_SUFFIX = "/public_api/v1"
V2_URL_SUFFIX = "/public_api/v2"
@@ -1290,11 +1291,9 @@ def fetch_incidents(client: Client, max_fetch: int, last_run: dict[str, int],
# Handle first time fetch
last_fetch = first_fetch_time if last_fetch is None else int(last_fetch)
-
latest_created_time = cast(int, last_fetch)
- # because some values are not populated immediately at alert creation time,
- # we will add an additional offset to increase the likelihood that these are available
- latest_created_time = latest_created_time + ONE_HOUR
+
+ demisto.debug(f"CortexXpanse - last fetched alert timestamp: {str(last_fetch)}")
incidents = []
@@ -1330,6 +1329,10 @@ def fetch_incidents(client: Client, max_fetch: int, last_run: dict[str, int],
latest_created_time = incident_created_time
next_run = {'last_fetch': latest_created_time}
+
+ demisto.debug(f"CortexXpanse - Number of incidents: {len(incidents)}")
+    demisto.debug(f"CortexXpanse - Next run after incidents fetching: {next_run}")
+
return next_run, incidents
@@ -1416,6 +1419,14 @@ def main() -> None:
headers=headers,
proxy=proxy)
+ # To debug integration instance configuration.
+ integration_context = demisto.getIntegrationContext()
+ if 'xpanse_integration_severity' in integration_context:
+ xpanse_integration_severity = integration_context.get('xpanse_integration_severity')
+ if xpanse_integration_severity != severity:
+ demisto.setIntegrationContext({"xpanse_integration_severity": severity})
+        demisto.debug(f"CortexXpanse - Integration Severity: {severity}")
+
commands = {
'asm-list-external-service': list_external_service_command,
'asm-get-external-service': get_external_service_command,
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
index 12f8ac5add35..8c4d92a130f6 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
@@ -56,6 +56,7 @@ configuration:
- critical
- informational
type: 16
+ required: false
- display: Alert Statuses to fetch
name: status
type: 16
@@ -89,6 +90,12 @@ configuration:
- F - Reliability cannot be judged
required: false
type: 15
+- additionalinfo: Maximum amount of search results for listing commands.
+ defaultvalue: '100'
+ display: Search Limit
+ name: search_limit
+ required: false
+ type: 0
description: Integration to pull assets and other ASM related information.
display: Cortex Xpanse
name: Cortex Xpanse
@@ -847,7 +854,7 @@ script:
- contextPath: ASM.ExternalWebsite
description: A list of the websites results assets.
description: Get external websites assets.
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.89009
isFetchSamples: true
isfetch: true
script: ''
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_0_19.md b/Packs/CortexXpanse/ReleaseNotes/1_0_19.md
new file mode 100644
index 000000000000..589524adfc01
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_0_19.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Cortex Xpanse
+Updated the fetch logic to no longer contain a one-hour offset.
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_0_20.md b/Packs/CortexXpanse/ReleaseNotes/1_0_20.md
new file mode 100644
index 000000000000..0e83c591c9b5
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_0_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cortex Xpanse
+
+- Added the *Search Limit* parameter to set a specific default search limit for commands returning a list of objects. For example: ***asm-list-external-service***, ***asm-list-external-ip-address-range***, ***asm-list-asset-internet-exposure*** and ***asm-list-attack-surface-rule*** commands.
diff --git a/Packs/CortexXpanse/pack_metadata.json b/Packs/CortexXpanse/pack_metadata.json
index 4b1bf85a265a..dc34c5e21480 100644
--- a/Packs/CortexXpanse/pack_metadata.json
+++ b/Packs/CortexXpanse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Xpanse",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.0.18",
+ "currentVersion": "1.0.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CounterTack/Integrations/CounterTack/CounterTack.yml b/Packs/CounterTack/Integrations/CounterTack/CounterTack.yml
index 41c497d843a2..4d93f3613e6d 100644
--- a/Packs/CounterTack/Integrations/CounterTack/CounterTack.yml
+++ b/Packs/CounterTack/Integrations/CounterTack/CounterTack.yml
@@ -1145,7 +1145,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.13.86272
tests:
- no tests
fromversion: 5.0.0
diff --git a/Packs/CounterTack/ReleaseNotes/1_0_9.md b/Packs/CounterTack/ReleaseNotes/1_0_9.md
new file mode 100644
index 000000000000..18619bce4e1c
--- /dev/null
+++ b/Packs/CounterTack/ReleaseNotes/1_0_9.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CounterTack
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CounterTack/pack_metadata.json b/Packs/CounterTack/pack_metadata.json
index 902b7561c756..d5e62aa6af2a 100644
--- a/Packs/CounterTack/pack_metadata.json
+++ b/Packs/CounterTack/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CounterTack",
"description": "CounterTack empowers endpoint security teams to assure endpoint protection for Identifying Cyber Threats. Integrating a predictive endpoint protection platform",
"support": "xsoar",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Incident_Classifier.json b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Incident_Classifier.json
index 1f17e333b2be..d88345829701 100644
--- a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Incident_Classifier.json
+++ b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Incident_Classifier.json
@@ -10,7 +10,8 @@
"incident": "CrowdStrike Falcon Incident",
"IDP detection": "CrowdStrike Falcon IDP Detection",
"iom_configurations": "CrowdStrike Falcon IOM Event",
- "ioa_events": "CrowdStrike Falcon IOA Event"
+ "ioa_events": "CrowdStrike Falcon IOA Event",
+ "MOBILE detection": "CrowdStrike Falcon Mobile Detection"
},
"transformer": {
"complex": null,
diff --git a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper.json b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper.json
index 733e100895d1..ae393424db2a 100644
--- a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper.json
+++ b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper.json
@@ -547,6 +547,116 @@
}
}
},
+ "CrowdStrike Falcon Mobile Detection": {
+ "dontMapEventToLabels": true,
+ "internalMapping": {
+ "Agent ID": {
+ "simple": "agent_id"
+ },
+ "State": {
+ "simple": "status"
+ },
+ "CrowdStrike Falcon Crawled Timestamp": {
+ "simple": "crawled_timestamp"
+ },
+ "CrowdStrike Falcon Detection Type": {
+ "simple": "type"
+ },
+ "CrowdStrike Falcon Firmware Build Fingerprint": {
+ "simple": "firmware_build_fingerprint"
+ },
+ "CrowdStrike Falcon Firmware Build Time": {
+ "simple": "firmware_build_time"
+ },
+ "Behaviour Objective": {
+ "simple": "objective"
+ },
+ "Behaviour Scenario": {
+ "simple": "scenario"
+ },
+ "Behaviour Tactic": {
+ "simple": "tactic"
+ },
+ "CrowdStrike Falcon Mobile Manufacturer": {
+ "simple": "mobile_manufacturer"
+ },
+ "CrowdStrike Falcon Mobile Product": {
+ "simple": "mobile_product"
+ },
+ "CrowdStrike Falcon Mobile platform version": {
+ "simple": "platform_version"
+ },
+ "CrowdStrike Falcon Security patch level": {
+ "simple": "security_patch_level"
+ },
+ "Description": {
+ "simple": "description"
+ },
+ "Device Name": {
+ "simple": "computer_name"
+ },
+ "External Confidence": {
+ "simple": "confidence"
+ },
+ "Mobile Device Model": {
+ "simple": "mobile_model"
+ },
+ "OS": {
+ "simple": "platform"
+ },
+ "OS Version": {
+ "simple": "os_version"
+ },
+ "Tactic ID": {
+ "simple": "tactic_id"
+ },
+ "Technique": {
+ "simple": "technique"
+ },
+ "Technique ID": {
+ "simple": "tactic_id"
+ },
+ "Threat Name": {
+ "simple": "name"
+ },
+ "occurred": {
+ "simple": "created_timestamp"
+ },
+ "severity": {
+ "simple": "severity"
+ },
+ "Display Name": {
+ "simple": "display_name"
+ },
+ "Email": {
+ "simple": "enrollment_email"
+ },
+ "Detection ID": {
+ "simple": "mobile_detection_id"
+ },
+ "Vendor Product": {
+ "simple": "product"
+ },
+ "Last Update Time": {
+ "simple": "updated_timestamp"
+ },
+ "dbotMirrorDirection": {
+ "simple": "mirror_direction"
+ },
+ "dbotMirrorId": {
+ "simple": "composite_id"
+ },
+ "dbotMirrorInstance": {
+ "simple": "mirror_instance"
+ },
+ "IncomingMirrorError": {
+ "simple": "in_mirror_error"
+ },
+ "name": {
+ "simple": "composite_id"
+ }
+ }
+ },
"dbot_classification_incident_type_all": {
"dontMapEventToLabels": false,
"internalMapping": {
diff --git a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json
index 0e97b8e11b48..76652aad3fc1 100644
--- a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json
+++ b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json
@@ -3,6 +3,116 @@
"feed": false,
"id": "CrowdStrike Falcon-Mapper",
"mapping": {
+ "CrowdStrike Falcon Mobile Detection": {
+ "dontMapEventToLabels": true,
+ "internalMapping": {
+ "Agent ID": {
+ "simple": "agent_id"
+ },
+ "State": {
+ "simple": "status"
+ },
+ "CrowdStrike Falcon Crawled Timestamp": {
+ "simple": "crawled_timestamp"
+ },
+ "CrowdStrike Falcon Detection Type": {
+ "simple": "type"
+ },
+ "CrowdStrike Falcon Firmware Build Fingerprint": {
+ "simple": "firmware_build_fingerprint"
+ },
+ "CrowdStrike Falcon Firmware Build Time": {
+ "simple": "firmware_build_time"
+ },
+ "Behaviour Objective": {
+ "simple": "objective"
+ },
+ "Behaviour Scenario": {
+ "simple": "scenario"
+ },
+ "Behaviour Tactic": {
+ "simple": "tactic"
+ },
+ "CrowdStrike Falcon Mobile Manufacturer": {
+ "simple": "mobile_manufacturer"
+ },
+ "CrowdStrike Falcon Mobile Product": {
+ "simple": "mobile_product"
+ },
+ "CrowdStrike Falcon Mobile platform version": {
+ "simple": "platform_version"
+ },
+ "CrowdStrike Falcon Security patch level": {
+ "simple": "security_patch_level"
+ },
+ "Description": {
+ "simple": "description"
+ },
+ "Device Name": {
+ "simple": "computer_name"
+ },
+ "External Confidence": {
+ "simple": "confidence"
+ },
+ "Mobile Device Model": {
+ "simple": "mobile_model"
+ },
+ "OS": {
+ "simple": "platform"
+ },
+ "OS Version": {
+ "simple": "os_version"
+ },
+ "Tactic ID": {
+ "simple": "tactic_id"
+ },
+ "Technique": {
+ "simple": "technique"
+ },
+ "Technique ID": {
+ "simple": "tactic_id"
+ },
+ "Threat Name": {
+ "simple": "name"
+ },
+ "occurred": {
+ "simple": "created_timestamp"
+ },
+ "severity": {
+ "simple": "severity"
+ },
+ "Display Name": {
+ "simple": "display_name"
+ },
+ "Email": {
+ "simple": "enrollment_email"
+ },
+ "Detection ID": {
+ "simple": "mobile_detection_id"
+ },
+ "Vendor Product": {
+ "simple": "product"
+ },
+ "Last Update Time": {
+ "simple": "updated_timestamp"
+ },
+ "dbotMirrorDirection": {
+ "simple": "mirror_direction"
+ },
+ "dbotMirrorId": {
+ "simple": "composite_id"
+ },
+ "dbotMirrorInstance": {
+ "simple": "mirror_instance"
+ },
+ "IncomingMirrorError": {
+ "simple": "in_mirror_error"
+ },
+ "name": {
+ "simple": "mobile_detection_id"
+ }
+ }
+ },
"CrowdStrike Falcon IOA Event": {
"dontMapEventToLabels": false,
"internalMapping": {
diff --git a/Packs/CrowdStrikeFalcon/Classifiers/classifier-mapper-outgoing-CrowdStrike_Falcon.json b/Packs/CrowdStrikeFalcon/Classifiers/classifier-mapper-outgoing-CrowdStrike_Falcon.json
index a4e879562aff..26f0fd0449fb 100644
--- a/Packs/CrowdStrikeFalcon/Classifiers/classifier-mapper-outgoing-CrowdStrike_Falcon.json
+++ b/Packs/CrowdStrikeFalcon/Classifiers/classifier-mapper-outgoing-CrowdStrike_Falcon.json
@@ -19,6 +19,14 @@
}
}
},
+ "CrowdStrike Falcon Mobile Detection": {
+ "dontMapEventToLabels": true,
+ "internalMapping": {
+ "status": {
+ "simple": "state"
+ }
+ }
+ },
"CrowdStrike Falcon Incident": {
"dontMapEventToLabels": true,
"internalMapping": {
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Objective.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Objective.json
index c2f99d44128e..a15dec680e16 100644
--- a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Objective.json
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Objective.json
@@ -2,7 +2,8 @@
"associatedToAll": false,
"associatedTypes": [
"CrowdStrike Falcon Detection",
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"breachScript": "",
"caseInsensitive": true,
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Scenario.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Scenario.json
index 08cf836bf9a5..6aabb2f2f260 100644
--- a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Scenario.json
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Scenario.json
@@ -2,7 +2,8 @@
"associatedToAll": false,
"associatedTypes": [
"CrowdStrike Falcon Detection",
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"breachScript": "",
"caseInsensitive": true,
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Tactic.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Tactic.json
index 60e08e5f0eac..9cd865aff508 100644
--- a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Tactic.json
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Behaviour_Tactic.json
@@ -2,7 +2,8 @@
"associatedToAll": false,
"associatedTypes": [
"CrowdStrike Falcon Detection",
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"breachScript": "",
"caseInsensitive": true,
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Crawled_Timestamp.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Crawled_Timestamp.json
index 56b7b0de848b..63c28204a34c 100644
--- a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Crawled_Timestamp.json
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Crawled_Timestamp.json
@@ -19,7 +19,8 @@
"hidden": false,
"openEnded": false,
"associatedTypes": [
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Detection_Type.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Detection_Type.json
index bd9c4f313b98..c850fec61060 100644
--- a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Detection_Type.json
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Detection_Type.json
@@ -19,7 +19,8 @@
"hidden": false,
"openEnded": false,
"associatedTypes": [
- "CrowdStrike Falcon IDP Detection"
+ "CrowdStrike Falcon IDP Detection",
+ "CrowdStrike Falcon Mobile Detection"
],
"associatedToAll": false,
"unmapped": false,
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_Build_Time.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_Build_Time.json
new file mode 100644
index 000000000000..01b1ec653ca8
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_Build_Time.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconfirmwarebuildtime",
+ "version": -1,
+ "modified": "2024-02-25T15:47:43.680520106Z",
+ "name": "CrowdStrike Falcon Firmware Build Time",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconfirmwarebuildtime",
+ "type": "date",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_build_Fingerprint.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_build_Fingerprint.json
new file mode 100644
index 000000000000..cc559f616828
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Firmware_build_Fingerprint.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconfirmwarebuildfingerprint",
+ "version": -1,
+ "modified": "2024-02-25T15:46:11.027991681Z",
+ "name": "CrowdStrike Falcon Firmware Build Fingerprint",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconfirmwarebuildfingerprint",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Manufacturer.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Manufacturer.json
new file mode 100644
index 000000000000..d3ce05edb3f9
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Manufacturer.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconmobilemanufacturer",
+ "version": -1,
+ "modified": "2024-02-25T15:49:57.121260285Z",
+ "name": "CrowdStrike Falcon Mobile Manufacturer",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconmobilemanufacturer",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Platform_Version.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Platform_Version.json
new file mode 100644
index 000000000000..e135c3d423d9
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Platform_Version.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconmobileplatformversion",
+ "version": -1,
+ "modified": "2024-02-25T15:48:24.075185814Z",
+ "name": "CrowdStrike Falcon Mobile platform version",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconmobileplatformversion",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Product.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Product.json
new file mode 100644
index 000000000000..6f8b3e38d33b
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Mobile_Product.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconmobileproduct",
+ "version": -1,
+ "modified": "2024-02-25T15:49:42.375129493Z",
+ "name": "CrowdStrike Falcon Mobile Product",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconmobileproduct",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Security_Patch_level.json b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Security_Patch_level.json
new file mode 100644
index 000000000000..9a6f11951af2
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentFields/incidentfield-Security_Patch_level.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_crowdstrikefalconsecuritypatchlevel",
+ "version": -1,
+ "modified": "2024-02-25T15:49:12.710595921Z",
+ "name": "CrowdStrike Falcon Security patch level",
+ "ownerOnly": false,
+ "cliName": "crowdstrikefalconsecuritypatchlevel",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "CrowdStrike Falcon Mobile Detection"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/IncidentTypes/incidenttype-CrowdStrikeFalconMobileDetection.json b/Packs/CrowdStrikeFalcon/IncidentTypes/incidenttype-CrowdStrikeFalconMobileDetection.json
new file mode 100644
index 000000000000..da8ac07c768a
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/IncidentTypes/incidenttype-CrowdStrikeFalconMobileDetection.json
@@ -0,0 +1,28 @@
+{
+ "id": "CrowdStrike Falcon Mobile Detection",
+ "version": -1,
+ "vcShouldIgnore": false,
+ "locked": false,
+ "name": "CrowdStrike Falcon Mobile Detection",
+ "prevName": "CrowdStrike Falcon Mobile Detection",
+ "color": "#E5CF7C",
+ "hours": 0,
+ "days": 0,
+ "weeks": 0,
+ "hoursR": 0,
+ "daysR": 0,
+ "weeksR": 0,
+ "system": false,
+ "readonly": false,
+ "default": false,
+ "autorun": false,
+ "disabled": false,
+ "reputationCalc": 0,
+ "onChangeRepAlg": 0,
+ "detached": false,
+ "extractSettings": {
+ "mode": "Specific",
+ "fieldCliNameToExtractSettings": {}
+ },
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
index ed70f3d33a39..0c9040978ca1 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
@@ -20,14 +20,18 @@
''' GLOBALS/PARAMS '''
INTEGRATION_NAME = 'CrowdStrike Falcon'
IDP_DETECTION = "IDP detection"
-CLIENT_ID = demisto.params().get('credentials', {}).get('identifier') or demisto.params().get('client_id')
-SECRET = demisto.params().get('credentials', {}).get('password') or demisto.params().get('secret')
+MOBILE_DETECTION = "MOBILE detection"
+IDP_DETECTION_FETCH_TYPE = "IDP Detection"
+MOBILE_DETECTION_FETCH_TYPE = "Mobile Detection"
+PARAMS = demisto.params()
+CLIENT_ID = PARAMS.get('credentials', {}).get('identifier') or PARAMS.get('client_id')
+SECRET = PARAMS.get('credentials', {}).get('password') or PARAMS.get('secret')
# Remove trailing slash to prevent wrong URL path to service
-SERVER = demisto.params()['url'].removesuffix('/')
+SERVER = PARAMS['url'].removesuffix('/')
# Should we use SSL
-USE_SSL = not demisto.params().get('insecure', False)
+USE_SSL = not PARAMS.get('insecure', False)
# How many time before the first fetch to retrieve incidents
-FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+FETCH_TIME = PARAMS.get('fetch_time', '3 days')
BYTE_CREDS = f'{CLIENT_ID}:{SECRET}'.encode()
# Headers to be sent in requests
HEADERS = {
@@ -37,9 +41,9 @@
}
# Note: True life time of token is actually 30 mins
TOKEN_LIFE_TIME = 28
-INCIDENTS_PER_FETCH = int(demisto.params().get('incidents_per_fetch', 15))
+INCIDENTS_PER_FETCH = int(PARAMS.get('incidents_per_fetch', 15))
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
-DETECTION_DATE_FORMAT = IDP_DATE_FORMAT = IOM_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+DETECTION_DATE_FORMAT = IOM_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
DEFAULT_TIMEOUT = 30
# Remove proxy if not set to true in params
handle_proxy()
@@ -228,7 +232,7 @@
''' MIRRORING DICTIONARIES & PARAMS '''
DETECTION_STATUS = {'new', 'in_progress', 'true_positive', 'false_positive', 'ignored', 'closed', 'reopened'}
-IDP_DETECTION_STATUS = {'new', 'in_progress', 'closed', 'reopened'}
+IDP_AND_MOBILE_DETECTION_STATUS = {'new', 'in_progress', 'closed', 'reopened'}
CS_FALCON_DETECTION_OUTGOING_ARGS = {'status': f'Updated detection status, one of {"/".join(DETECTION_STATUS)}'}
@@ -279,7 +283,7 @@
class IncidentType(Enum):
INCIDENT = 'inc'
DETECTION = 'ldt'
- IDP_DETECTION = ':ind:'
+ IDP_OR_MOBILE_DETECTION = ':ind:'
IOM_CONFIGURATIONS = 'iom_configurations'
IOA_EVENTS = 'ioa_events'
@@ -567,26 +571,28 @@ def incident_to_incident_context(incident):
return incident_context
-def idp_detection_to_incident_context(idp_detection):
+def detection_to_incident_context(detection, detection_type, start_time_key: str = 'start_time'):
"""
- Creates an incident context of an IDP detection.
+ Creates an incident context of an IDP/Mobile detection.
- :type idp_detection: ``dict``
- :param idp_detection: Single IDP detection object
+ :type detection: ``dict``
+ :param detection: Single detection object.
- :return: Incident context representation of an IDP detection.
- :rtype ``dict``
- """
- add_mirroring_fields(idp_detection)
- if status := idp_detection.get('status'):
- idp_detection['status'] = status
+ :return: Incident context representation of an IDP/Mobile detection.
+ :rtype ``dict``
+ """
+ add_mirroring_fields(detection)
incident_context = {
- 'name': f'IDP Detection ID: {idp_detection.get("composite_id")}',
- 'occurred': idp_detection.get('start_time'),
- 'last_updated': idp_detection.get('updated_timestamp'),
- 'rawJSON': json.dumps(idp_detection)
+ 'occurred': detection.get(start_time_key),
+ 'rawJSON': json.dumps(detection)
}
+ if detection_type == IDP_DETECTION_FETCH_TYPE:
+ incident_context['name'] = f'{detection_type} ID: {detection.get("composite_id")}'
+ incident_context['last_updated'] = detection.get('updated_timestamp')
+ elif detection_type == MOBILE_DETECTION_FETCH_TYPE:
+ incident_context['name'] = f'{detection_type} ID: {detection.get("mobile_detection_id")}'
+ incident_context['severity'] = detection.get('severity')
return incident_context
@@ -1397,7 +1403,7 @@ def get_incidents_ids(last_created_timestamp=None, filter_arg=None, offset: int
return response
-def get_idp_detections_ids(filter_arg=None, offset: int = 0, limit=INCIDENTS_PER_FETCH):
+def get_detections_ids(filter_arg=None, offset: int = 0, limit=INCIDENTS_PER_FETCH, product_type='idp'):
"""
Send a request to retrieve IDP detections IDs.
@@ -1420,7 +1426,7 @@ def get_idp_detections_ids(filter_arg=None, offset: int = 0, limit=INCIDENTS_PER
params['limit'] = limit
endpoint_url = "/alerts/queries/alerts/v1"
response = http_request('GET', endpoint_url, params)
- demisto.debug(f"CrowdStrikeFalconMsg: Getting idp detections from {endpoint_url} with {params=}. {response=}")
+ demisto.debug(f"CrowdStrikeFalconMsg: Getting {product_type} detections from {endpoint_url} with {params=}. {response=}")
return response
@@ -1435,9 +1441,9 @@ def get_incidents_entities(incidents_ids: list):
return response
-def get_idp_detection_entities(incidents_ids: list):
+def get_detection_entities(incidents_ids: list):
"""
- Send a request to retrieve IDP detection entities.
+ Send a request to retrieve IDP and mobile detection entities.
:type incidents_ids: ``list``
:param incidents_ids: The list of ids to search their entities.
@@ -1763,6 +1769,8 @@ def search_device(filter_operator='AND'):
'site_name': str(args.get('site_name', '')).split(','),
'local_ip': str(args.get('ip', '')).split(',')
}
+ limit = int(args.get('limit', 50))
+ offset = int(args.get('offset', 0))
url_filter = '{}'.format(str(args.get('filter', '')))
op = ',' if filter_operator == 'OR' else '+'
# In Falcon Query Language, '+' stands for AND and ',' for OR
@@ -1783,7 +1791,7 @@ def search_device(filter_operator='AND'):
# All args should be a list. this is a fallback
url_filter = "{url_filter}{operator}{inp_arg}:'{arg_val}'".format(url_filter=url_filter, operator=op,
inp_arg=k, arg_val=arg)
- raw_res = http_request('GET', '/devices/queries/devices/v1', params={'filter': url_filter})
+ raw_res = http_request('GET', '/devices/queries/devices/v1', params={'filter': url_filter, 'limit': limit, 'offset': offset})
device_ids = raw_res.get('resources')
if not device_ids:
return None
@@ -1842,9 +1850,9 @@ def resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment):
return http_request('PATCH', '/detects/entities/detects/v2', data=data)
-def resolve_idp_detection(ids, status):
+def resolve_idp_or_mobile_detection(ids, status):
"""
- Send a request to update IDP detection status.
+ Send a request to update IDP/Mobile detection status.
:type ids: ``list``
:param ids: The list of ids to update.
:type status: ``str``
@@ -1992,9 +2000,9 @@ def update_detection_request(ids: list[str], status: str) -> dict:
return resolve_detection(ids=ids, status=status, assigned_to_uuid=None, show_in_ui=None, comment=None)
-def update_idp_detection_request(ids: list[str], status: str) -> dict:
+def update_idp_or_mobile_detection_request(ids: list[str], status: str) -> dict:
"""
- Manage the status to send to update to for IDP detections.
+ Manage the status to send to update to for IDP/Mobile detections.
:type ids: ``list``
:param ids: The list of ids to update.
:type status: ``str``
@@ -2002,10 +2010,10 @@ def update_idp_detection_request(ids: list[str], status: str) -> dict:
:return: The response.
:rtype ``dict``
"""
- if status not in IDP_DETECTION_STATUS:
+ if status not in IDP_AND_MOBILE_DETECTION_STATUS:
raise DemistoException(f'CrowdStrike Falcon Error: '
- f'Status given is {status} and it is not in {IDP_DETECTION_STATUS}')
- return resolve_idp_detection(ids=ids, status=status)
+ f'Status given is {status} and it is not in {IDP_AND_MOBILE_DETECTION_STATUS}')
+ return resolve_idp_or_mobile_detection(ids=ids, status=status)
def list_host_groups(filter: str | None, limit: str | None, offset: str | None) -> dict:
@@ -2194,11 +2202,12 @@ def get_remote_data_command(args: dict[str, Any]):
demisto.debug(f'Update detection {remote_incident_id} with fields: {updated_object}')
set_xsoar_detection_entries(updated_object, entries, remote_incident_id) # sets in place
- elif incident_type == IncidentType.IDP_DETECTION:
- mirrored_data, updated_object = get_remote_idp_detection_data(remote_incident_id)
+ elif incident_type == IncidentType.IDP_OR_MOBILE_DETECTION:
+ mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(remote_incident_id)
if updated_object:
- demisto.debug(f'Update IDP detection {remote_incident_id} with fields: {updated_object}')
- set_xsoar_idp_detection_entries(updated_object, entries, remote_incident_id) # sets in place
+ demisto.debug(f'Update {detection_type} detection {remote_incident_id} with fields: {updated_object}')
+ set_xsoar_idp_or_mobile_detection_entries(
+ updated_object, entries, remote_incident_id, detection_type) # sets in place
else:
# this is here as prints can disrupt mirroring
@@ -2224,8 +2233,8 @@ def find_incident_type(remote_incident_id: str):
return IncidentType.INCIDENT
if remote_incident_id[0:3] == IncidentType.DETECTION.value:
return IncidentType.DETECTION
- if IncidentType.IDP_DETECTION.value in remote_incident_id:
- return IncidentType.IDP_DETECTION
+ if IncidentType.IDP_OR_MOBILE_DETECTION.value in remote_incident_id:
+ return IncidentType.IDP_OR_MOBILE_DETECTION
return None
@@ -2262,27 +2271,32 @@ def get_remote_detection_data(remote_incident_id: str):
return mirrored_data, updated_object
-def get_remote_idp_detection_data(remote_incident_id):
+def get_remote_idp_or_mobile_detection_data(remote_incident_id):
"""
- Gets the relevant IDP detection entity from the remote system (CrowdStrike Falcon).
+ Gets the relevant IDP or Mobile detection entity from the remote system (CrowdStrike Falcon).
:type remote_incident_id: ``str``
:param remote_incident_id: The incident id to return its information.
- :return: The IDP detection entity.
+ :return: The IDP or Mobile detection entity.
:rtype ``dict``
:return: The object with the updated fields.
:rtype ``dict``
+ :return: The detection type (idp or mobile).
+ :rtype ``str``
"""
- mirrored_data_list = get_idp_detection_entities([remote_incident_id]).get('resources', []) # a list with one dict in it
+ mirrored_data_list = get_detection_entities([remote_incident_id]).get('resources', []) # a list with one dict in it
mirrored_data = mirrored_data_list[0]
-
- if 'status' in mirrored_data:
- mirrored_data['status'] = mirrored_data.get('status')
-
- updated_object: dict[str, Any] = {'incident_type': IDP_DETECTION}
+ detection_type = ''
+ updated_object: dict[str, Any] = {}
+ if 'idp' in mirrored_data['product']:
+ updated_object = {'incident_type': IDP_DETECTION}
+ detection_type = 'IDP'
+ if 'mobile' in mirrored_data['product']:
+ updated_object = {'incident_type': MOBILE_DETECTION}
+ detection_type = 'Mobile'
set_updated_object(updated_object, mirrored_data, ['status'])
- return mirrored_data, updated_object
+ return mirrored_data, updated_object, detection_type
def set_xsoar_incident_entries(updated_object: dict[str, Any], entries: list, remote_incident_id: str):
@@ -2301,7 +2315,8 @@ def set_xsoar_detection_entries(updated_object: dict[str, Any], entries: list, r
reopen_in_xsoar(entries, remote_detection_id, 'Detection')
-def set_xsoar_idp_detection_entries(updated_object: dict[str, Any], entries: list, remote_idp_detection_id: str):
+def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], entries: list,
+ remote_idp_detection_id: str, incident_type_name: str):
"""
Send the updated object to the relevant status handler
@@ -2317,9 +2332,9 @@ def set_xsoar_idp_detection_entries(updated_object: dict[str, Any], entries: lis
"""
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'closed':
- close_in_xsoar(entries, remote_idp_detection_id, IDP_DETECTION)
- elif updated_object.get('status') in (set(IDP_DETECTION_STATUS) - {'closed'}):
- reopen_in_xsoar(entries, remote_idp_detection_id, IDP_DETECTION)
+ close_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
+ elif updated_object.get('status') in (set(IDP_AND_MOBILE_DETECTION_STATUS) - {'closed'}):
+ reopen_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
def close_in_xsoar(entries: list, remote_incident_id: str, incident_type_name: str):
@@ -2403,9 +2418,14 @@ def get_modified_remote_data_command(args: dict[str, Any]):
if 'Detections' in fetch_types or "Endpoint Detection" in fetch_types:
raw_ids += get_fetch_detections(last_updated_timestamp=last_update_timestamp, has_limit=False).get('resources', [])
- if "IDP Detection" in fetch_types:
- raw_ids += get_idp_detections_ids(
- filter_arg=f"updated_timestamp:>'{last_update_utc.strftime(IDP_DATE_FORMAT)}'+product:'idp'"
+ if IDP_DETECTION_FETCH_TYPE in fetch_types:
+ raw_ids += get_detections_ids(
+ filter_arg=f"updated_timestamp:>'{last_update_utc.strftime(DETECTION_DATE_FORMAT)}'+product:'idp'"
+ ).get('resources', [])
+
+ if MOBILE_DETECTION_FETCH_TYPE in fetch_types:
+ raw_ids += get_detections_ids(
+ filter_arg=f"updated_timestamp:>'{last_update_utc.strftime(DETECTION_DATE_FORMAT)}'+product:'mobile'"
).get('resources', [])
modified_ids_to_mirror = list(map(str, raw_ids))
@@ -2443,10 +2463,10 @@ def update_remote_system_command(args: dict[str, Any]) -> str:
if result:
demisto.debug(f'Detection updated successfully. Result: {result}')
- elif incident_type == IncidentType.IDP_DETECTION:
- result = update_remote_idp_detection(delta, parsed_args.inc_status, remote_incident_id)
+ elif incident_type == IncidentType.IDP_OR_MOBILE_DETECTION:
+ result = update_remote_idp_or_mobile_detection(delta, parsed_args.inc_status, remote_incident_id)
if result:
- demisto.debug(f'IDP Detection updated successfully. Result: {result}')
+ demisto.debug(f'IDP/Mobile Detection updated successfully. Result: {result}')
else:
raise Exception(f'Executed update-remote-system command with undefined id: {remote_incident_id}')
@@ -2487,25 +2507,25 @@ def update_remote_detection(delta, inc_status: IncidentStatus, detection_id: str
return ''
-def update_remote_idp_detection(delta, inc_status: IncidentStatus, detection_id: str) -> str:
+def update_remote_idp_or_mobile_detection(delta, inc_status: IncidentStatus, detection_id: str) -> str:
"""
- Sends the request the request to update the relevant IDP detection entity.
+ Sends the request to update the relevant IDP/Mobile detection entity.
:type delta: ``dict``
:param delta: The modified fields.
:type inc_status: ``IncidentStatus``
- :param inc_status: The IDP detection status.
+ :param inc_status: The IDP/Mobile detection status.
:type detection_id: ``str``
- :param detection_id: The IDP detection ID to update.
+ :param detection_id: The IDP/Mobile detection ID to update.
"""
if inc_status == IncidentStatus.DONE and close_in_cs_falcon(delta):
- demisto.debug(f'Closing IDP detection with remote ID {detection_id} in remote system.')
- return str(update_idp_detection_request([detection_id], 'closed'))
+ demisto.debug(f'Closing IDP/Mobile detection with remote ID {detection_id} in remote system.')
+ return str(update_idp_or_mobile_detection_request([detection_id], 'closed'))
# status field in CS Falcon is mapped to State field in XSOAR
elif 'status' in delta:
demisto.debug(f'Detection with remote ID {detection_id} status will change to "{delta.get("status")}" in remote system.')
- return str(update_idp_detection_request([detection_id], delta.get('status')))
+ return str(update_idp_or_mobile_detection_request([detection_id], delta.get('status')))
return ''
@@ -2615,18 +2635,21 @@ def fetch_incidents():
idp_detections: list = []
iom_incidents: list[dict[str, Any]] = []
ioa_incidents: list[dict[str, Any]] = []
+ mobile_detections: list[dict[str, Any]] = []
last_run = demisto.getLastRun()
demisto.debug(f'CrowdStrikeFalconMsg: Current last run object is {last_run}')
if not last_run:
- last_run = [{}, {}, {}, {}, {}]
+ last_run = [{}, {}, {}, {}, {}, {}]
last_run = migrate_last_run(last_run)
current_fetch_info_detections: dict = last_run[0]
current_fetch_info_incidents: dict = last_run[1]
current_fetch_info_idp_detections: dict = {} if len(last_run) < 3 else last_run[2]
iom_last_run: dict = {} if len(last_run) < 4 else last_run[3]
ioa_last_run: dict = {} if len(last_run) < 5 else last_run[4]
- fetch_incidents_or_detections = demisto.params().get('fetch_incidents_or_detections', "")
- look_back = int(demisto.params().get('look_back') or 1)
+ current_fetch_info_mobile_detections: dict = {} if len(last_run) < 6 else last_run[5]
+ params = demisto.params()
+ fetch_incidents_or_detections = params.get('fetch_incidents_or_detections', "")
+ look_back = int(params.get('look_back') or 1)
fetch_limit = INCIDENTS_PER_FETCH
demisto.debug(f"CrowdstrikeFalconMsg: Starting fetch incidents with {fetch_incidents_or_detections}")
@@ -2639,7 +2662,7 @@ def fetch_incidents():
date_format=DETECTION_DATE_FORMAT)
fetch_limit = current_fetch_info_detections.get('limit') or INCIDENTS_PER_FETCH
incident_type = 'detection'
- fetch_query = demisto.params().get('fetch_query')
+ fetch_query = params.get('fetch_query')
if fetch_query:
fetch_query = f"created_timestamp:>'{start_fetch_time}'+{fetch_query}"
response = get_fetch_detections(filter_arg=fetch_query, limit=fetch_limit, offset=detections_offset)
@@ -2696,7 +2719,7 @@ def fetch_incidents():
incident_type = 'incident'
- fetch_query = demisto.params().get('incidents_fetch_query')
+ fetch_query = params.get('incidents_fetch_query')
if fetch_query:
fetch_query = f"start:>'{start_fetch_time}'+{fetch_query}"
@@ -2736,55 +2759,32 @@ def fetch_incidents():
new_offset=incidents_offset)
demisto.debug(f"CrowdstrikeFalconMsg: Ending fetch Incidents. Fetched {len(incidents)}")
- if "IDP Detection" in fetch_incidents_or_detections:
- idp_detections_offset: int = current_fetch_info_idp_detections.get('offset') or 0
-
- start_fetch_time, end_fetch_time = get_fetch_run_time_range(last_run=current_fetch_info_idp_detections,
- first_fetch=FETCH_TIME,
- look_back=look_back,
- date_format=IDP_DATE_FORMAT)
- fetch_limit = current_fetch_info_idp_detections.get('limit') or INCIDENTS_PER_FETCH
- fetch_query = demisto.params().get('idp_detections_fetch_query', "")
- filter = f"product:'idp'+created_timestamp:>'{start_fetch_time}'"
-
- if fetch_query:
- filter += f"+{fetch_query}"
- response = get_idp_detections_ids(filter_arg=filter, limit=fetch_limit, offset=idp_detections_offset)
- idp_detections_ids: list[dict] = demisto.get(response, "resources", [])
- total_idp_detections = demisto.get(response, "meta.pagination.total")
- idp_detections_offset = calculate_new_offset(idp_detections_offset, len(idp_detections_ids), total_idp_detections)
- if idp_detections_offset:
- demisto.debug(f"CrowdStrikeFalconMsg: The new idp detections offset is {idp_detections_offset}")
-
- if idp_detections_ids:
- raw_res = get_idp_detection_entities(idp_detections_ids)
- if "resources" in raw_res:
- full_detections = demisto.get(raw_res, "resources")
- for idp_detection in full_detections:
- idp_detection['incident_type'] = IDP_DETECTION
- idp_detection_to_context = idp_detection_to_incident_context(idp_detection)
- idp_detections.append(idp_detection_to_context)
-
- idp_detections = filter_incidents_by_duplicates_and_limit(incidents_res=idp_detections,
- last_run=current_fetch_info_idp_detections,
- fetch_limit=INCIDENTS_PER_FETCH, id_field='name')
-
- current_fetch_info_idp_detections = update_last_run_object(last_run=current_fetch_info_idp_detections,
- incidents=idp_detections,
- fetch_limit=fetch_limit,
- start_fetch_time=start_fetch_time,
- end_fetch_time=end_fetch_time,
- look_back=look_back,
- created_time_field='occurred',
- id_field='name',
- date_format=IDP_DATE_FORMAT,
- new_offset=idp_detections_offset)
- demisto.debug(f"CrowdstrikeFalconMsg: Ending fetch idp_detections. Fetched {len(idp_detections)}")
+ if IDP_DETECTION_FETCH_TYPE in fetch_incidents_or_detections:
+ idp_detections, current_fetch_info_idp_detections = fetch_idp_and_mobile_detections(
+ current_fetch_info_idp_detections,
+ look_back=look_back,
+ fetch_query=params.get(
+ 'idp_detections_fetch_query', ""),
+ detections_type=IDP_DETECTION,
+ product_type='idp',
+ detection_name_prefix=IDP_DETECTION_FETCH_TYPE,
+ start_time_key='start_time')
+
+ if MOBILE_DETECTION_FETCH_TYPE in fetch_incidents_or_detections:
+ mobile_detections, current_fetch_info_mobile_detections = fetch_idp_and_mobile_detections(
+ current_fetch_info_mobile_detections,
+ look_back=look_back,
+ fetch_query=params.get(
+ 'mobile_detections_fetch_query', ""),
+ detections_type=MOBILE_DETECTION,
+ product_type='mobile',
+ detection_name_prefix=MOBILE_DETECTION_FETCH_TYPE,
+ start_time_key='timestamp')
if 'Indicator of Misconfiguration' in fetch_incidents_or_detections:
demisto.debug('Fetching Indicator of Misconfiguration incidents')
demisto.debug(f'{iom_last_run=}')
- fetch_query = demisto.params().get('iom_fetch_query', '')
+ fetch_query = params.get('iom_fetch_query', '')
validate_iom_fetch_query(iom_fetch_query=fetch_query)
last_resource_ids, iom_next_token, last_scan_time, first_fetch_timestamp = get_current_fetch_data(
@@ -2818,7 +2818,7 @@ def fetch_incidents():
if 'Indicator of Attack' in fetch_incidents_or_detections:
demisto.debug('Fetching Indicator of Attack incidents')
demisto.debug(f'{ioa_last_run=}')
- fetch_query = demisto.params().get('ioa_fetch_query', '')
+ fetch_query = params.get('ioa_fetch_query', '')
validate_ioa_fetch_query(ioa_fetch_query=fetch_query)
last_fetch_event_ids, ioa_next_token, last_date_time_since, _ = get_current_fetch_data(
@@ -2848,8 +2848,71 @@ def fetch_incidents():
ioa_last_run = {'ioa_next_token': ioa_new_next_token, 'last_date_time_since': new_date_time_since,
'last_fetch_query': ioa_fetch_query, 'last_event_ids': ioa_event_ids or last_fetch_event_ids}
demisto.setLastRun([current_fetch_info_detections, current_fetch_info_incidents, current_fetch_info_idp_detections,
- iom_last_run, ioa_last_run])
- return incidents + detections + idp_detections + iom_incidents + ioa_incidents
+ iom_last_run, ioa_last_run, current_fetch_info_mobile_detections])
+ return incidents + detections + idp_detections + iom_incidents + ioa_incidents + mobile_detections
+
+
+def fetch_idp_and_mobile_detections(current_fetch_info: dict, look_back: int, product_type: str,
+ fetch_query: str, detections_type: str, detection_name_prefix: str,
+ start_time_key: str) -> tuple[List, dict]:
+ """The fetch logic for idp and mobile detections.
+
+ Args:
+ current_fetch_info (dict): The last run object.
+        look_back (int): The number of minutes to look back.
+ product_type (str): The product_type, used for debug & query.
+ fetch_query (str): The user's query param.
+ detections_type (str): The detection type, used for debugging and context save.
+ detection_name_prefix (str): The name prefix for the fetched incidents.
+ start_time_key (str): The key to save as the incident occurred time.
+
+ Returns:
+        tuple[List, dict]: The list of the fetched incidents and the updated last run object.
+ """
+ detections: List = []
+ offset: int = current_fetch_info.get('offset') or 0
+
+ start_fetch_time, end_fetch_time = get_fetch_run_time_range(last_run=current_fetch_info,
+ first_fetch=FETCH_TIME,
+ look_back=look_back,
+ date_format=DETECTION_DATE_FORMAT)
+ fetch_limit = current_fetch_info.get('limit') or INCIDENTS_PER_FETCH
+ filter = f"product:'{product_type}'+created_timestamp:>'{start_fetch_time}'"
+
+ if fetch_query:
+ filter += f"+{fetch_query}"
+ response = get_detections_ids(filter_arg=filter, limit=fetch_limit, offset=offset, product_type=product_type)
+ detections_ids: list[dict] = demisto.get(response, "resources", [])
+ total_detections = demisto.get(response, "meta.pagination.total")
+ offset = calculate_new_offset(offset, len(detections_ids), total_detections)
+ if offset:
+ demisto.debug(f"CrowdStrikeFalconMsg: The new {detections_type} offset is {offset}")
+
+ if detections_ids:
+ raw_res = get_detection_entities(detections_ids)
+ if "resources" in raw_res:
+ full_detections = demisto.get(raw_res, "resources")
+ for detection in full_detections:
+ detection['incident_type'] = detections_type
+ detection_to_context = detection_to_incident_context(detection, detection_name_prefix, start_time_key)
+ detections.append(detection_to_context)
+
+ detections = filter_incidents_by_duplicates_and_limit(incidents_res=detections,
+ last_run=current_fetch_info,
+ fetch_limit=INCIDENTS_PER_FETCH, id_field='name')
+
+ current_fetch_info = update_last_run_object(last_run=current_fetch_info,
+ incidents=detections,
+ fetch_limit=fetch_limit,
+ start_fetch_time=start_fetch_time,
+ end_fetch_time=end_fetch_time,
+ look_back=look_back,
+ created_time_field='occurred',
+ id_field='name',
+ date_format=DETECTION_DATE_FORMAT,
+ new_offset=offset)
+    demisto.debug(f"CrowdStrikeFalconMsg: Ending fetch {detections_type}. Fetched {len(detections)}")
+ return detections, current_fetch_info
def parse_ioa_iom_incidents(fetched_data: list[dict[str, Any]], last_date: str,
@@ -6400,9 +6463,9 @@ def cs_falcon_cspm_update_policy_settings_command(args: dict[str, Any]) -> Comma
return CommandResults(readable_output=f'Policy {policy_id} was updated successfully')
-def resolve_identity_detection_prepare_body_request(ids: list[str],
- action_params_values: dict[str, Any]) -> dict[str, Any]:
- """Create the body of the request to resolve an identity detection.
+def resolve_detections_prepare_body_request(ids: list[str],
+ action_params_values: dict[str, Any]) -> dict[str, Any]:
+ """Create the body of the request to resolve detections.
Args:
ids (list[str]): The IDs of the detections.
@@ -6425,8 +6488,8 @@ def resolve_identity_detection_prepare_body_request(ids: list[str],
return {'action_parameters': action_params, 'ids': ids}
-def resolve_identity_detection_request(ids: list[str], **kwargs) -> dict[str, Any]:
- """Do an API call to resolve an identity detection.
+def resolve_detections_request(ids: list[str], **kwargs) -> dict[str, Any]:
+ """Do an API call to resolve detections.
Args:
ids (list[str]): The IDs of the detections.
@@ -6434,12 +6497,36 @@ def resolve_identity_detection_request(ids: list[str], **kwargs) -> dict[str, An
Returns:
dict[str, Any]: The raw response of the API.
"""
- body_payload = resolve_identity_detection_prepare_body_request(ids=ids, action_params_values=kwargs)
+ body_payload = resolve_detections_prepare_body_request(ids=ids, action_params_values=kwargs)
return http_request(method='PATCH', url_suffix='/alerts/entities/alerts/v2', json=body_payload)
def cs_falcon_resolve_identity_detection(args: dict[str, Any]) -> CommandResults:
- """Command to resolve idenetiy detections.
+ """Command to resolve identity detections.
+
+ Args:
+ args (dict[str, Any]): The arguments of the command.
+
+ Returns:
+ CommandResults: The command results object.
+ """
+ return handle_resolve_detections(args, 'IDP Detection(s) {} were successfully updated')
+
+
+def cs_falcon_resolve_mobile_detection(args: dict[str, Any]) -> CommandResults:
+ """Command to resolve mobile detections.
+
+ Args:
+ args (dict[str, Any]): The arguments of the command.
+
+ Returns:
+ CommandResults: The command results object.
+ """
+ return handle_resolve_detections(args, 'Mobile Detection(s) {} were successfully updated')
+
+
+def handle_resolve_detections(args: dict[str, Any], hr_template: str) -> CommandResults:
+ """Handle the mobile & identity detections resolve commands.
Args:
args (dict[str, Any]): The arguments of the command.
@@ -6463,10 +6550,10 @@ def cs_falcon_resolve_identity_detection(args: dict[str, Any]) -> CommandResults
show_in_ui = args.get('show_in_ui', '')
# We pass the arguments in the form of **kwargs, since we also need the arguments' names for the API,
# and it easier to achieve that using **kwargs
- resolve_identity_detection_request(ids=ids, update_status=update_status, assign_to_name=assign_to_name,
- assign_to_uuid=assign_to_uuid, unassign=unassign, append_comment=append_comment,
- add_tag=add_tag, remove_tag=remove_tag, show_in_ui=show_in_ui)
- return CommandResults(readable_output=f'IDP Detection(s) {", ".join(ids)} were successfully updated')
+ resolve_detections_request(ids=ids, update_status=update_status, assign_to_name=assign_to_name,
+ assign_to_uuid=assign_to_uuid, unassign=unassign, append_comment=append_comment,
+ add_tag=add_tag, remove_tag=remove_tag, show_in_ui=show_in_ui)
+ return CommandResults(readable_output=hr_template.format(", ".join(ids)))
def cs_falcon_list_users_command(args: dict[str, Any]) -> CommandResults:
@@ -6746,7 +6833,6 @@ def main():
return_results(get_modified_remote_data_command(args))
elif command == 'update-remote-system':
return_results(update_remote_system_command(args))
-
elif demisto.command() == 'get-mapping-fields':
return_results(get_mapping_fields_command())
elif command == 'cs-falcon-spotlight-search-vulnerability':
@@ -6800,6 +6886,8 @@ def main():
return_results(cs_falcon_cspm_update_policy_settings_command(args=args))
elif command == 'cs-falcon-resolve-identity-detection':
return_results(cs_falcon_resolve_identity_detection(args=args))
+ elif command == 'cs-falcon-resolve-mobile-detection':
+ return_results(cs_falcon_resolve_mobile_detection(args=args))
elif command == 'cs-falcon-list-users':
return_results(cs_falcon_list_users_command(args=args))
elif command == 'cs-falcon-get-incident-behavior':
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
index f9d0ebcc0b6b..b77db3a6e1fd 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
@@ -77,6 +77,12 @@ configuration:
type: 0
section: Collect
advanced: true
+- display: Mobile Detections fetch query
+ name: mobile_detections_fetch_query
+ type: 0
+ section: Collect
+ required: false
+ advanced: true
- display: IOM fetch query
name: iom_fetch_query
type: 0
@@ -90,14 +96,13 @@ configuration:
section: Collect
required: false
advanced: true
- additionalinfo: "In the format: cloud_provider=aws&aws_account_id=1234.
- The query must have the argument 'cloud_provider' configured. Multiple values for the same parameter is not supported. For more information, refer to the integration docs."
+ additionalinfo: "In the format: cloud_provider=aws&aws_account_id=1234. The query must have the argument 'cloud_provider' configured. Multiple values for the same parameter is not supported. For more information, refer to the integration docs."
- display: Fetch incidents
name: isFetch
type: 8
- defaultvalue: 'true'
section: Collect
required: false
+ defaultvalue: 'true'
- display: Incident type
name: incidentType
type: 13
@@ -121,42 +126,43 @@ configuration:
section: Connect
advanced: true
required: false
-- display: Use system proxy settings
+- display: 'Use system proxy settings'
name: proxy
type: 8
section: Connect
advanced: true
required: false
-- additionalinfo: When selected, closes the CrowdStrike Falcon incident or detection, which is mirrored in the Cortex XSOAR incident.
- defaultvalue: 'false'
+- defaultvalue: 'false'
display: 'Close Mirrored XSOAR Incident'
name: close_incident
type: 8
section: Collect
advanced: true
required: false
-- defaultvalue: 'false'
+ additionalinfo: When selected, closes the CrowdStrike Falcon incident or detection, which is mirrored in the Cortex XSOAR incident.
+- additionalinfo: When selected, closes the Cortex XSOAR incident, which is mirrored in the CrowdStrike Falcon incident or detection, according to the types that were chosen to be fetched and mirrored.
+ defaultvalue: 'false'
display: 'Close Mirrored CrowdStrike Falcon Incident or Detection'
name: close_in_cs_falcon
type: 8
section: Collect
advanced: true
required: false
- additionalinfo: When selected, closes the Cortex XSOAR incident, which is mirrored in the CrowdStrike Falcon incident or detection, according to the types that were chosen to be fetched and mirrored.
-- additionalinfo: Choose what to fetch - incidents, detections, IDP detections. You can choose any combination.
- defaultvalue: 'Endpoint Detection'
+- defaultvalue: 'Endpoint Detection'
display: 'Fetch types'
name: fetch_incidents_or_detections
type: 16
section: Collect
advanced: true
required: false
+  additionalinfo: Choose what to fetch - incidents, detections, IDP detections, mobile detections. You can choose any combination.
options:
- IDP Detection
- Endpoint Incident
- Endpoint Detection
- Indicator of Misconfiguration
- Indicator of Attack
+ - Mobile Detection
- defaultvalue: '1'
display: 'Incidents Fetch Interval'
name: incidentFetchInterval
@@ -186,29 +192,35 @@ script:
auto: PREDEFINED
- description: The query to filter the device.
name: filter
- - description: A comma-separated list of device IDs to limit the results.
+ - description: The maximum records to return [1-5000].
+ name: limit
+ defaultValue: 50
+ - description: The offset to start retrieving records from.
+ name: offset
+ defaultValue: 0
+ - description: 'A comma-separated list of device IDs to limit the results.'
name: ids
- - auto: PREDEFINED
- description: 'The status of the device. Possible values are: "Normal", "containment_pending", "contained", and "lift_containment_pending".'
+ - description: 'The status of the device. Possible values are: "Normal", "containment_pending", "contained", and "lift_containment_pending".'
name: status
+ auto: PREDEFINED
predefined:
- - normal
+ - 'normal'
- containment_pending
- contained
- lift_containment_pending
- - auto: PREDEFINED
- description: The host name of the device.
+ - description: 'The host name of the device.'
name: hostname
+ auto: PREDEFINED
predefined:
- ''
- - auto: PREDEFINED
- description: 'The platform name of the device. Possible values are: Windows, Mac, and Linux.'
+ - description: 'The platform name of the device. Possible values are: Windows, Mac, and Linux.'
name: platform_name
+ auto: PREDEFINED
predefined:
- - Windows
+ - 'Windows'
- Mac
- Linux
- - description: The site name of the device.
+ - description: 'The site name of the device.'
name: site_name
description: Searches for a device that matches the query.
name: cs-falcon-search-device
@@ -4710,8 +4722,45 @@ script:
- 'false'
- 'true'
auto: PREDEFINED
- description: Perform actions on alerts.
+ description: Perform actions on identity detection alerts.
name: cs-falcon-resolve-identity-detection
+ - arguments:
+ - description: IDs of the alerts to update.
+ name: ids
+ isArray: true
+ required: true
+ - description: 'Assign the specified detections to a user based on their username.'
+ name: assign_to_name
+ - description: Assign the specified detections to a user based on their UUID.
+ name: assign_to_uuid
+ - description: Appends a new comment to any existing comments for the specified detections.
+ name: append_comment
+ - description: Add a tag to the specified detections.
+ name: add_tag
+ - description: Remove a tag from the specified detections.
+ name: remove_tag
+ - description: Update status of the alert to the specified value.
+ name: update_status
+ predefined:
+ - 'new'
+ - 'in_progress'
+ - 'closed'
+ - 'reopened'
+ auto: PREDEFINED
+ - description: Whether to unassign any assigned users to the specified detections.
+ name: unassign
+ predefined:
+ - 'false'
+ - 'true'
+ auto: PREDEFINED
+ - description: If true, displays the detection in the UI.
+ name: show_in_ui
+ predefined:
+ - 'false'
+ - 'true'
+ auto: PREDEFINED
+ description: Perform actions on mobile detection alerts.
+ name: cs-falcon-resolve-mobile-detection
- arguments:
- description: IDs (UUID) of specific users to list.
name: id
@@ -4962,7 +5011,7 @@ script:
- contextPath: CrowdStrike.IOARules.version_ids
description: The IOA Rule's version ID.
type: String
- dockerimage: demisto/py3-tools:1.0.0.86612
+ dockerimage: demisto/py3-tools:1.0.0.88283
isfetch: true
ismappable: true
isremotesyncin: true
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
index a7cde8a8792c..622b63c68abc 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
@@ -85,9 +85,9 @@ def test_incident_to_incident_context():
assert res == incident_context
-def test_idp_detectionin_to_incident_context():
- from CrowdStrikeFalcon import idp_detection_to_incident_context
- res = idp_detection_to_incident_context(input_data.response_idp_detection.copy())
+def test_detection_to_incident_context():
+ from CrowdStrikeFalcon import detection_to_incident_context
+ res = detection_to_incident_context(input_data.response_idp_detection.copy(), "IDP Detection")
assert res == input_data.context_idp_detection
@@ -2220,7 +2220,7 @@ def test_old_fetch_to_new_fetch(self, set_up_mocks, mocker):
})
fetch_incidents()
assert demisto.setLastRun.mock_calls[0][1][0] == [
- {'time': '2020-09-04T09:16:10Z'}, {'time': '2020-09-04T09:22:10Z'}, {}, {}, {}]
+ {'time': '2020-09-04T09:16:10Z'}, {'time': '2020-09-04T09:22:10Z'}, {}, {}, {}, {}]
@freeze_time("2020-09-04T09:16:10Z")
def test_new_fetch(self, set_up_mocks, mocker, requests_mock):
@@ -3517,7 +3517,7 @@ def test_get_endpoint_command(requests_mock, mocker):
- The user is running cs-falcon-search-device with an id
Then
- Return an Endpoint context output
- """
+ """
from CrowdStrikeFalcon import get_endpoint_command
response = {'resources': {'meta': {'query_time': 0.010188508, 'pagination': {'offset': 1, 'limit': 100, 'total': 1},
'powered_by': 'device-api', 'trace_id': 'c876614b-da71-4942-88db-37b939a78eb3'},
@@ -3560,13 +3560,14 @@ def test_get_endpoint_command(requests_mock, mocker):
status_code=200,
)
- mocker.patch.object(demisto, 'args', return_value={'id': 'dentifier_numbe', 'hostname': 'falcon-crowdstr'})
+ mocker.patch.object(demisto, 'args', return_value={'id': 'identifier_numbe', 'hostname': 'falcon-crowdstr'})
outputs = get_endpoint_command()
result = outputs[0].to_context()
context = result.get('EntryContext')
- assert unquote(query_mocker.last_request.query) == "filter=device_id:'dentifier_numbe',hostname:'falcon-crowdstr'"
+ api_query = "filter=device_id:'identifier_numbe',hostname:'falcon-crowdstr'&limit=50&offset=0"
+ assert unquote(query_mocker.last_request.query) == api_query
assert context['Endpoint(val.ID && val.ID == obj.ID && val.Vendor == obj.Vendor)'] == [endpoint_context]
@@ -4233,7 +4234,7 @@ def test_get_modified_remote_data_command(mocker):
Given
- arguments - lastUpdate time
- raw incidents, detection, and idp_detection (results of get_incidents_ids, get_fetch_detections,
- and get_idp_detections_ids)
+ and get_detections_ids)
When
- running get_modified_remote_data_command
Then
@@ -5918,12 +5919,12 @@ def test_http_request_arguments(self, mocker: MockerFixture):
Then
- Validate that the arguments are mapped correctly to the json body.
"""
- from CrowdStrikeFalcon import resolve_identity_detection_request
+ from CrowdStrikeFalcon import resolve_detections_request
http_request_mocker = mocker.patch('CrowdStrikeFalcon.http_request')
ids = ['1,2']
action_param_values = {'update_status': 'new', 'assign_to_name': 'bot'}
action_params_http_body = [{'name': 'update_status', 'value': 'new'}, {'name': 'assign_to_name', 'value': 'bot'}]
- resolve_identity_detection_request(ids=ids, **action_param_values)
+ resolve_detections_request(ids=ids, **action_param_values)
assert http_request_mocker.call_args_list[0][1].get('json') == {'action_parameters': action_params_http_body,
'ids': ids}
@@ -5942,6 +5943,21 @@ def test_resolve_identity_detection(self, mocker: MockerFixture):
assert isinstance(command_results.readable_output, str)
assert 'IDP Detection(s) 1, 2 were successfully updated' in command_results.readable_output
+ def test_resolve_mobile_detection(self, mocker: MockerFixture):
+ """
+ Given:
+ - Arguments for the command.
+ When
+ - Calling the cs-falcon-resolve-mobile-detection command.
+ Then
+ - Validate the data of the CommandResults object returned.
+ """
+ from CrowdStrikeFalcon import cs_falcon_resolve_mobile_detection
+ mocker.patch('CrowdStrikeFalcon.http_request', return_value=requests.Response())
+ command_results = cs_falcon_resolve_mobile_detection(args={'ids': '1,2'})
+ assert isinstance(command_results.readable_output, str)
+ assert 'Mobile Detection(s) 1, 2 were successfully updated' in command_results.readable_output
+
class TestIOAFetch:
# Since this integration fetches multiple incidents, the last run object contains a list of
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
index 29454898c5ec..24926ee14128 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
@@ -8,7 +8,7 @@ The CrowdStrike Falcon OAuth 2 API integration (formerly Falcon Firehose API), e
| **Parameter** | **Description** | **Required** |
| --- | --- | --- |
- | Server URL (e.g., https://api.crowdstrike.com) | | True |
+ | Server URL (e.g., ) | | True |
| Client ID | | True |
| Secret | | True |
| Source Reliability | Reliability of the source providing the intelligence data. Currently used for “CVE” reputation command. | False |
@@ -135,6 +135,7 @@ Available parameters:
Exmample: `cloud_provider=aws®ion=eu-west-2`
More information about the parameters can be found [here](https://www.falconpy.io/Service-Collections/CSPM-Registration.html#keyword-arguments-13).
+
### 1. Search for a device
---
@@ -156,6 +157,8 @@ Searches for a device that matches the query.
| hostname | The host name of the device. Possible values are: . | Optional |
| platform_name | The platform name of the device. Possible values are: Windows, Mac, and Linux. Possible values are: Windows, Mac, Linux. | Optional |
| site_name | The site name of the device. | Optional |
+| limit | The maximum number of records to return. Default is 50. | Optional |
+| offset | The offset to begin the list from. For example, start from the 10th record and return the list. Default is 0. | Optional |
#### Context Output
@@ -343,7 +346,7 @@ or by providing the IDs of the detections.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| ids | The IDs of the detections to search. If provided, will override other arguments. | Optional |
-| filter | Filter detections using a query in Falcon Query Language (FQL). For example, filter="device.hostname:'CS-SE-TG-W7-01'" For a full list of valid filter options, see: https://falcon.crowdstrike.com/support/documentation/2/query-api-reference#detectionsearch. | Optional |
+| filter | Filter detections using a query in Falcon Query Language (FQL). For example, filter="device.hostname:'CS-SE-TG-W7-01'" For a full list of valid filter options, see: . | Optional |
| extended_data | Whether to get additional data such as device and behaviors processed. Possible values are: Yes, No. | Optional |
#### Context Output
@@ -4778,7 +4781,7 @@ Get a list of ML exclusions by specifying their IDs, value, or a specific filter
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: value:'test'. Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value. For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language. | Optional |
+| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: value:'test'. Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value. For more information, see: . | Optional |
| value | The value by which the exclusions should be filtered. | Optional |
| ids | A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and value. | Optional |
| limit | The maximum number of records to return. [1-500]. Applies only if the IDs argument is not supplied. | Optional |
@@ -5100,7 +5103,7 @@ Get a list of IOA exclusions by specifying their IDs or a filter.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: name:'test'. Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern. For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language. | Optional |
+| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: name:'test'. Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern. For more information, see: . | Optional |
| name | The name by which the exclusions should be filtered. | Optional |
| ids | A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and name. | Optional |
| limit | The limit of how many exclusions to retrieve. Default is 50. Applies only if the IDs argument is not supplied. | Optional |
@@ -6239,8 +6242,11 @@ List identity entities.
| CrowdStrike.CSPMPolicy.account_scope | String | The account scope. |
#### Command example
+
```!cs-falcon-cspm-list-policy-details policy_ids=1,2```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -6353,6 +6359,7 @@ List identity entities.
#### Human Readable Output
>### CSPM Policy Details:
+
>|Id|Description|Policy Statement|Policy Remediation|Cloud Service Subtype|Cloud Platform Type|Cloud Service Type|Default Severity|Policy Type|Tactic|Technique|
>|---|---|---|---|---|---|---|---|---|---|---|
>| 1 | Because IAM access keys are long-term credentials, as time goes on, the risk of these keys being exposed is increased.
Keys are often left on old servers, accidentally published through Git, or stolen from developer machines. The longer the keys are valid, the more likely they are to be discovered in one of these places. By ensuring keys are rotated at least every 90 days, you can be confident that if those keys are discovered, they cannot be abused. | IAM user access key active longer than 90 days | Step 1. From the AWS Console, navigate to the IAM page.\| Step 2. Locate and click on the offending IAM User.\| Step 3. Click on the Security Credentials tab.\| Step 4. Navigate to the Access Keys section and choose between making the access key inactive, deleting the key, or rotating the key. | Access Keys | aws | IAM | informational | Configuration | Credential Access | Steal Application Access Token |
@@ -6416,8 +6423,11 @@ Returns information about current policy settings.
| CrowdStrike.CSPMPolicySetting.attack_types | Array | The attack types. |
#### Command example
+
```!cs-falcon-cspm-list-service-policy-settings limit=2```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -6556,6 +6566,7 @@ Returns information about current policy settings.
#### Human Readable Output
>### CSPM Policy Settings:
+
>|Policy Id|Is Remediable|Remediation Summary|Name|Policy Type|Cloud Service Subtype|Cloud Service|Default Severity|
>|---|---|---|---|---|---|---|---|
>| 1 | false | | EFS File System is encrypted without CMK | Configuration | N/A | efs | informational |
@@ -6585,8 +6596,11 @@ Updates a policy setting - can be used to override policy severity or to disable
#### Context Output
There is no context output for this command.
+
#### Command example
+
```!cs-falcon-cspm-update-policy_settings policy_id=1 enabled=true regions="eu-central-1,eu-central-2" severity=high tag_excluded=false```
+
#### Human Readable Output
>Policy 1 was updated successfully
@@ -6594,7 +6608,7 @@ There is no context output for this command.
### cs-falcon-resolve-identity-detection
***
-Perform actions on alerts.
+Perform actions on identity detection alerts.
#### Base Command
@@ -6619,7 +6633,9 @@ Perform actions on alerts.
There is no context output for this command.
#### Command example
+
```!cs-falcon-resolve-identity-detection ids="id_1,id_2" add_tag="Demo tag" append_comment="Demo comment" assign_to_name="morganf" show_in_ui=true update_status=in_progress```
+
#### Human Readable Output
>IDP Detection(s) id_1, id_2 were successfully updated
@@ -6760,3 +6776,38 @@ Get IOA Rules for Custom IOA rule triggered detections
| CrowdStrike.IOARules.ruletype_id | String | The IOA Rule's Rule Type ID. |
| CrowdStrike.IOARules.ruletype_name | String | The IOA Rule's Rule Type Name. |
| CrowdStrike.IOARules.version_ids | String | The IOA Rule's Version ID. |
+
+### cs-falcon-resolve-mobile-detection
+
+***
+Perform actions on mobile detection alerts.
+
+#### Base Command
+
+`cs-falcon-resolve-mobile-detection`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ids | IDs of the alerts to update. | Required |
+| assign_to_name | Assign the specified detections to a user based on their username. | Optional |
+| assign_to_uuid | Assign the specified detections to a user based on their UUID. | Optional |
+| append_comment | Appends a new comment to any existing comments for the specified detections. | Optional |
+| add_tag | Add a tag to the specified detections. | Optional |
+| remove_tag | Remove a tag from the specified detections. | Optional |
+| update_status | Update status of the alert to the specified value. Possible values are: new, in_progress, closed, reopened. | Optional |
+| unassign | Whether to unassign any assigned users to the specified detections. Possible values are: false, true. | Optional |
+| show_in_ui | If true, displays the detection in the UI. Possible values are: false, true. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command example
+
+```!cs-falcon-resolve-mobile-detection ids="id_1,id_2" add_tag="Demo tag" append_comment="Demo comment" assign_to_name="morganf" show_in_ui=true update_status=in_progress```
+
+#### Human Readable Output
+
+>Mobile Detection(s) id_1, id_2 were successfully updated
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.json b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.json
new file mode 100644
index 000000000000..a974f9ef421b
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+    "breakingChangesNotes": "Parameters *limit* (with default value 50) and *offset* (with default value 0) were introduced to the ***cs-falcon-search-device*** command. The number of returned results will be defined by the limit."
+}
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.md
new file mode 100644
index 000000000000..0fc0462082a1
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_0.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+- Added the *limit* and *offset* parameters to the ***cs-falcon-search-device*** command.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.88283*.
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_1.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_1.md
new file mode 100644
index 000000000000..3875b09daaa7
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_1.md
@@ -0,0 +1,47 @@
+
+#### Classifiers
+
+##### CrowdStrike Falcon Incident Classifier
+
+- Added support for **CrowdStrike Falcon Mobile Detection**.
+
+#### Incident Fields
+
+- New: **CrowdStrike Falcon Security patch level**
+- New: **CrowdStrike Falcon Firmware Build Fingerprint**
+- New: **CrowdStrike Falcon Firmware Build Time**
+- New: **CrowdStrike Falcon Mobile Manufacturer**
+- New: **CrowdStrike Falcon Mobile platform version**
+- New: **CrowdStrike Falcon Mobile Product**
+
+- Added the **CrowdStrike Falcon Mobile Detection** incident type to the following incident fields:
+
+- **Behaviour Objective**
+- **Behaviour Scenario**
+- **Behaviour Tactic**
+- **CrowdStrike Falcon Crawled Timestamp**
+- **CrowdStrike Falcon Detection Type**
+
+#### Incident Types
+
+- New: **CrowdStrike Falcon Mobile Detection**
+
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.88283*.
+- Added the new command ***cs-falcon-resolve-mobile-detection***.
+- Added the **Mobile Detection** option to the **Fetch type** options.
+- Added the **Mobile Detections fetch query** parameter to allow a custom query for *Mobile Detection* **fetch-incidents**.
+- Added support for mirroring for **CrowdStrike Falcon Mobile Detection** incident type.
+
+#### Mappers
+
+##### CrowdStrike Falcon Mapper
+
+- Added support for mirroring for **CrowdStrike Falcon Mobile Detection** incident type.
+
+##### CrowdStrike Falcon - Outgoing Mapper
+- Added support for mirroring for **CrowdStrike Falcon Mobile Detection** incident type.
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/pack_metadata.json b/Packs/CrowdStrikeFalcon/pack_metadata.json
index 0bebbf755bfd..7dcca08a926e 100644
--- a/Packs/CrowdStrikeFalcon/pack_metadata.json
+++ b/Packs/CrowdStrikeFalcon/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon",
"description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.",
"support": "xsoar",
- "currentVersion": "1.12.17",
+ "currentVersion": "1.13.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CrowdStrikeFalconX/TestPlaybooks/CrowdStrike_Falcon_X_-Test-Detonate_File.yml b/Packs/CrowdStrikeFalconX/TestPlaybooks/CrowdStrike_Falcon_X_-Test-Detonate_File.yml
index 4bcf197dd68a..de2aeea2ef12 100644
--- a/Packs/CrowdStrikeFalconX/TestPlaybooks/CrowdStrike_Falcon_X_-Test-Detonate_File.yml
+++ b/Packs/CrowdStrikeFalconX/TestPlaybooks/CrowdStrike_Falcon_X_-Test-Detonate_File.yml
@@ -83,13 +83,13 @@ tasks:
- "4"
scriptarguments:
filename:
- simple: "# test.pdf"
+ simple: "# script.py"
method:
simple: GET
saveAsFile:
simple: "yes"
url:
- simple: http://www.pdf995.com/samples/pdf.pdf
+ simple: https://raw.githubusercontent.com/demisto/content/3ef746414beb35924fd5ce4c74bf646867ccbba6/Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/CrowdStrikeFalconX.py
separatecontext: false
view: |-
{
diff --git a/Packs/CrowdStrikeOpenAPI/Integrations/CrowdStrikeOpenAPI/CrowdStrikeOpenAPI.yml b/Packs/CrowdStrikeOpenAPI/Integrations/CrowdStrikeOpenAPI/CrowdStrikeOpenAPI.yml
index c46c7d06e578..e341e0de6910 100644
--- a/Packs/CrowdStrikeOpenAPI/Integrations/CrowdStrikeOpenAPI/CrowdStrikeOpenAPI.yml
+++ b/Packs/CrowdStrikeOpenAPI/Integrations/CrowdStrikeOpenAPI/CrowdStrikeOpenAPI.yml
@@ -25717,7 +25717,7 @@ script:
- contextPath: CrowdStrike.deviceNetworkHistory.resources.history.timestamp
description: ''
type: String
- dockerimage: demisto/python3:3.10.13.85667
+ dockerimage: demisto/python3:3.10.13.86272
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/CrowdStrikeOpenAPI/ReleaseNotes/1_0_18.md b/Packs/CrowdStrikeOpenAPI/ReleaseNotes/1_0_18.md
new file mode 100644
index 000000000000..e5a9937d5915
--- /dev/null
+++ b/Packs/CrowdStrikeOpenAPI/ReleaseNotes/1_0_18.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CrowdStrike OpenAPI (Beta)
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CrowdStrikeOpenAPI/pack_metadata.json b/Packs/CrowdStrikeOpenAPI/pack_metadata.json
index 571ae0d4efd0..8532b718ccb2 100644
--- a/Packs/CrowdStrikeOpenAPI/pack_metadata.json
+++ b/Packs/CrowdStrikeOpenAPI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike OpenAPI",
"description": "Use the CrowdStrike OpenAPI integration to interact with CrowdStrike APIs that do not have dedicated integrations in Cortex XSOAR, for example, CrowdStrike FalconX, etc.",
"support": "xsoar",
- "currentVersion": "1.0.17",
+ "currentVersion": "1.0.18",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Cryptocurrency/Integrations/Cryptocurrency/Cryptocurrency.yml b/Packs/Cryptocurrency/Integrations/Cryptocurrency/Cryptocurrency.yml
index e5501ee8a859..d68282b0f1bc 100644
--- a/Packs/Cryptocurrency/Integrations/Cryptocurrency/Cryptocurrency.yml
+++ b/Packs/Cryptocurrency/Integrations/Cryptocurrency/Cryptocurrency.yml
@@ -70,7 +70,7 @@ script:
description: The cryptocurrency type. e.g. 'bitcoin'.
type: string
description: Return Cryptocurrency reputation.
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.13.88772
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Cryptocurrency/ReleaseNotes/1_1_54.md b/Packs/Cryptocurrency/ReleaseNotes/1_1_54.md
new file mode 100644
index 000000000000..dd80549e0ea5
--- /dev/null
+++ b/Packs/Cryptocurrency/ReleaseNotes/1_1_54.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### Cryptocurrency
+- Updated the Docker image to: *demisto/python3:3.10.13.88772*.
diff --git a/Packs/Cryptocurrency/pack_metadata.json b/Packs/Cryptocurrency/pack_metadata.json
index 96635dd29164..74863c8a9f32 100644
--- a/Packs/Cryptocurrency/pack_metadata.json
+++ b/Packs/Cryptocurrency/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cryptocurrency",
"description": "This Content Pack enables you to add a reputation for cryptocurrency addresses.",
"support": "xsoar",
- "currentVersion": "1.1.53",
+ "currentVersion": "1.1.54",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CuckooSandbox/ReleaseNotes/1_1_5.md b/Packs/CuckooSandbox/ReleaseNotes/1_1_5.md
new file mode 100644
index 000000000000..2acdb56852db
--- /dev/null
+++ b/Packs/CuckooSandbox/ReleaseNotes/1_1_5.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CuckooDisplayReport
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/CuckooSandbox/Scripts/CuckooDisplayReport/CuckooDisplayReport.yml b/Packs/CuckooSandbox/Scripts/CuckooDisplayReport/CuckooDisplayReport.yml
index cd30cff49f3e..67c3760b8b8b 100644
--- a/Packs/CuckooSandbox/Scripts/CuckooDisplayReport/CuckooDisplayReport.yml
+++ b/Packs/CuckooSandbox/Scripts/CuckooDisplayReport/CuckooDisplayReport.yml
@@ -22,6 +22,6 @@ args:
scripttarget: 0
timeout: 0s
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.13.86272
tests:
- No tests (auto formatted)
diff --git a/Packs/CuckooSandbox/pack_metadata.json b/Packs/CuckooSandbox/pack_metadata.json
index ffe88b8bdaf7..bf60908e2232 100644
--- a/Packs/CuckooSandbox/pack_metadata.json
+++ b/Packs/CuckooSandbox/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cuckoo Sandbox",
"description": "Malware dynamic analysis sandboxing",
"support": "xsoar",
- "currentVersion": "1.1.4",
+ "currentVersion": "1.1.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CybelAngel/.pack-ignore b/Packs/CybelAngel/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CybelAngel/.secrets-ignore b/Packs/CybelAngel/.secrets-ignore
new file mode 100644
index 000000000000..a05e10d8c909
--- /dev/null
+++ b/Packs/CybelAngel/.secrets-ignore
@@ -0,0 +1 @@
+https://platform.cybelangel.com
\ No newline at end of file
diff --git a/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.py b/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.py
new file mode 100644
index 000000000000..19816435e47a
--- /dev/null
+++ b/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.py
@@ -0,0 +1,289 @@
+import dateparser
+
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+from CommonServerUserPython import * # noqa
+
+import urllib3
+from typing import Any
+from enum import Enum
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%S'
+DEFAULT_MAX_FETCH = 5000
+VENDOR = "cybelangel"
+PRODUCT = "platform"
+DEFAULT_FIRST_FETCH = "30 days"
+
+
+class LastRun(str, Enum):
+ LATEST_REPORT_TIME = "latest_report_time"
+ LATEST_FETCHED_REPORTS_IDS = "latest_fetched_reports_ids"
+
+
+class Client(BaseClient):
+
+ def __init__(self, base_url: str, client_id: str, client_secret: str, verify: bool, proxy: bool, **kwargs):
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+ super().__init__(base_url=base_url, verify=verify, proxy=proxy, **kwargs)
+
+ def http_request(self, method: str, url_suffix: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+ """
+ Overrides Base client request function, retrieves and adds to headers access token before sending the request.
+ """
+ token = self.get_access_token()
+ headers = {
+ 'Authorization': f'Bearer {token}',
+ 'Content-Type': 'application/json',
+ }
+
+ demisto.debug(f'Running http-request with URL {url_suffix} and {params=}')
+
+ response = self._http_request(
+ method, url_suffix=url_suffix, headers=headers, params=params, resp_type="response", ok_codes=(401, 200)
+ )
+ if response.status_code == 200:
+ return response.json()
+ else:
+ demisto.debug('Access token has expired, retrieving new access token')
+
+ token = self.get_access_token(create_new_token=True)
+ headers["Authorization"] = f'Bearer {token}'
+
+ return self._http_request(method, url_suffix=url_suffix, headers=headers, params=params)
+
+ def get_reports(self, start_date: str, end_date: str, limit: int = DEFAULT_MAX_FETCH) -> List[dict[str, Any]]:
+ """
+ Get manual reports from Cybel Angel Collector.
+
+ Note:
+ The order of the events returned is random, hence need to sort them out to return the oldest events first.
+ """
+ params = {
+ "start-date": start_date,
+ "end-date": end_date
+ }
+ reports = self.http_request(method='GET', url_suffix="/api/v2/reports", params=params).get("reports") or []
+ for report in reports:
+ if updated_at := report.get("updated_at"):
+ _time_field = updated_at
+ else:
+ _time_field = report["created_at"]
+
+ report["_time"] = _time_field
+
+ reports = sorted(
+ reports, key=lambda _report: dateparser.parse(_report["_time"]) # type: ignore[arg-type, return-value]
+ )
+ return reports[:limit]
+
+ def get_access_token(self, create_new_token: bool = False) -> str:
+ """
+        Obtains an access token from the CybelAngel authentication server.
+        The access token is stored in the integration context and reused until it expires.
+        On expiration (a 401 response), a new access token is requested and stored in the
+        integration context.
+
+ Returns:
+ str: the access token.
+ """
+ integration_context = get_integration_context()
+ current_access_token = integration_context.get('access_token')
+ if current_access_token and not create_new_token:
+ return current_access_token
+ new_access_token = self.get_token_request()
+ integration_context = {
+ 'access_token': new_access_token,
+ }
+ demisto.debug(f'updating access token at {datetime.now()}')
+ set_integration_context(context=integration_context)
+ return new_access_token
+
+ def get_token_request(self) -> str:
+ """
+ Sends request to retrieve token.
+
+ Returns:
+            str: the access token.
+ """
+ url = 'https://auth.cybelangel.com/oauth/token'
+
+ token_response = self._http_request(
+ 'POST',
+ full_url=url,
+ json_data={
+ "client_id": self.client_id,
+ "client_secret": self.client_secret,
+ "audience": "https://platform.cybelangel.com/",
+ "grant_type": "client_credentials"
+ }
+ )
+ if access_token := token_response.get("access_token"):
+ return access_token
+ raise RuntimeError(f"Could not retrieve token from {url}, access-token returned is empty")
+
+
+def dedup_fetched_events(
+ events: List[dict],
+ last_run_fetched_event_ids: Set[str],
+) -> List[dict]:
+ """
+ Dedup events, removes events which were already fetched.
+ """
+ un_fetched_events = []
+
+ for event in events:
+ event_id = event.get("id")
+ if event_id not in last_run_fetched_event_ids:
+ demisto.debug(f'event with ID {event_id} has not been fetched.')
+ un_fetched_events.append(event)
+ else:
+ demisto.debug(f'event with ID {event_id} for has been fetched')
+
+ demisto.debug(f'{un_fetched_events=}')
+ return un_fetched_events
+
+
+def get_latest_event_time_and_ids(reports: List[Dict[str, Any]]) -> tuple[str, List[str]]:
+ """
+ Returns the latest event time and all the events that were fetched in the latest event time
+ """
+ latest_report_time = reports[-1]["_time"]
+ return latest_report_time, [report["id"] for report in reports if report["_time"] == latest_report_time]
+
+
+def test_module(client: Client) -> str:
+ """
+ Tests that the authentication to the api is ok.
+ """
+ client.get_reports(
+ start_date=(datetime.now() - timedelta(days=1)).strftime(DATE_FORMAT),
+ end_date=datetime.now().strftime(DATE_FORMAT)
+ )
+ return "ok"
+
+
+def fetch_events(client: Client, first_fetch: str, last_run: dict, max_fetch: int) -> tuple[List[dict[str, Any]], dict[str, Any]]:
+ """
+ Fetches reports from Cybel Angel
+
+ Args:
+ client: Cybel Angel client
+        first_fetch: the time from which to start fetching reports
+ last_run: the last run object
+ max_fetch: maximum number of reports
+
+ Fetch logic:
+    1. Get the latest report time from the last fetch, or fall back to first_fetch if this is the first fetch
+ 2. get all the reports since the last fetch or first fetch
+    3. remove any reports which were already fetched
+ 4. if there are no reports after dedup, keep the last run the same and return
+ 5. if there are reports after dedup, update the last run to the latest report time, save all the report IDs which
+ occurred in the last event time
+ 6. return all the fetched events
+
+ """
+ last_run_time = last_run.get(LastRun.LATEST_REPORT_TIME)
+ if not last_run_time:
+ last_run_time = dateparser.parse(first_fetch).strftime(DATE_FORMAT) # type: ignore[union-attr]
+ if not last_run_time:
+ demisto.error(f'First fetch {first_fetch} is not valid')
+ raise ValueError(f'First fetch {first_fetch} not valid')
+ else:
+ last_run_time = dateparser.parse(last_run_time).strftime(DATE_FORMAT) # type: ignore[union-attr]
+ now = datetime.now()
+ reports = client.get_reports(start_date=last_run_time, end_date=now.strftime(DATE_FORMAT), limit=max_fetch)
+ reports = dedup_fetched_events(reports, last_run_fetched_event_ids=last_run.get(LastRun.LATEST_FETCHED_REPORTS_IDS) or set())
+ if not reports:
+ demisto.debug(f'No reports found when last run is {last_run}')
+ return [], {
+ LastRun.LATEST_REPORT_TIME: last_run_time,
+ LastRun.LATEST_FETCHED_REPORTS_IDS: last_run.get(LastRun.LATEST_FETCHED_REPORTS_IDS)
+ }
+
+ latest_report_time, latest_fetched_report_ids = get_latest_event_time_and_ids(reports)
+ demisto.debug(f'latest-report-time: {latest_report_time}')
+ demisto.debug(f'latest-fetched-report-ids {latest_fetched_report_ids}')
+
+ last_run.update(
+ {
+ LastRun.LATEST_REPORT_TIME: latest_report_time,
+ LastRun.LATEST_FETCHED_REPORTS_IDS: latest_fetched_report_ids
+ }
+ )
+ return reports, last_run
+
+
+def get_events_command(client: Client, args: dict[str, Any]) -> CommandResults:
+ """
+ Get events from Cybel Angel, used mainly for debugging purposes
+ """
+ if end := args.get("end-date"):
+ end_date = dateparser.parse(end).strftime(DATE_FORMAT) # type: ignore[union-attr]
+ else:
+ end_date = datetime.now().strftime(DATE_FORMAT)
+
+ reports = client.get_reports(
+ dateparser.parse(args["start-date"]).strftime(DATE_FORMAT), # type: ignore[union-attr]
+ end_date=end_date,
+ limit=arg_to_number(args.get("limit")) or DEFAULT_MAX_FETCH
+ )
+
+ return CommandResults(
+ outputs_prefix="CybleAngel.Events",
+ outputs_key_field="id",
+ outputs=reports,
+ raw_response=reports,
+ readable_output=tableToMarkdown("Reports", reports, headers=["id", "created_at", "updated_at"], removeNull=True)
+ )
+
+
+''' MAIN FUNCTION '''
+
+
+def main() -> None:
+
+ params = demisto.params()
+ client_id: str = params.get('credentials', {}).get('identifier', '')
+ client_secret: str = params.get('credentials', {}).get('password', '')
+ base_url: str = params.get('url', '').rstrip('/')
+ verify_certificate = not params.get('insecure', False)
+ proxy = params.get('proxy', False)
+ max_fetch = arg_to_number(params.get("max_fetch")) or DEFAULT_MAX_FETCH
+ first_fetch = params.get("first_fetch") or DEFAULT_FIRST_FETCH
+
+ command = demisto.command()
+ demisto.info(f'Command being called is {command}')
+ try:
+ client = Client(
+ client_id=client_id,
+ client_secret=client_secret,
+ base_url=base_url,
+ verify=verify_certificate,
+ proxy=proxy
+ )
+ if command == 'test-module':
+ return_results(test_module(client))
+ elif command == 'fetch-events':
+ events, last_run = fetch_events(client, first_fetch=first_fetch, last_run=demisto.getLastRun(), max_fetch=max_fetch)
+ send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT)
+ demisto.debug(f'Successfully sent event {[event.get("id") for event in events]} IDs to XSIAM')
+ demisto.setLastRun(last_run)
+ elif command == "cybelangel-get-events":
+ return_results(get_events_command(client, demisto.args()))
+ except Exception as e:
+ demisto.error(traceback.format_exc())
+ return_error(f"Failed to execute {command} command.\nError:\ntype:{type(e)}, error:{str(e)}")
+
+
+''' ENTRY POINT '''
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.yml b/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.yml
new file mode 100644
index 000000000000..cb819041ba9a
--- /dev/null
+++ b/Packs/CybelAngel/Integrations/CybelAngelEventCollector/CybelAngelEventCollector.yml
@@ -0,0 +1,71 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: CybelAngel Event Collector
+ version: -1
+configuration:
+- defaultvalue: https://platform.cybelangel.com
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+ section: Connect
+- display: Client ID
+ displaypassword: Client Secret
+ name: credentials
+ type: 9
+ required: true
+- display: The maximum number of events per fetch
+ name: max_fetch
+ defaultvalue: "5000"
+ advanced: true
+ type: 0
+ required: true
+ section: Collect
+- defaultvalue: 30 days
+ display: First fetch timestamp (