diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml index 07928f2..02583c5 100644 --- a/.github/workflows/ci-build.yaml +++ b/.github/workflows/ci-build.yaml @@ -78,7 +78,7 @@ jobs: cache-to: type=inline - uses: actions/setup-node@v4.0.2 with: - node-version: "20.12.0" + node-version: "20.12" build-backend: name: build-backend @@ -128,4 +128,4 @@ jobs: cache-to: type=inline - uses: actions/setup-node@v4.0.2 with: - node-version: "20.12.0" + node-version: "20.12" diff --git a/.github/workflows/ci-main.yaml b/.github/workflows/ci-main.yaml index 6e2a183..2fde50d 100644 --- a/.github/workflows/ci-main.yaml +++ b/.github/workflows/ci-main.yaml @@ -56,7 +56,7 @@ jobs: strategy: matrix: node-version: - - 20.12.0 + - 20.12 steps: - uses: actions/checkout@v4 - name: Set Node.js ${{ matrix.node-version }} diff --git a/.github/workflows/ci-release.yaml b/.github/workflows/ci-release.yaml index 9b65c59..d0bebe8 100644 --- a/.github/workflows/ci-release.yaml +++ b/.github/workflows/ci-release.yaml @@ -21,7 +21,7 @@ on: - "main" - "develop" - "next" - tags-ignore: + tags: - "v*" jobs: @@ -61,6 +61,7 @@ jobs: type=semver,pattern={{major}} type=semver,pattern={{version}} type=ref,event=branch + type=ref,event=pr - name: Build and push action - frontend id: docker_action_build_frontend uses: docker/build-push-action@v5 @@ -73,7 +74,7 @@ jobs: cache-to: type=inline - uses: actions/setup-node@v4.0.2 with: - node-version: "20.12.0" + node-version: "20.12" build-backend: name: build-backend @@ -111,6 +112,7 @@ jobs: type=semver,pattern={{major}} type=semver,pattern={{version}} type=ref,event=branch + type=ref,event=pr - name: Build and push action - backend id: docker_action_build_backend uses: docker/build-push-action@v5 @@ -123,7 +125,7 @@ jobs: cache-to: type=inline - uses: actions/setup-node@v4.0.2 with: - node-version: "20.12.0" + node-version: "20.12" release: name: Release needs: [build-frontend, build-backend] @@ -138,15 +140,16 @@ jobs: 
persist-credentials: false - uses: actions/setup-node@v4.0.2 with: - node-version: "20.12.0" + node-version: "20.12" - name: Semantic Release id: version - uses: cycjimmy/semantic-release-action@v4.1.0 + uses: splunk/semantic-release-action@v1.3.4 with: - semantic_version: 21.1.1 + git_committer_name: ${{ secrets.SA_GH_USER_NAME }} + git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }} + gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }} + passphrase: ${{ secrets.SA_GPG_PASSPHRASE }} extra_plugins: | - @semantic-release/exec - @semantic-release/git semantic-release-replace-plugin@1.2.7 env: GITHUB_TOKEN: ${{ secrets.GH_TOKEN_ADMIN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 56d705e..2960a16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 1.1.0 + +### Changed +- add error handling for apply changes action +- after clicking 'Apply changes' workflow is initially attempting to create new job immediately, if it is impossible, schedule it for the future + ## [1.0.2] ### Changed diff --git a/README.md b/README.md index 0542b25..603f32e 100644 --- a/README.md +++ b/README.md @@ -68,6 +68,7 @@ source venv/bin/activate Next step is to install required `python3` packages: ```shell +cd backend pip3 install -r requirements.txt ``` @@ -80,7 +81,6 @@ docker run --rm -d -p 27017:27017 --name example-mongo mongo:4.4.6 To start backend service run: ```yaml -cd backend flask run ``` diff --git a/backend/SC4SNMP_UI_backend/__init__.py b/backend/SC4SNMP_UI_backend/__init__.py index 316dfec..a480844 100644 --- a/backend/SC4SNMP_UI_backend/__init__.py +++ b/backend/SC4SNMP_UI_backend/__init__.py @@ -8,7 +8,7 @@ load_dotenv() -__version__ = "1.0.2" +__version__ = "1.1.0-beta.1" MONGO_URI = os.getenv("MONGO_URI") mongo_client = MongoClient(MONGO_URI) diff --git a/backend/SC4SNMP_UI_backend/apply_changes/apply_changes.py b/backend/SC4SNMP_UI_backend/apply_changes/apply_changes.py index b220f9d..d6ba532 100644 --- 
a/backend/SC4SNMP_UI_backend/apply_changes/apply_changes.py +++ b/backend/SC4SNMP_UI_backend/apply_changes/apply_changes.py @@ -43,11 +43,13 @@ def __init__(self) -> None: mongo_config_collection.update_one( { "previous_job_start_time": {"$exists": True}, - "currently_scheduled": {"$exists": True}} + "currently_scheduled": {"$exists": True}, + "task_id": {"$exists": True}} ,{ "$set":{ "previous_job_start_time": None, - "currently_scheduled": False + "currently_scheduled": False, + "task_id": None } }, upsert=True diff --git a/backend/SC4SNMP_UI_backend/apply_changes/handling_chain.py b/backend/SC4SNMP_UI_backend/apply_changes/handling_chain.py index fd8e51a..297b15b 100644 --- a/backend/SC4SNMP_UI_backend/apply_changes/handling_chain.py +++ b/backend/SC4SNMP_UI_backend/apply_changes/handling_chain.py @@ -2,7 +2,9 @@ import ruamel.yaml from flask import current_app from SC4SNMP_UI_backend import mongo_client -from SC4SNMP_UI_backend.apply_changes.tasks import run_job +from SC4SNMP_UI_backend.apply_changes.tasks import run_job, get_job_config +from SC4SNMP_UI_backend.apply_changes.kubernetes_job import create_job +from kubernetes.client import ApiException import datetime import os @@ -13,11 +15,23 @@ VALUES_DIRECTORY = os.getenv("VALUES_DIRECTORY", "") VALUES_FILE = os.getenv("VALUES_FILE", "") KEEP_TEMP_FILES = os.getenv("KEEP_TEMP_FILES", "false") +JOB_NAMESPACE = os.getenv("JOB_NAMESPACE", "sc4snmp") mongo_config_collection = mongo_client.sc4snmp.config_collection mongo_groups = mongo_client.sc4snmp.groups_ui mongo_inventory = mongo_client.sc4snmp.inventory_ui mongo_profiles = mongo_client.sc4snmp.profiles_ui + +class EmptyValuesFileException(Exception): + def __init__(self, filename): + self.message = f"{filename} cannot be empty. Check sc4snmp documentation for template." + super().__init__(self.message) + +class YamlParserException(Exception): + def __init__(self, filename): + self.message = f"Error occurred while reading {filename}. Check yaml syntax."
+ super().__init__(self.message) + class Handler(ABC): @abstractmethod def set_next(self, handler): @@ -71,8 +85,15 @@ def handle(self, request: dict): values_file_resolved = False values = {} if values_file_resolved: - with open(values_file_path, "r") as file: - values = yaml.load(file) + try: + with open(values_file_path, "r") as file: + values = yaml.load(file) + except ruamel.yaml.parser.ParserError as e: + current_app.logger.error(f"Error occurred while reading {VALUES_FILE}. Check yaml syntax.") + raise YamlParserException(VALUES_FILE) + if values is None: + current_app.logger.error(f"{VALUES_FILE} cannot be empty. Check sc4snmp documentation for template.") + raise EmptyValuesFileException(VALUES_FILE) if not values_file_resolved or KEEP_TEMP_FILES.lower() in ["t", "true", "y", "yes", "1"]: delete_temp_files = False @@ -120,31 +141,55 @@ def handle(self, request: dict = None): :return: pass dictionary with job_delay in seconds to the next handler """ record = list(mongo_config_collection.find())[0] - last_update = record["previous_job_start_time"] - if last_update is None: - # If it's the first time that the job is run (record in mongo_config_collection has been created - # in ApplyChanges class and last_update attribute is None) then job delay should be equal to - # CHANGES_INTERVAL_SECONDS. Update the mongo record with job state accordingly. - job_delay = CHANGES_INTERVAL_SECONDS + schedule_new_job = True + # get_job_config return job configuration in "job" variable and BatchV1Api from kubernetes client + job, batch_v1 = get_job_config() + if job is None or batch_v1 is None: + raise ValueError("CheckJobHandler: Job configuration is empty") + try: + # Try creating a new kubernetes job immediately. If the previous job is still present in the namespace, + # ApiException will be thrown. 
+ create_job(batch_v1, job, JOB_NAMESPACE) + task_id = record["task_id"] + if task_id is not None: + # revoke existing Celery task with the previously scheduled job + current_app.extensions["celery"].control.revoke(task_id, + terminate=True, signal='SIGKILL') mongo_config_collection.update_one({"_id": record["_id"]}, - {"$set": {"previous_job_start_time": datetime.datetime.utcnow()}}) - # time from the last update + {"$set": {"previous_job_start_time": datetime.datetime.utcnow(), + "currently_scheduled": False, + "task_id": None}}) + job_delay = 1 time_difference = 0 - else: + schedule_new_job = False + except ApiException: # Check how many seconds have elapsed since the last time that the job was run. If the time difference - # is greater than CHANGES_INTERVAL_SECONDS then job can be run immediately. Otherwise, calculate how + # is greater than CHANGES_INTERVAL_SECONDS then job can be scheduled within 1 second. Otherwise, calculate how # many seconds are left until minimum time difference between updates (CHANGES_INTERVAL_SECONDS). - current_time = datetime.datetime.utcnow() - delta = current_time - last_update - time_difference = delta.total_seconds() - if time_difference > CHANGES_INTERVAL_SECONDS: - job_delay = 1 + last_update = record["previous_job_start_time"] + if last_update is None: + # If it's the first time that the job is run (record in mongo_config_collection has been created + # in ApplyChanges class and last_update attribute is None) but the previous job is still in the namespace + # then job delay should be equal to CHANGES_INTERVAL_SECONDS. + # Update the mongo record with job state accordingly. 
+ job_delay = CHANGES_INTERVAL_SECONDS + mongo_config_collection.update_one({"_id": record["_id"]}, + {"$set": {"previous_job_start_time": datetime.datetime.utcnow()}}) + # time from the last update + time_difference = 0 else: - job_delay = int(CHANGES_INTERVAL_SECONDS - time_difference) + current_time = datetime.datetime.utcnow() + delta = current_time - last_update + time_difference = delta.total_seconds() + if time_difference > CHANGES_INTERVAL_SECONDS: + job_delay = 1 + else: + job_delay = int(CHANGES_INTERVAL_SECONDS - time_difference) result = { "job_delay": job_delay, - "time_from_last_update": time_difference + "time_from_last_update": time_difference, + "schedule_new_job": schedule_new_job } current_app.logger.info(f"CheckJobHandler: {result}") @@ -157,11 +202,11 @@ def handle(self, request: dict): ScheduleHandler schedules the kubernetes job with updated sc4snmp configuration """ record = list(mongo_config_collection.find())[0] - if not record["currently_scheduled"]: + if not record["currently_scheduled"] and request["schedule_new_job"]: # If the task isn't currently scheduled, schedule it and update its state in mongo. 
+ async_result = run_job.apply_async(countdown=request["job_delay"], queue='apply_changes') mongo_config_collection.update_one({"_id": record["_id"]}, - {"$set": {"currently_scheduled": True}}) - run_job.apply_async(countdown=request["job_delay"], queue='apply_changes') + {"$set": {"currently_scheduled": True, "task_id": async_result.id}}) current_app.logger.info( f"ScheduleHandler: scheduling new task with the delay of {request['job_delay']} seconds.") else: diff --git a/backend/SC4SNMP_UI_backend/apply_changes/routes.py b/backend/SC4SNMP_UI_backend/apply_changes/routes.py index c6089f4..88c769d 100644 --- a/backend/SC4SNMP_UI_backend/apply_changes/routes.py +++ b/backend/SC4SNMP_UI_backend/apply_changes/routes.py @@ -1,7 +1,9 @@ -from flask import Blueprint, jsonify +from flask import Blueprint, jsonify, current_app from flask_cors import cross_origin from SC4SNMP_UI_backend.apply_changes.apply_changes import ApplyChanges +from SC4SNMP_UI_backend.apply_changes.handling_chain import EmptyValuesFileException, YamlParserException import os +import traceback apply_changes_blueprint = Blueprint('common_blueprint', __name__) JOB_CREATION_RETRIES = int(os.getenv("JOB_CREATION_RETRIES", 10)) @@ -19,4 +21,15 @@ def apply_changes(): else: message = f"Configuration will be updated in approximately {job_delay} seconds." result = jsonify({"message": message}) - return result, 200 \ No newline at end of file + return result, 200 + +@apply_changes_blueprint.errorhandler(Exception) +@cross_origin() +def handle_exception(e): + current_app.logger.error(traceback.format_exc()) + if isinstance(e, (EmptyValuesFileException, YamlParserException)): + result = jsonify({"message": e.message}) + return result, 400 + + result = jsonify({"message": "Unidentified error. 
Check logs."}) + return result, 400 \ No newline at end of file diff --git a/backend/SC4SNMP_UI_backend/apply_changes/tasks.py b/backend/SC4SNMP_UI_backend/apply_changes/tasks.py index 2e5bfed..4428d25 100644 --- a/backend/SC4SNMP_UI_backend/apply_changes/tasks.py +++ b/backend/SC4SNMP_UI_backend/apply_changes/tasks.py @@ -15,8 +15,11 @@ JOB_CONFIG_PATH = os.getenv("JOB_CONFIG_PATH", "/config/job_config.yaml") celery_logger = get_task_logger(__name__) -@shared_task() -def run_job(): +def get_job_config(): + """ + :return: job - configuration of the job + batch_v1 - BatchV1Api object from kubernetes client + """ job = None batch_v1 = None with open(JOB_CONFIG_PATH, encoding="utf-8") as file: @@ -26,6 +29,13 @@ def run_job(): config.load_incluster_config() batch_v1 = client.BatchV1Api() job = create_job_object(config_file) + return job, batch_v1 + +@shared_task() +def run_job(): + job, batch_v1 = get_job_config() + if job is None or batch_v1 is None: + raise ValueError("Scheduled kubernetes job: Job configuration is empty") with MongoClient(MONGO_URI) as connection: try_creating = True @@ -39,8 +49,9 @@ def run_job(): try: record = list(connection.sc4snmp.config_collection.find())[0] connection.sc4snmp.config_collection.update_one({"_id": record["_id"]}, - {"$set": {"previous_job_start_time": datetime.datetime.utcnow(), - "currently_scheduled": False}}) + {"$set": {"previous_job_start_time": datetime.datetime.utcnow(), + "currently_scheduled": False, + "task_id": None}}) except Exception as e: celery_logger.info(f"Error occurred while updating job state after job creation: {str(e)}") except ApiException: @@ -50,6 +61,6 @@ def run_job(): celery_logger.info(f"Kubernetes job was not created. 
Max retries ({JOB_CREATION_RETRIES}) exceeded.") record = list(connection.sc4snmp.config_collection.find())[0] connection.sc4snmp.config_collection.update_one({"_id": record["_id"]}, - {"$set": {"currently_scheduled": False}}) + {"$set": {"currently_scheduled": False, "task_id": None}}) else: - time.sleep(10) \ No newline at end of file + time.sleep(10) diff --git a/backend/requirements.txt b/backend/requirements.txt index 29f2a8d..7cb7d36 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,12 +1,12 @@ click==8.1.3 Flask==2.2.5 -Flask-Cors==3.0.10 +Flask-Cors==4.0.1 itsdangerous==2.1.2 -Jinja2==3.1.3 +Jinja2==3.1.4 MarkupSafe==2.1.1 -pymongo==4.1.1 +pymongo==4.6.3 six==1.16.0 -Werkzeug==2.3.8 +Werkzeug==3.0.3 pytest~=7.2.0 gunicorn kubernetes~=26.1.0 diff --git a/backend/tests/ui_handling/post_endpoints/test_post_apply_changes.py b/backend/tests/ui_handling/post_endpoints/test_post_apply_changes.py index 58cebec..7da516a 100644 --- a/backend/tests/ui_handling/post_endpoints/test_post_apply_changes.py +++ b/backend/tests/ui_handling/post_endpoints/test_post_apply_changes.py @@ -1,11 +1,14 @@ from unittest import mock -from unittest.mock import call +from unittest.mock import call, Mock from bson import ObjectId from copy import copy import ruamel import datetime import os +from kubernetes.client import ApiException from SC4SNMP_UI_backend.apply_changes.handling_chain import TMP_FILE_PREFIX +import pytest +from SC4SNMP_UI_backend.apply_changes.apply_changes import SingletonMeta VALUES_TEST_DIRECTORY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../yamls_for_tests/values_test") @@ -58,6 +61,12 @@ def reset_generated_values(): yaml.dump(original_data, file) +@pytest.fixture(autouse=True) +def reset_singleton(): + yield # The code after yield is executed after the test + SingletonMeta._instances = {} + + common_id = "635916b2c8cb7a15f28af40a" groups_collection = [ @@ -171,24 +180,26 @@ def reset_generated_values(): 
@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.VALUES_FILE", "values.yaml") @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.KEEP_TEMP_FILES", "true") @mock.patch("datetime.datetime") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.create_job") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.get_job_config") @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.run_job") @mock.patch("pymongo.collection.Collection.update_one") @mock.patch("pymongo.collection.Collection.find") -def test_apply_changes_first_call(m_find, m_update, m_run_job, m_datetime, client): +def test_apply_changes_first_call_no_job_in_namespace(m_find, m_update, m_run_job, m_get_job_config, m_create_job, m_datetime, client): datetime_object = datetime.datetime(2020, 7, 10, 10, 30, 0, 0) m_datetime.utcnow = mock.Mock(return_value=datetime_object) collection = { "_id": ObjectId(common_id), "previous_job_start_time": None, - "currently_scheduled": False + "currently_scheduled": False, + "task_id": None } m_find.side_effect = [ groups_collection, # call from SaveConfigToFileHandler profiles_collection, # call from SaveConfigToFileHandler inventory_collection, # call from SaveConfigToFileHandler - [collection], - [collection], - [collection] + [collection], # call from CheckJobHandler + [collection], # call from ScheduleHandler ] calls_find = [ call(), @@ -196,104 +207,141 @@ def test_apply_changes_first_call(m_find, m_update, m_run_job, m_datetime, clien call() ] calls_update = [ - call({"_id": ObjectId(common_id)},{"$set": {"previous_job_start_time": datetime_object}}), - call({"_id": ObjectId(common_id)},{"$set": {"currently_scheduled": True}}) + call({'previous_job_start_time': {'$exists': True}, 'currently_scheduled': {'$exists': True}, + 'task_id': {'$exists': True}}, + {'$set': {'previous_job_start_time': None, 'currently_scheduled': False, 'task_id': None}}, upsert=True), # call from ApplyChanges + call({"_id": ObjectId(common_id)}, 
{"$set": {"previous_job_start_time": datetime_object, "currently_scheduled": False, "task_id": None}}) # call from CheckJobHandler ] - apply_async_calls = [ - call(countdown=300, queue='apply_changes') + create_job_calls = [ + call("val1", "val2", "sc4snmp") ] + m_get_job_config.return_value = ("val2", "val1") + m_create_job.return_value = None m_run_job.apply_async.return_value = None m_update.return_value = None response = client.post("/apply-changes") m_find.assert_has_calls(calls_find) + assert m_get_job_config.called m_update.assert_has_calls(calls_update) - m_run_job.apply_async.assert_has_calls(apply_async_calls) - assert response.json == {"message": "Configuration will be updated in approximately 300 seconds."} + m_create_job.assert_has_calls(create_job_calls) + assert not m_run_job.apply_async.called + assert response.json == {"message": "Configuration will be updated in approximately 1 seconds."} reference_files, generated_files = return_generated_and_reference_files() for ref_f, gen_f in zip(reference_files, generated_files): assert ref_f == gen_f delete_generated_files() reset_generated_values() + @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.VALUES_DIRECTORY", VALUES_TEST_DIRECTORY) @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.TMP_DIR", VALUES_TEST_DIRECTORY) -@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.datetime") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.VALUES_FILE", "values.yaml") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.KEEP_TEMP_FILES", "true") +@mock.patch("datetime.datetime") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.create_job") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.get_job_config") @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.run_job") @mock.patch("pymongo.collection.Collection.update_one") @mock.patch("pymongo.collection.Collection.find") -def test_apply_changes_job_currently_scheduled(m_find, 
m_update, m_run_job, m_datetime, client): - datetime_object_old = datetime.datetime(2020, 7, 10, 10, 27, 10, 0) - datetime_object_new = datetime.datetime(2020, 7, 10, 10, 30, 0, 0) - m_datetime.datetime.utcnow = mock.Mock(return_value=datetime_object_new) +def test_apply_changes_first_call_job_present_in_namespace(m_find, m_update, m_run_job, m_get_job_config, m_create_job, m_datetime, client): + datetime_object = datetime.datetime(2020, 7, 10, 10, 30, 0, 0) + m_datetime.utcnow = mock.Mock(return_value=datetime_object) collection = { "_id": ObjectId(common_id), - "previous_job_start_time": datetime_object_old, - "currently_scheduled": True + "previous_job_start_time": None, + "currently_scheduled": False, + "task_id": None } m_find.side_effect = [ - groups_collection, # call from SaveConfigToFileHandler + groups_collection, # call from SaveConfigToFileHandler profiles_collection, # call from SaveConfigToFileHandler inventory_collection, # call from SaveConfigToFileHandler - [collection], - [collection], - [collection] + [collection], # call from CheckJobHandler + [collection], # call from ScheduleHandler ] calls_find = [ call(), call(), call() ] - m_run_job.apply_async.return_value = None + calls_update = [ + call({'previous_job_start_time': {'$exists': True}, 'currently_scheduled': {'$exists': True}, + 'task_id': {'$exists': True}}, + {'$set': {'previous_job_start_time': None, 'currently_scheduled': False, 'task_id': None}}, upsert=True), # call from ApplyChanges + call({"_id": ObjectId(common_id)},{"$set": {"previous_job_start_time": datetime_object}}), # call from CheckJobHandler + call({"_id": ObjectId(common_id)}, {"$set": {"currently_scheduled": True, "task_id": "id_val"}}) # call from ScheduleHandler + + ] + apply_async_calls = [ + call(countdown=300, queue='apply_changes') + ] + create_job_calls = [ + call("val1", "val2", "sc4snmp") + ] + + m_get_job_config.return_value = ("val2", "val1") + m_create_job.side_effect = ApiException() + + apply_async_result = 
Mock() + apply_async_result.id = "id_val" + m_run_job.apply_async.return_value = apply_async_result m_update.return_value = None response = client.post("/apply-changes") m_find.assert_has_calls(calls_find) - assert not m_run_job.apply_async.called - assert response.json == {"message": "Configuration will be updated in approximately 130 seconds."} + assert m_get_job_config.called + m_update.assert_has_calls(calls_update) + m_create_job.assert_has_calls(create_job_calls) + m_run_job.apply_async.assert_has_calls(apply_async_calls) + assert response.json == {"message": "Configuration will be updated in approximately 300 seconds."} + reference_files, generated_files = return_generated_and_reference_files() + for ref_f, gen_f in zip(reference_files, generated_files): + assert ref_f == gen_f delete_generated_files() reset_generated_values() - @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.VALUES_DIRECTORY", VALUES_TEST_DIRECTORY) @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.TMP_DIR", VALUES_TEST_DIRECTORY) @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.datetime") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.create_job") +@mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.get_job_config") @mock.patch("SC4SNMP_UI_backend.apply_changes.handling_chain.run_job") @mock.patch("pymongo.collection.Collection.update_one") @mock.patch("pymongo.collection.Collection.find") -def test_apply_changes_new_job_delay_1(m_find, m_update, m_run_job, m_datetime, client): - datetime_object_old = datetime.datetime(2020, 7, 10, 10, 20, 0, 0) +def test_apply_changes_job_currently_scheduled_job_present_in_namespace(m_find, m_update, m_run_job, m_get_job_config, m_create_job, m_datetime, client): + datetime_object_old = datetime.datetime(2020, 7, 10, 10, 27, 10, 0) datetime_object_new = datetime.datetime(2020, 7, 10, 10, 30, 0, 0) m_datetime.datetime.utcnow = mock.Mock(return_value=datetime_object_new) collection = { "_id": 
ObjectId(common_id), "previous_job_start_time": datetime_object_old, - "currently_scheduled": False + "currently_scheduled": True, + "task_id": "test_id" } m_find.side_effect = [ groups_collection, # call from SaveConfigToFileHandler profiles_collection, # call from SaveConfigToFileHandler inventory_collection, # call from SaveConfigToFileHandler - [collection], - [collection], - [collection] + [collection], # call from CheckJobHandler + [collection], # call from ScheduleHandler ] calls_find = [ call(), call(), call() ] - apply_async_calls = [ - call(countdown=1, queue='apply_changes') + create_job_calls = [ + call("val1", "val2", "sc4snmp") ] - - m_run_job.apply_async.return_value = None - m_update.return_value = None + m_get_job_config.return_value = ("val2", "val1") + m_create_job.side_effect = ApiException() response = client.post("/apply-changes") m_find.assert_has_calls(calls_find) - m_run_job.apply_async.assert_has_calls(apply_async_calls) - assert response.json == {"message": "Configuration will be updated in approximately 1 seconds."} + m_create_job.assert_has_calls(create_job_calls) + assert not m_run_job.apply_async.called + assert response.json == {"message": "Configuration will be updated in approximately 130 seconds."} delete_generated_files() reset_generated_values() diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 7a9238c..a12a7da 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,4 +1,4 @@ -FROM node:20.12.0-alpine as build-step +FROM node:20.12-alpine as build-step WORKDIR /frontend ENV PATH /frontend/node_modules/.bin:$PATH COPY package.json yarn.lock lerna.json ./ diff --git a/frontend/lerna.json b/frontend/lerna.json index 525036b..3f7aa46 100644 --- a/frontend/lerna.json +++ b/frontend/lerna.json @@ -1,6 +1,6 @@ { "lerna": "^6.6.2", - "version": "1.0.2", + "version": "1.1.0-beta.1", "command": { "publish": { "ignoreChanges": ["*.md"] diff --git a/frontend/package.json b/frontend/package.json index e011eb4..d8836ac 
100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -19,7 +19,7 @@ "packages/*" ], "engines": { - "node": "20.12.0" + "node": "^20.12" }, "dependencies": { "cors": "^2.8.5", diff --git a/frontend/packages/manager/CHANGELOG.md b/frontend/packages/manager/CHANGELOG.md deleted file mode 100644 index ec5626d..0000000 --- a/frontend/packages/manager/CHANGELOG.md +++ /dev/null @@ -1,6 +0,0 @@ -# Change Log - -0.0.1 – Release date: TBA -------- - -* Initial version diff --git a/frontend/packages/manager/package.json b/frontend/packages/manager/package.json index 3582e45..19339f6 100644 --- a/frontend/packages/manager/package.json +++ b/frontend/packages/manager/package.json @@ -1,6 +1,6 @@ { "name": "@splunk/manager", - "version": "1.0.2", + "version": "1.1.0-beta.1", "license": "UNLICENSED", "scripts": { "build": "NODE_ENV=production webpack --bail --config demo/webpack.standalone.config.js", @@ -77,6 +77,6 @@ "styled-components": "5.1.1" }, "engines": { - "node": "20.12.0" + "node": "^20.12" } } diff --git a/frontend/packages/manager/src/components/menu_header/Header.jsx b/frontend/packages/manager/src/components/menu_header/Header.jsx index e62a0b2..a8b83e1 100644 --- a/frontend/packages/manager/src/components/menu_header/Header.jsx +++ b/frontend/packages/manager/src/components/menu_header/Header.jsx @@ -54,6 +54,12 @@ function Header(){ ErrCtx.setMessage(response.data.message); } }) + .catch((error) => { + console.log(error) + ErrCtx.setOpen(true); + ErrCtx.setErrorType("error"); + ErrCtx.setMessage("Error: " + error.response.data.message); + }) }; const addButtonLabel = { diff --git a/frontend/yarn.lock b/frontend/yarn.lock index d60ccc6..94f41e1 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -5178,9 +5178,9 @@ ee-first@1.1.1: integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== ejs@^3.1.7: - version "3.1.9" - resolved 
"https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" - integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== + version "3.1.10" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b" + integrity sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== dependencies: jake "^10.8.5" @@ -12743,7 +12743,16 @@ wordwrap@^1.0.0: resolved "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz" integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==