diff --git a/.docker/README.md b/.docker/README.md new file mode 100644 index 0000000000..66b76b0f2d --- /dev/null +++ b/.docker/README.md @@ -0,0 +1,17 @@ +## What is this directory? +This directory is a space for mounting directories to docker containers, allowing the mounts to be specified in committed code while the contents of the mounts remain ignored by git. + +### postgres +The `postgres` directory is mounted to `/docker-entrypoint-initdb.d`. Any `.sh` or `.sql` files will be executed when the container is first started with a new data volume. You can read more about this functionality on the [Docker Hub page](https://hub.docker.com/_/postgres), under _Initialization scripts_. + +When you run docker services through the Makefile commands, they specify a docker-compose project name that depends on the name of the current git branch. This causes the volumes to change when the branch changes, which is helpful when switching between many branches that might have incompatible database schema changes. The downside is that whenever you start a new branch, you'll have to re-initialize the database, for example with `yarn run devsetup`. Creating a SQL dump from an existing, initialized database and placing it in this directory will allow you to skip this step. + +To create a SQL dump of your preferred database data for local testing, run `make .docker/postgres/init.sql` while the docker postgres container is running. + +> Note: you will likely need to run `make migrate` to ensure your database schema is up-to-date when using this technique. + +#### pgpass +Stores the postgres credentials for the docker service so that scripts can authenticate without manually providing a password; created by `make .docker/pgpass`. + +### minio
The `minio` directory is mounted to `/data`, since it isn't necessarily useful to have this data isolated based on the current git branch. diff --git a/.editorconfig b/.editorconfig index 1f49431c53..8db7923734 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,5 +1,8 @@ root = true +[*] +max_line_length = 100 + [*.js] indent_size = 2 diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3ba13e0cec..0b2ccf668e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1 +1,8 @@ blank_issues_enabled: false +contact_links: + - name: Studio GitHub Discussions + url: https://github.com/learningequality/studio/discussions + about: Please ask general questions about contributing to Studio or report development server issues here. + - name: Learning Equality Community Forum + url: https://community.learningequality.org/ + about: Ask and answer questions about Learning Equality's products and tools, share your experiences using Kolibri, and connect with users around the world.
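For context, the branch-scoped volumes described in the README above come from deriving the docker-compose project name from the current git branch. A minimal sketch of that derivation, assuming the same character sanitization as the `BRANCH_NAME` variable added to the Makefile later in this diff (the function name here is illustrative, not part of the codebase):

```python
# Sketch: derive a branch-scoped docker-compose project name.
import re
import subprocess


def compose_project_name() -> str:
    branch = subprocess.check_output(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"], text=True
    ).strip()
    # Replace characters docker-compose would reject, as the Makefile does with sed
    return "studio_" + re.sub(r"[^a-zA-Z0-9_-]", "-", branch)


print(compose_project_name())  # e.g. "studio_my-feature-branch"
```

Because the project name changes with the branch, each branch gets its own postgres volume, which is why a fresh branch needs either `yarn run devsetup` or an `init.sql` dump in `.docker/postgres`.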
diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 128ae4fe41..24349f8d83 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -7,10 +7,30 @@ updates: - package-ecosystem: "pip" directory: "/" schedule: - interval: "daily" + interval: "weekly" + day: "wednesday" + time: "00:00" # Maintain dependencies for Javascript - package-ecosystem: "npm" directory: "/" schedule: - interval: "daily" + interval: "weekly" + day: "wednesday" + time: "00:00" + groups: + babel: + patterns: + - "@babel/*" + + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + day: "wednesday" + time: "00:00" + groups: + github: + patterns: + - "actions/*" diff --git a/.github/workflows/containerbuild.yml b/.github/workflows/containerbuild.yml new file mode 100644 index 0000000000..361b0fad36 --- /dev/null +++ b/.github/workflows/containerbuild.yml @@ -0,0 +1,105 @@ +name: Container Build + +on: + push: + branches: + - unstable + - hotfixes + - master + tags: + - 'v*' + pull_request: + +jobs: + pre_postgres: + name: Path match check - postgres + runs-on: ubuntu-latest + # Map a step output to a job output + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + skip_after_successful_duplicate: false + github_token: ${{ github.token }} + paths: '["docker/Dockerfile.postgres.dev", ".github/workflows/containerbuild.yml"]' + + build_and_push_postgres: + name: Postgres - build and push Docker image to GitHub Container Registry + needs: pre_postgres + if: ${{ needs.pre_postgres.outputs.should_skip != 'true' }} + runs-on: ubuntu-latest + steps: + - name: Checkout codebase + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the GitHub Container Registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ghcr.io/learningequality/postgres + env: + DOCKER_METADATA_ANNOTATIONS_LEVELS: manifest,index + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./docker + file: ./docker/Dockerfile.postgres.dev + platforms: linux/amd64,linux/arm64 + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + annotations: ${{ steps.meta.outputs.annotations }} + + pre_nginx: + name: Path match check - nginx + runs-on: ubuntu-latest + # Map a step output to a job output + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + skip_after_successful_duplicate: false + github_token: ${{ github.token }} + paths: '["k8s/images/nginx/*", ".github/workflows/containerbuild.yml"]' + + build_nginx: + name: nginx - test build of nginx Docker image + needs: pre_nginx + if: ${{ needs.pre_nginx.outputs.should_skip != 'true' }} + runs-on: ubuntu-latest + steps: + - name: Checkout codebase + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + uses: docker/build-push-action@v5
+ with: + context: ./ + file: ./k8s/images/nginx/Dockerfile + platforms: linux/amd64 + push: false diff --git a/.github/workflows/deploytest.yml b/.github/workflows/deploytest.yml index 11768ed489..71b3b9296c 100644 --- a/.github/workflows/deploytest.yml +++ b/.github/workflows/deploytest.yml @@ -27,13 +27,13 @@ jobs: if: ${{ needs.pre_job.outputs.should_skip != 'true' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Use Node.js - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: '16.x' - name: Cache Node.js modules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: '**/node_modules' key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }} @@ -51,13 +51,13 @@ jobs: if: ${{ needs.pre_job.outputs.should_skip != 'true' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: pip cache - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: ~/.cache/pip key: ${{ runner.os }}-pyprod-${{ hashFiles('requirements.txt') }} @@ -69,11 +69,11 @@ jobs: pip install pip-tools pip-sync requirements.txt - name: Use Node.js - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: '16.x' - name: Cache Node.js modules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: '**/node_modules' key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }} diff --git a/.github/workflows/frontendlint.yml b/.github/workflows/frontendlint.yml index 6de4d701bd..c28a80937a 100644 --- a/.github/workflows/frontendlint.yml +++ b/.github/workflows/frontendlint.yml @@ -27,13 +27,13 @@ jobs: if: ${{ needs.pre_job.outputs.should_skip != 'true' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Use Node.js - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: '16.x' - name: Cache Node.js modules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: '**/node_modules' key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }} @@ -49,7 +49,7 @@ jobs: if: github.event.pull_request && github.event.pull_request.head.repo.full_name == github.repository id: git-check run: echo ::set-output name=modified::$(git diff-index --name-only HEAD) - - uses: tibdex/github-app-token@v1 + - uses: tibdex/github-app-token@v2 if: github.event.pull_request && github.event.pull_request.head.repo.full_name == github.repository && steps.git-check.outputs.modified != '' id: generate-token with: diff --git a/.github/workflows/frontendtest.yml b/.github/workflows/frontendtest.yml index c9ed46672c..e83ac316d8 100644 --- a/.github/workflows/frontendtest.yml +++ b/.github/workflows/frontendtest.yml @@ -27,13 +27,13 @@ jobs: if: ${{ needs.pre_job.outputs.should_skip != 'true' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Use Node.js - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: '16.x' - name: Cache Node.js modules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: '**/node_modules' key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }} diff --git a/.github/workflows/notify_team_new_comment.yml b/.github/workflows/notify_team_new_comment.yml new file mode 100644 index 0000000000..5c4f675d09 --- /dev/null +++ 
b/.github/workflows/notify_team_new_comment.yml @@ -0,0 +1,35 @@ +name: Send a Slack notification when a contributor comments on an issue + +on: + issue_comment: + types: [created] + +jobs: + contributor_issue_comment: + name: Contributor issue comment + + if: >- + ${{ + !github.event.issue.pull_request && + github.event.comment.author_association != 'MEMBER' && + github.event.comment.author_association != 'OWNER' + }} + + runs-on: ubuntu-latest + steps: + - name: Escape title double quotes + id: escape_title + run: | + title='${{ github.event.issue.title }}' + echo "ISSUE_TITLE=${title//\"/\\\"}" >> "$GITHUB_OUTPUT" + + - name: Send message to Slack channel + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + uses: slackapi/slack-github-action@v1.25.0 + with: + payload: | + { + "text": "*[Studio] New comment on issue: <${{ github.event.issue.html_url }}#issuecomment-${{ github.event.comment.id }}|${{ steps.escape_title.outputs.ISSUE_TITLE }} by ${{ github.event.comment.user.login }}>*" + } diff --git a/.github/workflows/pythontest.yml b/.github/workflows/pythontest.yml index 217399ea8e..443e445b4e 100644 --- a/.github/workflows/pythontest.yml +++ b/.github/workflows/pythontest.yml @@ -61,7 +61,7 @@ jobs: # Maps port 6379 on service container to the host - 6379:6379 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up minio run: | docker run -d -p 9000:9000 --name minio \ -e "MINIO_ACCESS_KEY=development" \ -e "MINIO_SECRET_KEY=development" \ -v /tmp/minio_data:/data \ -v /tmp/minio_config:/root/.minio \ minio/minio server /data - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - name: Set up Python 3.10 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' - name: pip cache - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: ~/.cache/pip key: ${{ runner.os }}-pytest-${{ hashFiles('requirements.txt', 'requirements-dev.txt') }} diff --git a/.gitignore b/.gitignore index b5e0261f09..8d869357f8 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,8 @@ var/ # IntelliJ IDE, except project config .idea/* !.idea/studio.iml +# ignore future updates to run configuration +.run/devserver.run.xml # PyInstaller # Usually these files are written by a python script from a template @@ -95,8 +97,11 @@ contentcuration/csvs/ # Ignore the TAGS file generated by some editors TAGS -# Ignore Vagrant-created files -/.vagrant/ +# Services +.vagrant/ +.docker/minio/* +.docker/postgres/* +.docker/pgpass # Ignore test files /contentcuration/contentcuration/proxy_settings.py diff --git a/.run/devserver.run.xml b/.run/devserver.run.xml new file mode 100644 index 0000000000..1c94ee6402 --- /dev/null +++ b/.run/devserver.run.xml @@ -0,0 +1,24 @@ + + + + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..35d0e2c4c1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,28 @@ + +## How can I contribute? + +1. šŸ“™ **Skim through the [Developer documentation](./docs/_index.md)** so that you know where to look for answers later on. +2. šŸ’» **Follow the [Local development instructions](./docs/local_dev_docker.md) to set up your development server.** +3. šŸ” **Search for issues tagged as [help wanted](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22+no%3Aassignee) or [good first issue](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+no%3Aassignee).** +4. 
šŸ—£ļø **Ask us for an assignment in the comments of an issue you've chosen.** Please request assignment of a reasonable amount of issues at a time. Once you finish your current issue or two, you are welcome to ask for more. + +**ā“ Where to ask questions** + +- For anything development related, refer to the [Developer documentation](./docs/_index.md) at first. Some answers may already be there. +- For questions related to a specific issue or assignment requests, use the corresponding issue's comments section. +- Visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to ask about anything related to contributing or to troubleshoot development server issues. + +**šŸ‘„ How to connect** + +- We encourage you to visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to connect with the Learning Equality team as well as with other contributors. +- If you'd like to contribute on a regular basis, we are happy to invite you to our open-source community Slack channel. Get in touch with us at info@learningequality.org to receive an invitation. + +--- + +šŸ•– Please allow us a few days to reply to your comments. If you don't hear from us within a week, reach out via [GitHub Discussions](https://github.com/learningequality/studio/discussions). + +As soon as you open a pull request, it may take us a week or two to review it as we're a small team. We appreciate your contribution and will provide feedback. + +--- + +*Thank you for your interest in contributing! Learning Equality was founded by volunteers dedicated to helping make educational materials more accessible to those in need, and every contribution makes a difference.* diff --git a/Makefile b/Makefile index 73fca6ea8c..cebcf1b79f 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,11 @@ +# standalone install method +DOCKER_COMPOSE = docker-compose + +# support new plugin installation for docker-compose +ifeq (, $(shell which docker-compose)) +DOCKER_COMPOSE = docker compose +endif + ############################################################### # PRODUCTION COMMANDS ######################################### ############################################################### @@ -19,10 +27,6 @@ gunicornserver: NUM_PROCS:=1 gunicornserver: cd contentcuration/ && gunicorn contentcuration.wsgi:application --timeout=4000 --error-logfile=/var/log/gunicorn-error.log --workers=${NUM_PROCS} --bind=0.0.0.0:8081 --pid=/tmp/contentcuration.pid --log-level=debug || sleep infinity - -contentnodegc: - cd contentcuration/ && python manage.py garbage_collect - prodceleryworkers: cd contentcuration/ && celery -A contentcuration worker -l info --concurrency=3 --task-events @@ -36,6 +40,21 @@ migrate: python contentcuration/manage.py migrate || true python contentcuration/manage.py loadconstants +# This is a special command that is we'll reuse to run data migrations outside of the normal +# django migration process. This is useful for long running migrations which we don't want to block +# the CD build. Do not delete! +# Procedure: +# 1) Add a new management command for the migration +# 2) Call it here +# 3) Perform the release +# 4) Remove the management command from this `deploy-migrate` recipe +# 5) Repeat! +deploy-migrate: + echo "Nothing to do here!" 
+ +contentnodegc: + python contentcuration/manage.py garbage_collect + filedurations: python contentcuration/manage.py set_file_duration @@ -46,6 +65,10 @@ set-tsvectors: python contentcuration/manage.py set_channel_tsvectors python contentcuration/manage.py set_contentnode_tsvectors --published +reconcile: + python contentcuration/manage.py reconcile_publishing_status + python contentcuration/manage.py reconcile_change_tasks + ############################################################### # END PRODUCTION COMMANDS ##################################### ############################################################### @@ -66,10 +89,10 @@ i18n-extract: i18n-extract-frontend i18n-extract-backend i18n-transfer-context: yarn transfercontext -#i18n-django-compilemessages: - # Change working directory to kolibri/ such that compilemessages +i18n-django-compilemessages: + # Change working directory to contentcuration/ such that compilemessages # finds only the .po files nested there. - #cd kolibri && PYTHONPATH="..:$$PYTHONPATH" python -m kolibri manage compilemessages + cd contentcuration && python manage.py compilemessages i18n-upload: i18n-extract python node_modules/kolibri-tools/lib/i18n/crowdin.py upload-sources ${branch} @@ -80,27 +103,15 @@ i18n-pretranslate: i18n-pretranslate-approve-all: python node_modules/kolibri-tools/lib/i18n/crowdin.py pretranslate ${branch} --approve-all -i18n-convert: - python node_modules/kolibri-tools/lib/i18n/crowdin.py convert-files - i18n-download-translations: python node_modules/kolibri-tools/lib/i18n/crowdin.py rebuild-translations ${branch} python node_modules/kolibri-tools/lib/i18n/crowdin.py download-translations ${branch} - node node_modules/kolibri-tools/lib/i18n/intl_code_gen.js - python node_modules/kolibri-tools/lib/i18n/crowdin.py convert-files - # TODO: is this necessary? 
# Manual hack to add es language by copying es_ES to es - # cp -r contentcuration/locale/es_ES contentcuration/locale/es + yarn exec kolibri-tools i18n-code-gen -- --output-dir ./contentcuration/contentcuration/frontend/shared/i18n + $(MAKE) i18n-django-compilemessages + yarn exec kolibri-tools i18n-create-message-files -- --namespace contentcuration --searchPath ./contentcuration/contentcuration/frontend i18n-download: i18n-download-translations -i18n-update: - echo "WARNING: i18n-update has been renamed to i18n-download" - $(MAKE) i18n-download - echo "WARNING: i18n-update has been renamed to i18n-download" - -i18n-stats: - python node_modules/kolibri-tools/lib/i18n/crowdin.py translation-stats ${branch} - i18n-download-glossary: python node_modules/kolibri-tools/lib/i18n/crowdin.py download-glossary @@ -137,11 +148,13 @@ dummyusers: hascaptions: python contentcuration/manage.py set_orm_based_has_captions -export COMPOSE_PROJECT_NAME=studio_$(shell git rev-parse --abbrev-ref HEAD) +BRANCH_NAME := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[^a-zA-Z0-9_-]/-/g') -purge-postgres: - -PGPASSWORD=kolibri dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost - PGPASSWORD=kolibri createdb -U learningequality "kolibri-studio" --port 5432 -h localhost +export COMPOSE_PROJECT_NAME=studio_$(BRANCH_NAME) + +purge-postgres: .docker/pgpass + -PGPASSFILE=.docker/pgpass dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost + PGPASSFILE=.docker/pgpass createdb -U learningequality "kolibri-studio" --port 5432 -h localhost destroy-and-recreate-database: purge-postgres setup @@ -151,39 +164,56 @@ devceleryworkers: run-services: $(MAKE) -j 2 dcservicesup devceleryworkers +.docker/minio: + mkdir -p $@ + +.docker/postgres: + mkdir -p $@ + +.docker/pgpass: + echo "localhost:5432:kolibri-studio:learningequality:kolibri" > $@ + chmod 600 $@ + +.docker/postgres/init.sql: .docker/pgpass + # assumes postgres is running in a docker container + PGPASSFILE=.docker/pgpass pg_dump --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" --exclude-table-data=contentcuration_change --file $@ + dcbuild: # build all studio docker image and all dependent services using docker-compose - docker-compose build + $(DOCKER_COMPOSE) build -dcup: +dcup: .docker/minio .docker/postgres # run all services except for cloudprober - docker-compose up studio-app celery-worker + $(DOCKER_COMPOSE) up studio-app celery-worker -dcup-cloudprober: +dcup-cloudprober: .docker/minio .docker/postgres # run all services including cloudprober - docker-compose up + $(DOCKER_COMPOSE) up dcdown: - # run make deverver in foreground with all dependent services using docker-compose - docker-compose down + # stop all services that were started using dcup or dcup-cloudprober + $(DOCKER_COMPOSE) down dcclean: # stop all containers and delete volumes - docker-compose down -v + $(DOCKER_COMPOSE) down -v docker image prune -f dcshell: # bash shell inside the (running!)
studio-app container - docker-compose exec studio-app /usr/bin/fish + $(DOCKER_COMPOSE) exec studio-app /usr/bin/fish + +dcpsql: .docker/pgpass + PGPASSFILE=.docker/pgpass psql --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" -dctest: +dctest: .docker/minio .docker/postgres # run backend tests inside docker, in new instances - docker-compose run studio-app make test + $(DOCKER_COMPOSE) run studio-app make test -dcservicesup: +dcservicesup: .docker/minio .docker/postgres # launch all studio's dependent services using docker-compose - docker-compose -f docker-compose.yml -f docker-compose.alt.yml up minio postgres redis + $(DOCKER_COMPOSE) -f docker-compose.yml -f docker-compose.alt.yml up minio postgres redis dcservicesdown: # stop services that were started using dcservicesup - docker-compose -f docker-compose.yml -f docker-compose.alt.yml down + $(DOCKER_COMPOSE) -f docker-compose.yml -f docker-compose.alt.yml down diff --git a/README.md b/README.md index 821aba3b88..c7bd366d4e 100644 --- a/README.md +++ b/README.md @@ -13,258 +13,31 @@ Kolibri Studio uses the [Django framework](https://www.djangoproject.com/) for t If you are looking for help setting up custom content channels, uploading and organizing resources using Kolibri Studio, please refer to the [User Guide](https://kolibri-studio.readthedocs.io/en/latest/). -## Local development instructions -The following guide utilizes docker and docker-compose to run select services required for Studio to function. If you would rather install these services on your host, please follow the [host-setup guide](docs/host_services_setup.md). + +## How can I contribute? -### Prerequisites -Please install these prerequisites, or alternatives for setting up your local development environment: -- [volta](https://docs.volta.sh/guide/getting-started) or a different node.js manager -- [pyenv](https://kolibri-dev.readthedocs.io/en/develop/howtos/installing_pyenv.html) and [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv#installation) -- [docker](https://docs.docker.com/install/) and [docker-compose](https://docs.docker.com/compose/install/) +1. šŸ“™ **Skim through the [Developer documentation](./docs/_index.md)** so that you know where to look for answers later on. +2. šŸ’» **Follow the [Local development instructions](./docs/local_dev_docker.md) to set up your development server.** +3. šŸ” **Search for issues tagged as [help wanted](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22+no%3Aassignee) or [good first issue](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+no%3Aassignee).** +4. šŸ—£ļø **Ask us for an assignment in the comments of an issue you've chosen.** Please request assignment of a reasonable number of issues at a time. Once you finish your current issue or two, you are welcome to ask for more. +**ā“ Where to ask questions** -### Build your python virtual environment -To determine the preferred version of Python, you can check the `runtime.txt` file: -```bash -$ cat runtime.txt -# This is the required version of Python to run Studio currently. -# This is determined by the default Python 3 version that is installed -# inside Ubuntu Bionic, which is used to build images for Studio. -# We encode it here so that it can be picked up by Github's dependabot -# to manage automated package upgrades.
-python-3.9.13 -``` -Use `pyenv` to install the version of Python listed in that file, and to also set up a virtual environment: -```bash -pyenv install 3.9.13 -pyenv virtualenv 3.9.13 studio-py3.9 -pyenv activate studio-py3.9 -``` -Now you may install Studio's Python dependencies: -```bash -pip install -r requirements.txt -r requirements-dev.txt -``` -To deactivate the virtual environment, when you're finished developing on Studio for the time being: -```bash -pyenv deactivate -``` +- For anything development related, refer to the [Developer documentation](./docs/_index.md) at first. Some answers may already be there. +- For questions related to a specific issue or assignment requests, use the corresponding issue's comments section. +- Visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to ask about anything related to contributing or to troubleshoot development server issues. -#### A note about dependencies on Apple Silicon M1+ -If you run into an error with `pip install` related to the `grcpio` package, it is because it currently [does not support M1 with the version for `grcpio` Studio uses](https://github.com/grpc/grpc/issues/25082). In order to fix it, you will need to add the following environmental variables before running `pip install`: -```bash -export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 -export GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 -export CFLAGS="-I/opt/homebrew/opt/openssl/include" -export LDFLAGS="-L/opt/homebrew/opt/openssl/lib" -``` +**šŸ‘„ How to connect** -### Install frontend dependencies -Install the version of node.js supported by Studio, and install `yarn`: -```bash -volta install node@16 -volta install yarn -``` -After installing `yarn`, you may now install frontend dependencies: -```bash -yarn install -``` +- We encourage you to visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to connect with the Learning Equality team as well as with other contributors. +- If you'd like to contribute on a regular basis, we are happy to invite you to our open-source community Slack channel. Get in touch with us at info@learningequality.org to receive an invitation. -### Install and run services +--- -Studio requires some background services to be running: +šŸ•– Please allow us a few days to reply to your comments. If you don't hear from us within a week, reach out via [GitHub Discussions](https://github.com/learningequality/studio/discussions). -* Minio - a local S3 storage emulation -* PostgreSQL (postgres) - a relational database -* Redis - a fast key/value store useful for caching -* Celery - the task manager and executor, which relies on the Studio codebase +As soon as you open a pull request, it may take us a week or two to review it as we're a small team. We appreciate your contribution and will provide feedback. -Generally speaking, you'll want to open a separate terminal/terminal-tab to run the services. With docker and docker-compose installed, running the above services is as easy as: -```bash -make run-services -``` +--- -The above command may take longer the first time it's run. It includes starting the `celery` workers, and the other dependent services through docker, which can be done separately with the following two commands: - -```bash -make dcservicesup -make devceleryworkers -``` - -To confirm that docker-based services are running, you should see three containers when executing `docker ps`. 
For example: - -```bash -> docker ps -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -e09c5c203b93 redis:6.0.9 "docker-entrypoint.sā€¦" 51 seconds ago Up 49 seconds 0.0.0.0:6379->6379/tcp studio_vue-refactor_redis_1 -6164371efb6b minio/minio "minio server /data" 51 seconds ago Up 49 seconds 0.0.0.0:9000->9000/tcp studio_vue-refactor_minio_1 -c86bbfa3a59e postgres:12.10 "docker-entrypoint.sā€¦" 51 seconds ago Up 49 seconds 0.0.0.0:5432->5432/tcp studio_vue-refactor_postgres_1 -``` - -To stop the services, press Ctrl + C in the terminal where you ran `make run-services` (or `dcservicesup`). Once you've done that, you may run the following command to remove the docker containers (they will be recreated when you run `run-services` or `dcservicesup` again): -```bash -make dcservicesdown -``` - -### Initializing Studio -With the services running, in a separate terminal/terminal-tab, we can now initialize the database for Studio development purposes. The command below will initialize the database tables, import constants, and a user account for development: -```bash -yarn run devsetup -``` - -### Running the development server -With the services running, in a separate terminal/terminal-tab, and the database initialized, we can start the dev server: -```bash -yarn run devserver:hot # with Vue hot module reloading -# or -yarn run devserver # without hot module reloading -``` - -Either of the above commands will take a few moments to build the frontend. When it finishes, you can sign in with the account created by the `yarn run devsetup` command: -- url: `http://localhost:8080/accounts/login/` -- username: `a@a.com` -- password: `a` - -### Running the celery service -Studio uses `celery` for executing asynchronous tasks, which are integral to Studio's channel editing architecture. The celery service does not reload when there are Python changes like the Django devserver does, so it's often preferred to run it separately. If you are developing changes against a task or the celery configuration, you'll need to use `make dcservicesup` to run only the docker-based services. - -In a separate terminal/terminal-tab, run the following to start the service and press Ctrl + C to stop it: -```bash -make devceleryworkers -``` - -Stop and restart the above to reload your changes. - -## Adding or updating dependencies - -We use `pip-tools` to ensure all our dependencies use the same versions on all deployments. - -To add a dependency, add it to either `requirements.in` or `requirements-dev.in`, then -run `pip-compile requirements[-dev|-docs].in` to generate the .txt file. Please make sure that -both the `.in` and `.txt` file changes are part of the commit when updating dependencies. - -To update a dependency, use `pip-compile --upgrade-package [package-name] requirements[-dev|-docs].in` - -For more details, please see the [pip-tools docs on Github](https://github.com/jazzband/pip-tools). - -## Additional tools - -### Running tests - -With Studio's services running, you may run tests with the following commands: - -```bash -# backend -make test -# frontend -yarn run test -``` - -View [more testing tips](docs/running_tests.md) - -### Linting - -Front-end linting is run using: - -```bash -yarn run lint-frontend -``` - -Some linting errors can be fixed automatically by running: - -```bash -yarn run lint-frontend:format -``` - -Make sure you've set up pre-commit hooks as described above. This will ensure that linting is automatically run on staged changes before every commit. 
- -### Profiling and local production testing - -If you want to test the performance of your changes, you can start up a local server with settings closer to a production environment like so: - -```bash -# build frontend dependencies -yarn run build -# run the server (no webpack) -yarn run runserver -# or for profiling production more closely -yarn run runserver:prod-profiling -``` - -Once the local production server is running, you can also use Locust to test your changes under scenarios of high demand like so: - -```bash -cd deploy/chaos/loadtest -make timed_run -make stop_slaves # mac: killall python -``` - -#### Profiling - -In case you need to profile the application to know which part of the code are more time consuming, there are two different profilers available to work in two different modes. Both will store the profiling output in a directory that's determined by the `PROFILE_DIR` env variable. If this variable is not set, the output files will be store in a folder called profiler inside the OS temp folder (`/tmp/profile` usually) -Note that both profiling modes are incompatible: you can either use one or the other, but not both at the same time. In case the env variables are set for both modes, _All request profiling mode_ will be used. - -##### All requests profiling mode - -This mode will create interactive html files with all the profiling information for every request the Studio server receives. The name of the files will contain the total execution time, the endpoint name and a timestamp. - -To activate it an env variable called `PROFILE_STUDIO_FULL` must be set. - -Example of use: - -`PROFILE_STUDIO_FULL=y yarn runserver` - -Afterwards no further treatment of the generated files is needed. You can open directly the html files in your browser. - -##### Endpoint profiling mode - -When using the _all requests mode_ it's usual that the profile folder is soon full of information for requests that are not interesting for the developer, obscuring the files for specific endpoints. - -If an env variable called `PROFILE_STUDIO_FILTER` is used, the profiler will be executed only on the http requests containing the text stated by the variable. - -Example of use: - -`PROFILE_STUDIO_FILTER=edit yarn localprodserver` - -For this case, only html requests having the text _edit_ in their request path will be profiled. The profile folder will not have html files, but binary dump files (with the timestamp as filename) of the profiler information that can be later seen by different profiling tools (`snakeviz` that can be installed using pip is recommended). Also while the server is running, the ten most time consuming lines of code of the filtered request will be shown in the console where Studio has been launched. - -Example of snakeviz use: - -`snakeviz /tmp/profile/studio\:20200909161405011678.prof` - -will open the browser with an interactive diagram with all the profiling information - -### Storybook - -Storybook is a development environment for UI components. If this is your first encounter with this tool, you can check [this presentation](https://docs.google.com/presentation/d/10JL4C9buygWsTbT62Ym149Yh9zSR9nY_ZqFumBKUY0o/edit?usp=sharing) or [its website](https://storybook.js.org/). You are encouraged to use it any time you need to develop a new UI component. It is especially suitable for smaller to middle size components that represent basic UI building blocks. 
- -An example is worth a thousand words so please have a look at these simple [stories of an example component](./contentcuration/contentcuration/frontend/shared/views/details/DetailsRow.stories.js) to see how to write yours. For detailed information on writing stories you can [go through this tutorial](https://www.learnstorybook.com/intro-to-storybook/). - -You can also check [official addons](https://storybook.js.org/addons/). - -**Run development server** - -```bash -yarn run storybook -``` - -With detailed webpack information (useful when debugging loaders, addons and similar): - -```bash -yarn run storybook:debug -``` - -**Bundle** - -```bash -yarn run storybook:build -``` - -The output is saved to *storybook-static/*. - -### Current usage notes - -We've decided not to push our stories to the codebase and keep them locally in the near future. Although this limits the number of advantages Storybook provides, it allows us to start using it as soon as possible without the need to agree on all conventions and it also gives the whole team enough time to test the development workflow so we can decide later if we want to adopt this tool in a larger scale. - -Taking into account the above-mentioned, all stories except of example *DetailsRow.stories.js* will be ignored by git as long as you use a naming convention for Storybook source files: *\*.stories.js*. - -Although we don't share stories at this point, Storybook is installed and configured in the codebase to prevent the need for everyone to configure everything locally. If you update Storybook Webpack settings, install a new plugin and similar, you are welcome to share such updates with other members of the team. +*Thank you for your interest in contributing! Learning Equality was founded by volunteers dedicated to helping make educational materials more accessible to those in need, and every contribution makes a difference.* diff --git a/bin/run_minio.py b/bin/run_minio.py deleted file mode 100755 index 42adf31562..0000000000 --- a/bin/run_minio.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -import os -import pathlib -import subprocess - -MINIO_RUN_TYPES = ["LOCAL", "GCS_PROXY"] - -MINIO_LOCAL_HOME_STORAGE = pathlib.Path("/app") / "contentworkshop_content" - -MINIO_CONFIG_DIR = MINIO_LOCAL_HOME_STORAGE / ".minio" - -GOOGLE_APPLICATION_CREDENTIALS_PATH = os.getenv("GOOGLE_APPLICATION_CREDENTIALS") - -GOOGLE_GCS_PROJECT_ID = os.getenv("GOOGLE_GCS_PROJECT_ID") - - -if __name__ == "__main__": - - run_type = os.getenv("MINIO_RUN_TYPE") - - if run_type not in MINIO_RUN_TYPES: - raise AssertionError("MINIO_RUN_TYPE must be one of {}".format(MINIO_RUN_TYPES)) - - if run_type == "LOCAL": - cmd = ["minio", "server", "-C", str(MINIO_CONFIG_DIR), str(MINIO_LOCAL_HOME_STORAGE)] - elif run_type == "GCS_PROXY": - - if not os.path.exists(GOOGLE_APPLICATION_CREDENTIALS_PATH): - raise AssertionError("the env var GOOGLE_APPLICATION_CREDENTIALS must be defined," " and pointing to a credentials file for your project.") - - if not GOOGLE_GCS_PROJECT_ID: - raise AssertionError("$GOOGLE_GCS_PROJECT_ID must be defined with the project" " id where you store your objects.") - cmd = ["minio", "gateway", "gcs", GOOGLE_GCS_PROJECT_ID] - else: - raise Exception("Unhandled run_type type: {}".format(run_type)) - - subprocess.check_call(cmd) - - diff --git a/contentcuration/automation/__init__.py b/contentcuration/automation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/contentcuration/automation/admin.py 
b/contentcuration/automation/admin.py new file mode 100644 index 0000000000..4185d360e9 --- /dev/null +++ b/contentcuration/automation/admin.py @@ -0,0 +1,3 @@ +# from django.contrib import admin + +# Register your models here. diff --git a/contentcuration/automation/apps.py b/contentcuration/automation/apps.py new file mode 100644 index 0000000000..eaa1d3d4e1 --- /dev/null +++ b/contentcuration/automation/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class AutomationConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'automation' diff --git a/contentcuration/automation/migrations/__init__.py b/contentcuration/automation/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/contentcuration/automation/models.py b/contentcuration/automation/models.py new file mode 100644 index 0000000000..0b4331b362 --- /dev/null +++ b/contentcuration/automation/models.py @@ -0,0 +1,3 @@ +# from django.db import models + +# Create your models here. diff --git a/contentcuration/automation/tests.py b/contentcuration/automation/tests.py new file mode 100644 index 0000000000..a79ca8be56 --- /dev/null +++ b/contentcuration/automation/tests.py @@ -0,0 +1,3 @@ +# from django.test import TestCase + +# Create your tests here. diff --git a/contentcuration/automation/tests/appnexus/__init__.py b/contentcuration/automation/tests/appnexus/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/contentcuration/automation/tests/appnexus/test_base.py b/contentcuration/automation/tests/appnexus/test_base.py new file mode 100644 index 0000000000..7944e00e4f --- /dev/null +++ b/contentcuration/automation/tests/appnexus/test_base.py @@ -0,0 +1,48 @@ +import pytest + +from automation.utils.appnexus.base import Adapter +from automation.utils.appnexus.base import Backend + + +class MockBackend(Backend): + def connect(self) -> None: + return super().connect() + + def make_request(self, request): + return super().make_request(request) + + @classmethod + def _create_instance(cls) -> 'MockBackend': + return cls() + + +class MockAdapter(Adapter): + def mockoperation(self): + pass + + +def test_backend_error(): + with pytest.raises(NotImplementedError) as error: + Backend.get_instance() + assert "Subclasses should implement the creation of instance" in str(error.value) + +def test_backend_singleton(): + b1, b2 = MockBackend.get_instance(), MockBackend.get_instance() + assert id(b1) == id(b2) + + +def test_adapter_creation(): + a = MockAdapter(backend=MockBackend) + assert isinstance(a, Adapter) + + +def test_adapter_backend_default(): + b = MockBackend() + adapter = Adapter(backend=b) + assert isinstance(adapter.backend, Backend) + + +def test_adapter_backend_custom(): + b = MockBackend() + a = Adapter(backend=b) + assert a.backend is b diff --git a/contentcuration/automation/utils/appnexus/APILayer.md b/contentcuration/automation/utils/appnexus/APILayer.md new file mode 100644 index 0000000000..4e82e5b3f3 --- /dev/null +++ b/contentcuration/automation/utils/appnexus/APILayer.md @@ -0,0 +1,161 @@ +## API Layer Documentation + +### Overview + +Within the `contentcuration` app in Studio, we want to build an API layer that acts as a communication bridge with different backends, such as Docker images, Google Cloud Platform's Vertex AI, VM instances, and cloud storage services. The goal is to make sure this API layer can work with these backends, regardless of where or how they do the job. As long as the input and output formats stay the same, this setup provides flexibility in choosing and using backend resources. + +### Description and outcomes + +The stand-alone deployed backend service(s) will not have direct access to `contentcuration` models or the database for that matter, so this API layer facilitates access to these resources by receiving and returning standardized requests and responses, irrespective of the backend interacted with. + +#### The Architecture + +*(Architecture diagram: Screenshot 2023-09-11 at 14 50 06)* + +The key components of this architecture are as follows: + +#### 1. Creating the Backend Interface + +The Backend class serves as an abstract interface that outlines the operations all backends must support. It implements the Singleton pattern to ensure that only one instance of each backend type can exist. The methods defined by the Backend class are: + +```python +ABSTRACT CLASS Backend: + _instance = None # Private variable to hold the instance + + ABSTRACT METHOD connect() + # Provides a blueprint to connect + pass + + ABSTRACT METHOD make_request(params) + # Provides a blueprint to make a request + pass + + ABSTRACT METHOD request(params) + # Provides a blueprint for the request object + pass + + ABSTRACT METHOD response(params) + # Provides a blueprint for the response object + pass + + CLASS METHOD get_instance(cls) + IF cls._instance is None: + cls._instance = cls._create_instance() + return cls._instance + + CLASS METHOD _create_instance(cls) + raise NotImplementedError # concrete class must implement +``` + +Different backends can now be created by implementing the base `Backend` class: + +```python +# Implement CONCRETE CLASS using ABSTRACT Backend class +CLASS GCS IMPLEMENTS Backend: + METHOD make_request(request): + # make request to Google Cloud Storage services + + METHOD connect(params): + # Implement the connect method for GCS + + CLASS METHOD _create_instance(cls) + # initialize a GCS Backend instance + +CLASS ML IMPLEMENTS Backend: + METHOD make_request(request): + # make request to DeepLearning models hosted as service + + METHOD connect(params): + # Implement the connect method for hosted ML service + + CLASS METHOD _create_instance(cls) + # initialize a ML Backend instance + +CLASS OtherBackend IMPLEMENTS Backend: + ... + [you get the idea] +``` + +To create an instance of a backend, using the `ML` class as an example, use the `get_instance()` method: + +```python +>>> backend = ML.get_instance() +``` + +To centralize the creation of `Backend` instances based on specific Django settings (e.g. dev vs. production environments), create a `BackendFactory` class. This should follow the Factory design pattern. + +```python +# Factory to instantiate the Backend based on Django Settings +CLASS BackendFactory: + METHOD create_backend(self, backend=None) -> Backend + IF backend: + return backend + ELSE: + # Create a Backend instance based on Django settings + IF DjangoSettings is 'SomeSetting': + backend = GCS.get_instance() # Use of Singleton pattern + ELSE IF DjangoSettings is 'AnotherSetting': + backend = ML.get_instance() + ELSE + RAISE ValueError + # Return the created Backend instance + RETURN backend +``` +The `BackendFactory`'s `create_backend` method optionally allows a `Backend` instance to be injected into the factory instead of relying solely on Django settings. This is particularly useful if we want to explicitly specify the backend to use.
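Rendered as real (if minimal) Python against the `automation.utils.appnexus.base` module introduced later in this diff, the factory could look like the sketch below. The `AUTOMATION_BACKEND` setting name and the `MLBackend` class are assumptions for illustration only:

```python
from django.conf import settings

from automation.utils.appnexus.base import Backend
from automation.utils.appnexus.base import BackendFactory


class MLBackend(Backend):
    """Illustrative concrete backend; not part of this changeset."""

    def connect(self) -> None:
        pass  # e.g. open a session to the hosted model service

    def make_request(self, request):
        pass  # e.g. POST the request payload and wrap the response

    @classmethod
    def _create_instance(cls) -> "MLBackend":
        return cls()


class SettingsBackendFactory(BackendFactory):
    def create_backend(self, backend: Backend = None) -> Backend:
        # An injected backend takes precedence over Django settings
        if backend is not None:
            return backend
        # AUTOMATION_BACKEND is a hypothetical Django setting
        if getattr(settings, "AUTOMATION_BACKEND", "ml") == "ml":
            return MLBackend.get_instance()
        raise ValueError("Unknown automation backend")


backend = SettingsBackendFactory().create_backend()
```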
+ +### Creating an Adapter that accepts any Backend + +The **`Adapter`** class can optionally be initialized with a `Backend` instance, and provides a `make_request` method that forwards requests to the chosen `Backend` while adhering to its specific `request` and `response` formats. + +```python +CLASS Adapter: + + METHOD __init__(self, backend (Optional), defaults to None) + # Initialize the Backend with BackendFactory + backend_factory = BackendFactory() + SET backend = backend_factory.create_backend(backend) + + METHOD request(self): + # something + return self.backend.request() + + METHOD response(self): + # something + return self.backend.response() +``` + +With this `Adapter` class in place, we can create adapters that are able to interact with any backend we need. + +```python +CLASS Recommendation INHERITS ADAPTER: + METHOD generateEmbeddings(self, request) -> Boolean + # [ Implementation ] + + METHOD getRecommendation(self, request) -> Array + # [ Implementation ] + +CLASS Transcription INHERITS ADAPTER: + METHOD generateCaption(self, request) -> Array + # [ Implementation ] + +CLASS OtherAdapter INHERITS ADAPTER: + METHOD someOperation(self, request) -> Any + # Operation that any backend wants +``` + +Below is a sample use case, using the `ML` backend as an example: + +```python +>>> backend = ML.get_instance() +>>> adapter = Transcription(backend) +``` + +To access specific methods within the adapter: + +```python +>>> adapter.generateCaption(...) +``` + +### Resources + +[OOP Design patterns](https://refactoring.guru/design-patterns/catalog) diff --git a/contentcuration/automation/utils/appnexus/__init__.py b/contentcuration/automation/utils/appnexus/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/contentcuration/automation/utils/appnexus/base.py b/contentcuration/automation/utils/appnexus/base.py new file mode 100644 index 0000000000..ab9e6d5096 --- /dev/null +++ b/contentcuration/automation/utils/appnexus/base.py @@ -0,0 +1,62 @@ +from abc import ABC +from abc import abstractmethod +from builtins import NotImplementedError + + +class BackendRequest(object): + """ Class that should be inherited by specific backend for its requests""" + pass + + +class BackendResponse(object): + """ Class that should be inherited by specific backend for its responses""" + pass + + +class Backend(ABC): + """ An abstract base class for backend interfaces that also implements the singleton pattern """ + _instance = None + + def __new__(class_, *args, **kwargs): + if not isinstance(class_._instance, class_): + class_._instance = object.__new__(class_, *args, **kwargs) + return class_._instance + + @abstractmethod + def connect(self) -> None: + """ Establishes a connection to the backend service. """ + pass + + @abstractmethod + def make_request(self, request) -> BackendResponse: + """ Make a request based on "request" """ + pass + + @classmethod + def get_instance(cls) -> 'Backend': + """ Returns existing instance, if not then create one. """ + return cls._instance if cls._instance else cls._create_instance() + + @classmethod + def _create_instance(cls) -> 'Backend': + """ Returns the instance after creating it. """ + raise NotImplementedError("Subclasses should implement the creation of instance") + + +class BackendFactory(ABC): + @abstractmethod + def create_backend(self) -> Backend: + """ Create a Backend instance from the given backend. """ + pass + + +class Adapter: + """ + Base class for adapters that interact with a backend interface.
+ + This class should be inherited by adapter classes that facilitate + interaction with different backend implementations. + """ + + def __init__(self, backend: Backend) -> None: + self.backend = backend diff --git a/contentcuration/automation/views.py b/contentcuration/automation/views.py new file mode 100644 index 0000000000..fd0e044955 --- /dev/null +++ b/contentcuration/automation/views.py @@ -0,0 +1,3 @@ +# from django.shortcuts import render + +# Create your views here. diff --git a/contentcuration/contentcuration/api.py b/contentcuration/contentcuration/api.py index 33c9692cbc..b297ffaba6 100644 --- a/contentcuration/contentcuration/api.py +++ b/contentcuration/contentcuration/api.py @@ -10,9 +10,6 @@ from django.core.files.storage import default_storage import contentcuration.models as models -from contentcuration.utils.garbage_collect import get_deleted_chefs_root -from contentcuration.viewsets.sync.constants import CHANNEL -from contentcuration.viewsets.sync.utils import generate_update_event def write_file_to_storage(fobj, check_valid=False, name=None): @@ -68,33 +65,3 @@ def get_hash(fobj): md5.update(chunk) fobj.seek(0) return md5.hexdigest() - - -def activate_channel(channel, user): - user.check_channel_space(channel) - - if channel.previous_tree and channel.previous_tree != channel.main_tree: - # IMPORTANT: Do not remove this block, MPTT updating the deleted chefs block could hang the server - with models.ContentNode.objects.disable_mptt_updates(): - garbage_node = get_deleted_chefs_root() - channel.previous_tree.parent = garbage_node - channel.previous_tree.title = "Previous tree for channel {}".format(channel.pk) - channel.previous_tree.save() - - channel.previous_tree = channel.main_tree - channel.main_tree = channel.staging_tree - channel.staging_tree = None - channel.save() - - user.staged_files.all().delete() - user.set_space_used() - - models.Change.create_change(generate_update_event( - channel.id, - CHANNEL, - { - "root_id": channel.main_tree.id, - "staging_root_id": None - }, - channel_id=channel.id, - ), applied=True, created_by_id=user.id) diff --git a/contentcuration/contentcuration/constants/channel_history.py b/contentcuration/contentcuration/constants/channel_history.py index 28de05e035..790b4dfd51 100644 --- a/contentcuration/contentcuration/constants/channel_history.py +++ b/contentcuration/contentcuration/constants/channel_history.py @@ -1,13 +1,11 @@ -from django.utils.translation import ugettext_lazy as _ - CREATION = "creation" PUBLICATION = "publication" DELETION = "deletion" RECOVERY = "recovery" choices = ( - (CREATION, _("Creation")), - (PUBLICATION, _("Publication")), - (DELETION, _("Deletion")), - (RECOVERY, _("Deletion recovery")), + (CREATION, "Creation"), + (PUBLICATION, "Publication"), + (DELETION, "Deletion"), + (RECOVERY, "Deletion recovery"), ) diff --git a/contentcuration/contentcuration/constants/locking.py b/contentcuration/contentcuration/constants/locking.py new file mode 100644 index 0000000000..6b53fbd081 --- /dev/null +++ b/contentcuration/contentcuration/constants/locking.py @@ -0,0 +1,5 @@ +""" +Constants for locking behaviors, like advisory locking in Postgres, and mutexes +""" +TREE_LOCK = 1001 +TASK_LOCK = 1002 diff --git a/contentcuration/contentcuration/constants/user_history.py b/contentcuration/contentcuration/constants/user_history.py new file mode 100644 index 0000000000..9adc9b56c6 --- /dev/null +++ b/contentcuration/contentcuration/constants/user_history.py @@ -0,0 +1,9 @@ +DELETION = "soft-deletion" +RECOVERY = "soft-recovery" +RELATED_DATA_HARD_DELETION = "related-data-hard-deletion" + +choices = ( + (DELETION, "User soft deletion"), + (RECOVERY, "User soft deletion recovery"), + (RELATED_DATA_HARD_DELETION, "User related data hard deletion"), +) diff --git a/contentcuration/contentcuration/db/advisory_lock.py b/contentcuration/contentcuration/db/advisory_lock.py index 61d53a379f..f1d71995ed 100644 --- a/contentcuration/contentcuration/db/advisory_lock.py +++ b/contentcuration/contentcuration/db/advisory_lock.py @@ -6,11 +6,36 @@ logging = logger.getLogger(__name__) +# unsigned limits are 2**32 or 2**64, so one less power of 2 +# to become signed limits (half above 0, half below 0) +INT_32BIT = 2**31 +INT_64BIT = 2**63 + class AdvisoryLockBusy(RuntimeError): pass +def _prepare_keys(keys): + """ + Ensures that integers do not exceed postgres constraints: + - signed 64-bit allowed with a single key + - signed 32-bit allowed with two keys + :param keys: A list of unsigned integers + :return: A list of signed integers + """ + limit = INT_64BIT if len(keys) == 1 else INT_32BIT + new_keys = [] + for key in keys: + # if the unsigned key is over the signed limit, map it into the negative range + if key >= limit: + key = limit - key + if key < -limit or key >= limit: + raise OverflowError(f"Advisory lock key '{key}' is too large") + new_keys.append(key) + return new_keys + + @contextmanager def execute_lock(key1, key2=None, unlock=False, session=False, shared=False, wait=True): """ @@ -32,6 +57,7 @@ keys = [key1] if key2 is not None: keys.append(key2) + keys = _prepare_keys(keys) query = "SELECT pg{_try}_advisory_{xact_}{lock}{_shared}({keys}) AS lock;".format( _try="" if wait else "_try", @@ -41,11 +67,11 @@ keys=", ".join(["%s" for i in range(0, 2 if key2 is not None else 1)]) ) - log_query = "'{}' with params {}".format(query, keys) - logging.debug("Acquiring advisory lock: {}".format(query, log_query)) + log_query = f"'{query}' with params {keys}" + logging.debug(f"Acquiring advisory lock: {log_query}") with connection.cursor() as c: c.execute(query, keys) - logging.debug("Acquired advisory lock: {}".format(query, log_query)) + logging.debug(f"Acquired advisory lock: {log_query}") yield c diff --git a/contentcuration/contentcuration/db/models/manager.py b/contentcuration/contentcuration/db/models/manager.py index 3556fe8e70..db1e3a77bf 100644 --- a/contentcuration/contentcuration/db/models/manager.py +++ b/contentcuration/contentcuration/db/models/manager.py @@ -12,6 +12,7 @@ from mptt.managers import TreeManager from mptt.signals import node_moved +from contentcuration.constants.locking import TREE_LOCK from contentcuration.db.advisory_lock import advisory_lock from contentcuration.db.models.query import CustomTreeQuerySet from contentcuration.utils.cache import ResourceSizeCache @@ -32,7 +33,6 @@ # The exact optimum batch size is probably highly dependent on tree # topology also, so these rudimentary tests are likely insufficient BATCH_SIZE = 100 -TREE_LOCK = 1001 class CustomManager(Manager.from_queryset(CTEQuerySet)): @@ -47,14 +47,33 @@ def log_lock_time_spent(timespent): logging.debug("Spent {} seconds inside an mptt lock".format(timespent)) -def execute_queryset_without_results(queryset): - query = queryset.query - compiler = query.get_compiler(queryset.db) - sql, params = compiler.as_sql() - if not sql: - return - cursor = compiler.connection.cursor() - cursor.execute(sql, params) +# Fields that are allowed to be overridden on copies coming from a source that the user +# does not have edit rights to. +ALLOWED_OVERRIDES = { + "node_id", + "title", + "description", + "aggregator", + "provider", + "language_id", + "grade_levels", + "resource_types", + "learning_activities", + "accessibility_labels", + "categories", + "learner_needs", + "role", + "extra_fields", + "suggested_duration", +} + +EDIT_ALLOWED_OVERRIDES = ALLOWED_OVERRIDES.union({ + "license_id", + "license_description", + "extra_fields", + "copyright_holder", + "author", +}) class CustomContentNodeTreeManager(TreeManager.from_queryset(CustomTreeQuerySet)): @@ -272,7 +291,10 @@ copy.update(self.get_source_attributes(source)) if isinstance(mods, dict): - copy.update(mods) + allowed_keys = EDIT_ALLOWED_OVERRIDES if can_edit_source_channel else ALLOWED_OVERRIDES + for key, value in mods.items(): + if key in copy and key in allowed_keys: + copy[key] = value # There might be some legacy nodes that don't have these, so ensure they are added if ( @@ -465,6 +487,7 @@ tag_id_map[tag.id] = new_tag.id tags_to_create.append(new_tag) + # TODO: Can clean up the above and change the below to use ignore_conflicts=True ContentTag.objects.bulk_create(tags_to_create) mappings_to_create = [ @@ -477,7 +500,10 @@ for mapping in node_tags_mappings ] - self.model.tags.through.objects.bulk_create(mappings_to_create) + # In the case that we are copying a node that is in the weird state of having a tag + # that is duplicated (with a channel tag and a null channel tag), this can cause an error, + # so we ignore conflicts here to skip the duplicate tags.
diff --git a/contentcuration/contentcuration/decorators.py b/contentcuration/contentcuration/decorators.py
index e8a2dd5a0d..9c51e83b7a 100644
--- a/contentcuration/contentcuration/decorators.py
+++ b/contentcuration/contentcuration/decorators.py
@@ -76,6 +76,10 @@ class DelayUserStorageCalculation(ContextDecorator):
     def is_active(self):
         return self.depth > 0

+    def add(self, user_id):
+        if user_id not in self.queue:
+            self.queue.append(user_id)
+
     def __enter__(self):
         self.depth += 1
diff --git a/contentcuration/contentcuration/dev_settings.py b/contentcuration/contentcuration/dev_settings.py
index d81d23a993..439bdef8af 100644
--- a/contentcuration/contentcuration/dev_settings.py
+++ b/contentcuration/contentcuration/dev_settings.py
@@ -5,4 +5,4 @@

 ROOT_URLCONF = "contentcuration.dev_urls"

-INSTALLED_APPS += ("drf_yasg",)
+INSTALLED_APPS += ("drf_yasg", "automation")
diff --git a/contentcuration/contentcuration/forms.py b/contentcuration/contentcuration/forms.py
index 973916431e..d9dc781f61 100644
--- a/contentcuration/contentcuration/forms.py
+++ b/contentcuration/contentcuration/forms.py
@@ -7,6 +7,7 @@
 from django.contrib.auth.forms import UserChangeForm
 from django.contrib.auth.forms import UserCreationForm
 from django.core import signing
+from django.db.models import Q
 from django.template.loader import render_to_string

 from contentcuration.models import User
@@ -45,7 +46,7 @@ class RegistrationForm(UserCreationForm, ExtraFormMixin):

     def clean_email(self):
         email = self.cleaned_data['email'].strip().lower()
-        if User.objects.filter(email__iexact=email, is_active=True).exists():
+        if User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email).exists():
             raise UserWarning
         return email
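The `clean_email` change above closes a gap in the duplicate check: previously only active accounts blocked registration, so an address tied to a soft-deleted account could be registered a second time. The `Q(is_active=True) | Q(deleted=True)` filter reserves the address in either state. A rough pure-Python equivalent of the condition, assuming the `is_active` and `deleted` flags on the `User` model; the in-memory dataclass stands in for the ORM query and is only for illustration:

```python
from dataclasses import dataclass


@dataclass
class User:
    email: str
    is_active: bool = False
    deleted: bool = False


def email_taken(users, email):
    """True if the address belongs to an active OR soft-deleted account.

    Mirrors: User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email)
    """
    email = email.strip().lower()
    return any((u.is_active or u.deleted) and u.email.lower() == email for u in users)


accounts = [User("old@example.com", deleted=True)]
print(email_taken(accounts, "  Old@Example.com "))
# True -- a soft-deleted account still reserves the address
```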

diff --git a/contentcuration/contentcuration/frontend/accounts/components/MessageLayout.vue b/contentcuration/contentcuration/frontend/accounts/components/MessageLayout.vue
index 0a89c4ba5d..869ccf3a88 100644
--- a/contentcuration/contentcuration/frontend/accounts/components/MessageLayout.vue
+++ b/contentcuration/contentcuration/frontend/accounts/components/MessageLayout.vue
@@ -13,7 +13,10 @@
- +

diff --git a/contentcuration/contentcuration/frontend/accounts/pages/Create.vue b/contentcuration/contentcuration/frontend/accounts/pages/Create.vue index 361ffce01d..b211cd5634 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/Create.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/Create.vue @@ -15,7 +15,7 @@

{{ $tr('createAnAccountTitle') }}

- + {{ registrationFailed ? $tr('registrationFailed') : $tr('errorsMessage') }} @@ -131,42 +131,45 @@ /> - - + +
+ + - - - + + | + + + +
+ +
+
+ {{ $tr('contactMessage') }} +
+
-

- {{ $tr('contactMessage') }} -

- - {{ $tr('finishButton') }} - +
@@ -238,12 +241,19 @@
    passwordConfirmRules() {
      return [value => (this.form.password1 === value ? true : this.$tr('passwordMatchMessage'))];
    },
-    tosRules() {
+    tosAndPolicyRules() {
      return [value => (value ? true : this.$tr('ToSRequiredMessage'))];
    },
-    policyRules() {
-      return [value => (value ? true : this.$tr('privacyPolicyRequiredMessage'))];
+    acceptedAgreement: {
+      get() {
+        return this.form.accepted_tos && this.form.accepted_policy;
+      },
+      set(accepted) {
+        this.form.accepted_tos = accepted;
+        this.form.accepted_policy = accepted;
+      },
    },
+
    usageOptions() {
      return [
        {
@@ -350,7 +360,7 @@
    },
    clean() {
      return data => {
-        let cleanedData = { ...data, policies: {} };
+        const cleanedData = { ...data, policies: {} };
        Object.keys(cleanedData).forEach(key => {
          // Trim text fields
          if (key === 'source') {
@@ -413,10 +423,9 @@
    showOtherField(id) {
      return id === uses.OTHER && this.form.uses.includes(id);
    },
-
    submit() {
      if (this.$refs.form.validate()) {
-        let cleanedData = this.clean(this.form);
+        const cleanedData = this.clean(this.form);
        return this.register(cleanedData)
          .then(() => {
            this.$router.push({ name: 'ActivationSent' });
@@ -439,6 +448,7 @@
      return Promise.resolve();
    },
  },
+
  $trs: {
    backToLoginButton: 'Sign in',
    createAnAccountTitle: 'Create an account',
@@ -447,7 +457,6 @@
    registrationFailed: 'There was an error registering your account. Please try again',
    registrationFailedOffline: 'You seem to be offline. Please connect to the internet to create an account.',
-
    // Basic information strings
    basicInformationHeader: 'Basic information',
    firstNameLabel: 'First name',
@@ -492,15 +501,13 @@
    otherSourcePlaceholder: 'Please describe',

    // Privacy policy + terms of service
-    viewToSLink: 'View terms of service',
-    ToSCheck: 'I have read and agree to the terms of service',
-    ToSRequiredMessage: 'Please accept our terms of service',
+    viewToSLink: 'View Terms of Service',
+    ToSRequiredMessage: 'Please accept our terms of service and privacy policy',
-    viewPrivacyPolicyLink: 'View privacy policy',
-    privacyPolicyCheck: 'I have read and agree to the privacy policy',
-    privacyPolicyRequiredMessage: 'Please accept our privacy policy',
+    viewPrivacyPolicyLink: 'View Privacy Policy',
    contactMessage: 'Questions or concerns? Please email us at content@learningequality.org',
    finishButton: 'Finish',
+    agreement: 'I have read and agree to the terms of service and the privacy policy',
  },
};

@@ -521,6 +528,11 @@
    }
  }

+  .policy-checkbox /deep/ .v-messages {
+    min-height: 0;
+    margin-left: 40px;
+  }
+
  iframe {
    width: 100%;
    min-height: 400px;
@@ -529,4 +541,23 @@
    border: 0;
  }

+  .span-spacing {
+    display: flex;
+    margin-left: 40px;
+  }
+
+  .span-spacing span {
+    margin-left: 2px;
+    font-size: 16px;
+  }
+
+  .span-spacing-email {
+    margin-left: 3px;
+    font-size: 16px;
+  }
+
+  .align-items {
+    display: block;
+  }
+
diff --git a/contentcuration/contentcuration/frontend/accounts/pages/Main.vue b/contentcuration/contentcuration/frontend/accounts/pages/Main.vue
index c15a96fca1..49833f60e2 100644
--- a/contentcuration/contentcuration/frontend/accounts/pages/Main.vue
+++ b/contentcuration/contentcuration/frontend/accounts/pages/Main.vue
@@ -6,12 +6,12 @@
    justify-center
    class="main pt-5"
  >
-

- -

- + {{ $tr('kolibriStudio') }} - - + + - - - + + +

- +

- - {{ $tr('signInButton') }} - - - {{ $tr('createAccountButton') }} - + +

- +

@@ -90,7 +123,6 @@
  import PolicyModals from 'shared/views/policies/PolicyModals';
  import { policies } from 'shared/constants';
  import LanguageSwitcherList from 'shared/languageSwitcher/LanguageSwitcherList';
-  import OfflineText from 'shared/views/OfflineText';

  export default {
    name: 'Main',
@@ -100,7 +132,6 @@
      LanguageSwitcherList,
      PasswordField,
      PolicyModals,
-      OfflineText,
    },
    data() {
      return {
@@ -132,7 +163,7 @@
      submit() {
        if (this.$refs.form.validate()) {
          this.busy = true;
-          let credentials = {
+          const credentials = {
            username: this.username,
            password: this.password,
          };
@@ -180,6 +211,7 @@
  .main {
    overflow: auto;
+    /* stylelint-disable-next-line custom-property-pattern */
    background-color: var(--v-backgroundColor-base);
  }
@@ -191,10 +223,8 @@
    content: '•';
  }

-  .corner {
-    position: absolute;
-    top: 1em;
-    left: 1em;
+  .w-100 {
+    width: 100%;
  }

diff --git a/contentcuration/contentcuration/frontend/accounts/pages/__tests__/create.spec.js b/contentcuration/contentcuration/frontend/accounts/pages/__tests__/create.spec.js
index dc9f24df06..f2a201b560 100644
--- a/contentcuration/contentcuration/frontend/accounts/pages/__tests__/create.spec.js
+++ b/contentcuration/contentcuration/frontend/accounts/pages/__tests__/create.spec.js
@@ -34,7 +34,7 @@ const defaultData = {
  const register = jest.fn();

  function makeWrapper(formData) {
-    let wrapper = mount(Create, {
+    const wrapper = mount(Create, {
      router,
      computed: {
        getPolicyAcceptedData() {
@@ -62,7 +62,6 @@
    });
    return wrapper;
  }
-
  function makeFailedPromise(statusCode) {
    return () => {
      return new Promise((resolve, reject) => {
@@ -81,13 +80,13 @@
    });
    it('should trigger submit method when form is submitted', () => {
      const submit = jest.fn();
-      let wrapper = makeWrapper();
+      const wrapper = makeWrapper();
      wrapper.setMethods({ submit });
      wrapper.find({ ref: 'form' }).trigger('submit');
      expect(submit).toHaveBeenCalled();
    });
    it('should call register with form data', () => {
-      let wrapper = makeWrapper();
+      const wrapper = makeWrapper();
      wrapper.find({ ref: 'form' }).trigger('submit');
      expect(register.mock.calls[0][0]).toEqual({
        ...defaultData,
@@ -98,12 +97,12 @@
    });
    it('should automatically fill the email if provided in the query param', () => {
      router.push({ name: 'Create', query: { email: 'newtest@test.com' } });
-      let wrapper = mount(Create, { router, stubs: ['PolicyModals'], mocks: connectionStateMocks });
+      const wrapper = mount(Create, { router, stubs: ['PolicyModals'], mocks: connectionStateMocks });
      expect(wrapper.vm.form.email).toBe('newtest@test.com');
    });
    describe('validation', () => {
      it('should call register if form is valid', () => {
-        let wrapper = makeWrapper();
+        const wrapper = makeWrapper();
        wrapper.vm.submit();
        expect(register).toHaveBeenCalled();
      });
@@ -122,26 +121,26 @@
        };
        Object.keys(form).forEach(field => {
-          let wrapper = makeWrapper({ [field]: form[field] });
+          const wrapper = makeWrapper({ [field]: form[field] });
          wrapper.vm.submit();
          expect(register).not.toHaveBeenCalled();
        });
      });
      it('should fail if password1 and password2 do not match', () => {
-        let wrapper = makeWrapper({ password1: 'some other password' });
+        const wrapper = makeWrapper({ password1: 'some other password' });
        wrapper.vm.submit();
        expect(register).not.toHaveBeenCalled();
      });
      it('should fail if uses field is set to fields that require more input that is not provided', () => {
        [uses.STORING, uses.OTHER].forEach(use => {
-          let wrapper = 
makeWrapper({ uses: [use] }); + const wrapper = makeWrapper({ uses: [use] }); wrapper.vm.submit(); expect(register).not.toHaveBeenCalled(); }); }); it('should fail if source field is set to an option that requires more input that is not provided', () => { [sources.ORGANIZATION, sources.CONFERENCE, sources.OTHER].forEach(source => { - let wrapper = makeWrapper({ source }); + const wrapper = makeWrapper({ source }); wrapper.vm.submit(); expect(register).not.toHaveBeenCalled(); }); diff --git a/contentcuration/contentcuration/frontend/accounts/pages/__tests__/main.spec.js b/contentcuration/contentcuration/frontend/accounts/pages/__tests__/main.spec.js index dc57ccf210..2525a83d57 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/__tests__/main.spec.js +++ b/contentcuration/contentcuration/frontend/accounts/pages/__tests__/main.spec.js @@ -5,7 +5,7 @@ import Main from '../Main'; const login = jest.fn(); function makeWrapper() { - let wrapper = mount(Main, { + const wrapper = mount(Main, { router, stubs: ['GlobalSnackbar', 'PolicyModals'], mocks: { diff --git a/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue b/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue index f881737065..032cfc506a 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue @@ -5,7 +5,7 @@ > @@ -24,7 +24,7 @@ }, $trs: { accountDeletedTitle: 'Account successfully deleted', - continueToSignIn: 'Continue to sign-in page', + backToLogin: 'Continue to sign-in page', }, }; diff --git a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue index b55bbdf418..14e106c232 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue @@ -5,7 +5,7 @@ > @@ -24,7 +24,7 @@ }, $trs: { accountCreatedTitle: 'Account successfully created', - continueToSignIn: 'Continue to sign-in', + backToLogin: 'Continue to sign-in page', }, }; diff --git a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountNotActivated.vue b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountNotActivated.vue index a0db43bca2..ccbec003d1 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountNotActivated.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountNotActivated.vue @@ -4,9 +4,12 @@ :header="$tr('title')" :text="$tr('text')" > - - {{ $tr('requestNewLink') }} - + diff --git a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/ActivationExpired.vue b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/ActivationExpired.vue index cdf2db7a43..9dd1dad18c 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/ActivationExpired.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/ActivationExpired.vue @@ -4,9 +4,12 @@ :header="$tr('activationExpiredTitle')" :text="$tr('activationExpiredText')" > - - {{ $tr('requestNewLink') }} - + diff --git a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/RequestNewActivationLink.vue 
b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/RequestNewActivationLink.vue index df889b3ecb..afb06cdaca 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/RequestNewActivationLink.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/RequestNewActivationLink.vue @@ -11,9 +11,12 @@ > - - {{ $tr('submitButton') }} - + @@ -64,3 +67,11 @@ }; + + \ No newline at end of file diff --git a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ForgotPassword.vue b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ForgotPassword.vue index 13910a9bad..7349cf97ff 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ForgotPassword.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ForgotPassword.vue @@ -7,9 +7,12 @@ - - {{ $tr('submitButton') }} - + @@ -65,3 +68,11 @@ }; + + \ No newline at end of file diff --git a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetLinkExpired.vue b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetLinkExpired.vue index 4f0c8fc057..0249fd9fda 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetLinkExpired.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetLinkExpired.vue @@ -4,9 +4,12 @@ :header="$tr('resetExpiredTitle')" :text="$tr('resetExpiredText')" > - - {{ $tr('requestNewLink') }} - + diff --git a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue index b3866f5ccd..2fd3ceddd3 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue @@ -16,9 +16,12 @@ :label="$tr('passwordConfirmLabel')" :additionalRules="passwordConfirmRules" /> - - {{ $tr('submitButton') }} - + @@ -55,7 +58,7 @@ submit() { this.error = false; if (this.$refs.form.validate()) { - let payload = { + const payload = { ...this.$route.query, new_password1: this.new_password1, new_password2: this.new_password2, @@ -84,3 +87,11 @@ }; + + diff --git a/contentcuration/contentcuration/frontend/accounts/vuex/index.js b/contentcuration/contentcuration/frontend/accounts/vuex/index.js index bc95d1ed29..f64df2778a 100644 --- a/contentcuration/contentcuration/frontend/accounts/vuex/index.js +++ b/contentcuration/contentcuration/frontend/accounts/vuex/index.js @@ -17,7 +17,7 @@ export default { return client.post(window.Urls.auth_password_reset(), { email }); }, setPassword(context, { uidb64, token, new_password1, new_password2 }) { - let data = { + const data = { new_password1, new_password2, }; diff --git a/contentcuration/contentcuration/frontend/administration/mixins.js b/contentcuration/contentcuration/frontend/administration/mixins.js index 529c75ffae..c27e933ed5 100644 --- a/contentcuration/contentcuration/frontend/administration/mixins.js +++ b/contentcuration/contentcuration/frontend/administration/mixins.js @@ -24,7 +24,7 @@ export function generateFilterMixin(filterMap) { return this.$route.query.keywords; }, set(value) { - let params = { ...this.$route.query, page: 1 }; + const params = { ...this.$route.query, page: 1 }; if (value) { params.keywords = value; } else { @@ -37,7 +37,7 @@ export function generateFilterMixin(filterMap) { get() { // 
Return filter where all param conditions are met const filterKeys = intersection(Object.keys(this.$route.query), paramKeys); - let key = findKey(filterMap, value => { + const key = findKey(filterMap, value => { return filterKeys.every(field => { return value.params[field] === _getBooleanVal(this.$route.query[field]); }); @@ -115,7 +115,7 @@ export const tableMixin = { computed: { pagination: { get() { - let params = { + const params = { rowsPerPage: Number(this.$route.query.page_size) || 25, page: Number(this.$route.query.page) || 1, }; @@ -160,7 +160,7 @@ export const tableMixin = { ...this.$route.query, }; if (params.sortBy) { - params.ordering = (params.descending ? '-' : '') + params.sortBy; + params.ordering = (String(params.descending) === 'true' ? '-' : '') + params.sortBy; delete params.sortBy; delete params.descending; } diff --git a/contentcuration/contentcuration/frontend/administration/pages/AdministrationIndex.vue b/contentcuration/contentcuration/frontend/administration/pages/AdministrationIndex.vue index 7ed794230f..10f2b67efb 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/AdministrationIndex.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/AdministrationIndex.vue @@ -128,6 +128,7 @@ .v-icon:not(.v-icon--is-component) { font-size: 16pt !important; + /* stylelint-disable-next-line custom-property-pattern */ color: var(--v-darkGrey-darken1) !important; opacity: 1 !important; transform: none !important; @@ -159,6 +160,7 @@ } tr:hover td { + /* stylelint-disable-next-line custom-property-pattern */ background-color: var(--v-greyBackground-base) !important; } diff --git a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelActionsDropdown.vue b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelActionsDropdown.vue index d71841492f..491706777c 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelActionsDropdown.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelActionsDropdown.vue @@ -153,7 +153,7 @@ return { name: RouteNames.USERS, query: { - keywords: `${this.name} ${this.channel.id}`, + keywords: `${this.channel.id}`, }, }; }, diff --git a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelDetails.vue b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelDetails.vue index a7029e2710..b867ee36e8 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelDetails.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelDetails.vue @@ -19,7 +19,7 @@ - + This channel has been deleted @@ -102,6 +102,9 @@ channel() { return this.getChannel(this.channelId); }, + isDeleted() { + return this.channel && Boolean(this.channel?.deleted); + }, channelWithDetails() { if (!this.channel || !this.details) { return {}; diff --git a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelItem.vue b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelItem.vue index 3e975714af..d87e1d4d3f 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelItem.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelItem.vue @@ -176,8 +176,8 @@ import ClipboardChip from '../../components/ClipboardChip'; import { RouteNames } from '../../constants'; import ChannelActionsDropdown from './ChannelActionsDropdown'; 
- import Checkbox from 'shared/views/form/Checkbox'; import { fileSizeMixin } from 'shared/mixins'; + import Checkbox from 'shared/views/form/Checkbox'; export default { name: 'ChannelItem', @@ -232,7 +232,7 @@ return { name: RouteNames.USERS, query: { - keywords: `${this.channel.name} ${this.channelId}`, + keywords: `${this.channelId}`, }, }; }, diff --git a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelTable.vue b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelTable.vue index 76bfc4d4e2..b4185d8a85 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelTable.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/Channels/ChannelTable.vue @@ -100,8 +100,8 @@ diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/progress/ProgressModal.vue b/contentcuration/contentcuration/frontend/channelEdit/views/progress/ProgressModal.vue index f5ce76de6b..a124e0cc6e 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/views/progress/ProgressModal.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/views/progress/ProgressModal.vue @@ -1,14 +1,21 @@