diff --git a/.github/workflows/pylint-checks.yml b/.github/workflows/pylint-checks.yml index 9a654e09e711..1afa032b6ad1 100644 --- a/.github/workflows/pylint-checks.yml +++ b/.github/workflows/pylint-checks.yml @@ -22,7 +22,7 @@ jobs: - module-name: openedx-2 path: "openedx/core/djangoapps/geoinfo/ openedx/core/djangoapps/header_control/ openedx/core/djangoapps/heartbeat/ openedx/core/djangoapps/lang_pref/ openedx/core/djangoapps/models/ openedx/core/djangoapps/monkey_patch/ openedx/core/djangoapps/oauth_dispatch/ openedx/core/djangoapps/olx_rest_api/ openedx/core/djangoapps/password_policy/ openedx/core/djangoapps/plugin_api/ openedx/core/djangoapps/plugins/ openedx/core/djangoapps/profile_images/ openedx/core/djangoapps/programs/ openedx/core/djangoapps/safe_sessions/ openedx/core/djangoapps/schedules/ openedx/core/djangoapps/service_status/ openedx/core/djangoapps/session_inactivity_timeout/ openedx/core/djangoapps/signals/ openedx/core/djangoapps/site_configuration/ openedx/core/djangoapps/system_wide_roles/ openedx/core/djangoapps/theming/ openedx/core/djangoapps/user_api/ openedx/core/djangoapps/user_authn/ openedx/core/djangoapps/util/ openedx/core/djangoapps/verified_track_content/ openedx/core/djangoapps/video_config/ openedx/core/djangoapps/video_pipeline/ openedx/core/djangoapps/waffle_utils/ openedx/core/djangoapps/xblock/ openedx/core/djangoapps/xmodule_django/ openedx/core/tests/ openedx/features/ openedx/testing/ openedx/tests/ openedx/core/djangoapps/notifications/ openedx/core/djangoapps/staticfiles/ openedx/core/djangoapps/content_tagging/" - module-name: common - path: "common pavelib" + path: "common" - module-name: cms path: "cms" - module-name: xmodule diff --git a/.github/workflows/unit-test-shards.json b/.github/workflows/unit-test-shards.json index 1af8af814254..4709930493ce 100644 --- a/.github/workflows/unit-test-shards.json +++ b/.github/workflows/unit-test-shards.json @@ -255,15 +255,13 @@ "common-with-lms": { "settings": 
"lms.envs.test", "paths": [ - "common/djangoapps/", - "pavelib/" + "common/djangoapps/" ] }, "common-with-cms": { "settings": "cms.envs.test", "paths": [ - "common/djangoapps/", - "pavelib/" + "common/djangoapps/" ] }, "xmodule-with-lms": { diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index e691e16e47f1..c635972d5f4c 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -71,29 +71,7 @@ jobs: - name: install system requirements run: | - sudo apt-get update && sudo apt-get install libmysqlclient-dev libxmlsec1-dev lynx openssl - - # This is needed until the ENABLE_BLAKE2B_HASHING can be removed and we - # can stop using MD4 by default. - - name: enable md4 hashing in libssl - run: | - cat <> $GITHUB_ENV - echo "root_lms_unit_tests_count=$(pytest --disable-warnings --collect-only --ds=lms.envs.test lms/ openedx/ common/djangoapps/ xmodule/ pavelib/ -q | head -n -2 | wc -l)" >> $GITHUB_ENV + echo "root_lms_unit_tests_count=$(pytest --disable-warnings --collect-only --ds=lms.envs.test lms/ openedx/ common/djangoapps/ xmodule/ -q | head -n -2 | wc -l)" >> $GITHUB_ENV - name: get GHA unit test paths shell: bash diff --git a/Makefile b/Makefile index 8297f4b27b4b..08b6f14893cc 100644 --- a/Makefile +++ b/Makefile @@ -93,7 +93,6 @@ requirements: dev-requirements ## install development environment requirements # Order is very important in this list: files must appear after everything they include! REQ_FILES = \ requirements/edx/coverage \ - requirements/edx/paver \ requirements/edx-sandbox/base \ requirements/edx/base \ requirements/edx/doc \ @@ -179,7 +178,7 @@ xsslint: ## check xss for quality issuest --config=scripts.xsslint_config \ --thresholds=scripts/xsslint_thresholds.json -pycodestyle: ## check python files for quality issues +pycodestyle: ## check python files for quality issues pycodestyle . 
## Re-enable --lint flag when this issue https://github.com/openedx/edx-platform/issues/35775 is resolved @@ -190,13 +189,13 @@ pii_check: ## check django models for pii annotations --app_name cms \ --coverage \ --lint - + DJANGO_SETTINGS_MODULE=lms.envs.test \ code_annotations django_find_annotations \ --config_file .pii_annotations.yml \ --app_name lms \ --coverage \ - --lint + --lint check_keywords: ## check django models for reserve keywords DJANGO_SETTINGS_MODULE=cms.envs.test \ diff --git a/cms/envs/common.py b/cms/envs/common.py index 20e99974b3fb..591247388a9d 100644 --- a/cms/envs/common.py +++ b/cms/envs/common.py @@ -566,9 +566,6 @@ # .. toggle_tickets: https://github.com/openedx/edx-platform/pull/33911 'ENABLE_GRADING_METHOD_IN_PROBLEMS': False, - # See annotations in lms/envs/common.py for details. - 'ENABLE_BLAKE2B_HASHING': False, - # .. toggle_name: FEATURES['BADGES_ENABLED'] # .. toggle_implementation: DjangoSetting # .. toggle_default: False diff --git a/common/djangoapps/util/memcache.py b/common/djangoapps/util/memcache.py index 2f7e6dc623a0..ce8c70219e2c 100644 --- a/common/djangoapps/util/memcache.py +++ b/common/djangoapps/util/memcache.py @@ -7,7 +7,6 @@ import hashlib from urllib.parse import quote_plus -from django.conf import settings from django.utils.encoding import smart_str @@ -15,10 +14,7 @@ def fasthash(string): """ Hashes `string` into a string representation of a 128-bit digest. 
""" - if settings.FEATURES.get("ENABLE_BLAKE2B_HASHING", False): - hash_obj = hashlib.new("blake2b", digest_size=16) - else: - hash_obj = hashlib.new("md4") + hash_obj = hashlib.new("blake2b", digest_size=16) hash_obj.update(string.encode('utf-8')) return hash_obj.hexdigest() diff --git a/common/djangoapps/util/tests/test_memcache.py b/common/djangoapps/util/tests/test_memcache.py index 13f67f386a00..0047e9fa66cf 100644 --- a/common/djangoapps/util/tests/test_memcache.py +++ b/common/djangoapps/util/tests/test_memcache.py @@ -3,15 +3,11 @@ """ -from django.conf import settings from django.core.cache import caches -from django.test import TestCase, override_settings +from django.test import TestCase from common.djangoapps.util.memcache import safe_key -BLAKE2B_ENABLED_FEATURES = settings.FEATURES.copy() -BLAKE2B_ENABLED_FEATURES["ENABLE_BLAKE2B_HASHING"] = True - class MemcacheTest(TestCase): """ @@ -55,20 +51,6 @@ def test_safe_key_long(self): # The key should now be valid assert self._is_valid_key(key), f'Failed for key length {length}' - @override_settings(FEATURES=BLAKE2B_ENABLED_FEATURES) - def test_safe_key_long_with_blake2b_enabled(self): - # Choose lengths close to memcached's cutoff (250) - for length in [248, 249, 250, 251, 252]: - - # Generate a key of that length - key = 'a' * length - - # Make the key safe - key = safe_key(key, '', '') - - # The key should now be valid - assert self._is_valid_key(key), f'Failed for key length {length}' - def test_long_key_prefix_version(self): # Long key @@ -83,34 +65,6 @@ def test_long_key_prefix_version(self): key = safe_key('key', 'prefix', 'a' * 300) assert self._is_valid_key(key) - @override_settings(FEATURES=BLAKE2B_ENABLED_FEATURES) - def test_long_key_prefix_version_with_blake2b_enabled(self): - - # Long key - key = safe_key('a' * 300, 'prefix', 'version') - assert self._is_valid_key(key) - - # Long prefix - key = safe_key('key', 'a' * 300, 'version') - assert self._is_valid_key(key) - - # Long version - key = 
safe_key('key', 'prefix', 'a' * 300) - assert self._is_valid_key(key) - - def test_safe_key_unicode(self): - - for unicode_char in self.UNICODE_CHAR_CODES: - - # Generate a key with that character - key = chr(unicode_char) - - # Make the key safe - key = safe_key(key, '', '') - - # The key should now be valid - assert self._is_valid_key(key), f'Failed for unicode character {unicode_char}' - def test_safe_key_prefix_unicode(self): for unicode_char in self.UNICODE_CHAR_CODES: diff --git a/common/static/data/geoip/GeoLite2-Country.mmdb b/common/static/data/geoip/GeoLite2-Country.mmdb index 4404d374206e..f32738e62173 100644 Binary files a/common/static/data/geoip/GeoLite2-Country.mmdb and b/common/static/data/geoip/GeoLite2-Country.mmdb differ diff --git a/docs/concepts/testing/testing.rst b/docs/concepts/testing/testing.rst index 9d448afd5bdc..d8a79b9d6619 100644 --- a/docs/concepts/testing/testing.rst +++ b/docs/concepts/testing/testing.rst @@ -1,4 +1,3 @@ -####### Testing ####### @@ -7,7 +6,7 @@ Testing :depth: 3 Overview -======== +******** We maintain two kinds of tests: unit tests and integration tests. @@ -26,10 +25,10 @@ tests. Most of our tests are unit tests or integration tests. Test Types ----------- +========== Unit Tests -~~~~~~~~~~ +---------- - Each test case should be concise: setup, execute, check, and teardown. If you find yourself writing tests with many steps, @@ -38,18 +37,18 @@ Unit Tests - As a rule of thumb, your unit tests should cover every code branch. -- Mock or patch external dependencies. We use the voidspace `Mock Library`_. +- Mock or patch external dependencies using `unittest.mock`_ functions. - We unit test Python code (using `unittest`_) and Javascript (using `Jasmine`_) -.. _Mock Library: http://www.voidspace.org.uk/python/mock/ +.. _unittest.mock: https://docs.python.org/3/library/unittest.mock.html .. _unittest: http://docs.python.org/2/library/unittest.html .. 
_Jasmine: http://jasmine.github.io/ Integration Tests -~~~~~~~~~~~~~~~~~ +----------------- - Test several units at the same time. Note that you can still mock or patch dependencies that are not under test! For example, you might test that @@ -67,7 +66,7 @@ Integration Tests .. _Django test client: https://docs.djangoproject.com/en/dev/topics/testing/overview/ Test Locations --------------- +============== - Python unit and integration tests: Located in subpackages called ``tests``. For example, the tests for the ``capa`` package are @@ -80,14 +79,29 @@ Test Locations the test for ``src/views/module.js`` should be written in ``spec/views/module_spec.js``. -Running Tests -============= +Factories +========= -**Unless otherwise mentioned, all the following commands should be run from inside the lms docker container.** +Many tests delegate set-up to a "factory" class. For example, there are +factories for creating courses, problems, and users. This encapsulates +set-up logic from tests. +Factories are often implemented using `FactoryBoy`_. + +In general, factories should be located close to the code they use. For +example, the factory for creating problem XML definitions is located in +``xmodule/capa/tests/response_xml_factory.py`` because the +``capa`` package handles problem XML. + +.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/ Running Python Unit tests -------------------------- +************************* + +The following commands need to be run within a Python environment in +which requirements/edx/testing.txt has been installed. If you are using a +Docker-based Open edX distribution, then you probably will want to run these +commands within the LMS and/or CMS Docker containers. We use `pytest`_ to run Python tests. Pytest is a testing framework for python and should be your goto for local Python unit testing. @@ -97,16 +111,16 @@ Pytest (and all of the plugins we use with it) has a lot of options. 
Use `pytest Running Python Test Subsets -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +=========================== When developing tests, it is often helpful to be able to really just run one single test without the overhead of PIP installs, UX builds, etc. Various ways to run tests using pytest:: - pytest path/test_m­odule.py # Run all tests in a module. - pytest path/test_m­odule.p­y:­:te­st_func # Run a specific test within a module. - pytest path/test_m­odule.p­y:­:Te­stC­las­s # Run all tests in a class - pytest path/test_m­odule.p­y:­:Te­stC­las­s::­tes­t_m­ethod # Run a specific method of a class. + pytest path/test_module.py # Run all tests in a module. + pytest path/test_module.py::test_func # Run a specific test within a module. + pytest path/test_module.py::TestClass # Run all tests in a class + pytest path/test_module.py::TestClass::test_method # Run a specific method of a class. pytest path/testing/ # Run all tests in a directory. For example, this command runs a single python unit test file:: @@ -114,7 +128,7 @@ For example, this command runs a single python unit test file:: pytest xmodule/tests/test_stringify.py Note - -edx-platorm has multiple services (lms, cms) in it. The environment for each service is different enough that we run some tests in both environments in Github Actions. +edx-platorm has multiple services (lms, cms) in it. The environment for each service is different enough that we run some tests in both environments in Github Actions. To test in each of these environments (especially for tests in "common" and "xmodule" directories), you will need to test in each seperately. 
To specify that the tests are run with the relevant service as root, Add --rootdir flag at end of your pytest call and specify the env to test in:: @@ -139,7 +153,7 @@ Various tools like ddt create tests with very complex names, rather than figurin pytest xmodule/tests/test_stringify.py --collectonly Testing with migrations -*********************** +======================= For the sake of speed, by default the python unit test database tables are created directly from apps' models. If you want to run the tests @@ -149,7 +163,7 @@ against a database created by applying the migrations instead, use the pytest test --create-db --migrations Debugging a test -~~~~~~~~~~~~~~~~ +================ There are various ways to debug tests in Python and more specifically with pytest: @@ -173,7 +187,7 @@ There are various ways to debug tests in Python and more specifically with pytes How to output coverage locally -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +============================== These are examples of how to run a single test and get coverage:: @@ -220,234 +234,84 @@ run one of these commands:: .. _YouTube stub server: https://github.com/openedx/edx-platform/blob/master/common/djangoapps/terrain/stubs/tests/test_youtube_stub.py -Debugging Unittest Flakiness -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -As we move over to running our unittests with Jenkins Pipelines and pytest-xdist, -there are new ways for tests to flake, which can sometimes be difficult to debug. -If you run into flakiness, check (and feel free to contribute to) this -`confluence document `__ for help. - -Running Javascript Unit Tests ------------------------------ - -Before running Javascript unit tests, you will need to be running Firefox or Chrome in a place visible to edx-platform. If running this in devstack, you can run ``make dev.up.firefox`` or ``make dev.up.chrome``. 
Firefox is the default browser for the tests, so if you decide to use Chrome, you will need to prefix the test command with ``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome`` (if using devstack). - -We use Jasmine to run JavaScript unit tests. To run all the JavaScript -tests:: - - paver test_js - -To run a specific set of JavaScript tests and print the results to the -console, run these commands:: - - paver test_js_run -s lms - paver test_js_run -s cms - paver test_js_run -s cms-squire - paver test_js_run -s xmodule - paver test_js_run -s xmodule-webpack - paver test_js_run -s common - paver test_js_run -s common-requirejs - -To run JavaScript tests in a browser, run these commands:: - - paver test_js_dev -s lms - paver test_js_dev -s cms - paver test_js_dev -s cms-squire - paver test_js_dev -s xmodule - paver test_js_dev -s xmodule-webpack - paver test_js_dev -s common - paver test_js_dev -s common-requirejs - -Debugging Specific Javascript Tests -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The best way to debug individual tests is to run the test suite in the browser and -use your browser's Javascript debugger. The debug page will allow you to select -an individual test and only view the results of that test. - - -Debugging Tests in a Browser -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To debug these tests on devstack in a local browser: - -* first run the appropriate test_js_dev command from above -* open http://localhost:19876/debug.html in your host system's browser of choice -* this will run all the tests and show you the results including details of any failures -* you can click on an individually failing test and/or suite to re-run it by itself -* you can now use the browser's developer tools to debug as you would any other JavaScript code - -Note: the port is also output to the console that you ran the tests from if you find that easier. - -These paver commands call through to Karma. For more -info, see `karma-runner.github.io `__. 
- -Testing internationalization with dummy translations ----------------------------------------------------- - -Any text you add to the platform should be internationalized. To generate translations for your new strings, run the following command:: - - paver i18n_dummy - -This command generates dummy translations for each dummy language in the -platform and puts the dummy strings in the appropriate language files. -You can then preview the dummy languages on your local machine and also in your sandbox, if and when you create one. - -The dummy language files that are generated during this process can be -found in the following locations:: - - conf/locale/{LANG_CODE} - -There are a few JavaScript files that are generated from this process. You can find those in the following locations:: - - lms/static/js/i18n/{LANG_CODE} - cms/static/js/i18n/{LANG_CODE} - -Do not commit the ``.po``, ``.mo``, ``.js`` files that are generated -in the above locations during the dummy translation process! +Handling flaky unit tests +========================= -Test Coverage and Quality -------------------------- +See this `confluence document `_. -Viewing Test Coverage -~~~~~~~~~~~~~~~~~~~~~ -We currently collect test coverage information for Python -unit/integration tests. +Running JavaScript Unit Tests +***************************** -To view test coverage: +Before running Javascript unit tests, you will need to be running Firefox or Chrome in a place visible to edx-platform. +If you are using Tutor Dev to run edx-platform, then you can do so by installing and enabling the +``test-legacy-js`` plugin from `openedx-tutor-plugins`_, and then rebuilding +the ``openedx-dev`` image:: -1. Run the test suite with this command:: + tutor plugins install https://github.com/openedx/openedx-tutor-plugins/tree/main/plugins/tutor-contrib-test-legacy-js + tutor plugins enable test-legacy-js + tutor images build openedx-dev - paver test +.. 
_openedx-tutor-plugins: https://github.com/openedx/openedx-tutor-plugins/ -2. Generate reports with this command:: +We use Jasmine (via Karma) to run most JavaScript unit tests. We use Jest to +run a small handful of additional JS unit tests. You can use the ``npm run +test*`` commands to run them:: - paver coverage + npm run test-karma # Run all Jasmine+Karma tests. + npm run test-jest # Run all Jest tests. + npm run test # Run both of the above. -3. Reports are located in the ``reports`` folder. The command generates - HTML and XML (Cobertura format) reports. +The Karma tests are further broken down into three types depending on how the +JavaScript it is testing is built:: -Python Code Style Quality -~~~~~~~~~~~~~~~~~~~~~~~~~ + npm run test-karma-vanilla # Our very oldest JS, which doesn't even use RequireJS + npm run test-karma-require # Old JS that uses RequireJS + npm run test-karma-webpack # Slightly "newer" JS which is built with Webpack -To view Python code style quality (including PEP 8 and pylint violations) run this command:: +Unfortunately, at the time of writing, the build for the ``test-karma-webpack`` +tests is broken. The tests are excluded from ``npm run test-karma`` as to not +fail CI. We `may fix this one day`_. - paver run_quality +.. _may fix this one day: https://github.com/openedx/edx-platform/issues/35956 -More specific options are below. +To run all Karma+Jasmine tests for a particular top-level edx-platform folder, +you can run:: -- These commands run a particular quality report:: + npm run test-cms + npm run test-lms + npm run test-xmodule + npm run test-common - paver run_pep8 - paver run_pylint - -- This command runs a report, and sets it to fail if it exceeds a given number - of violations:: - - paver run_pep8 --limit=800 - -- The ``run_quality`` uses the underlying diff-quality tool (which is packaged - with `diff-cover`_). With that, the command can be set to fail if a certain - diff threshold is not met. 
For example, to cause the process to fail if - quality expectations are less than 100% when compared to master (or in other - words, if style quality is worse than what is already on master):: - - paver run_quality --percentage=100 - -- Note that 'fixme' violations are not counted with run\_quality. To - see all 'TODO' lines, use this command:: - - paver find_fixme --system=lms - - ``system`` is an optional argument here. It defaults to - ``cms,lms,common``. - -.. _diff-cover: https://github.com/Bachmann1234/diff-cover - - -JavaScript Code Style Quality -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To view JavaScript code style quality run this command:: - - paver run_eslint - -- This command also comes with a ``--limit`` switch, this is an example of that switch:: - - paver run_eslint --limit=50000 - - -Code Complexity Tools -===================== - -Tool(s) available for evaluating complexity of edx-platform code: - - -- `plato `__ for JavaScript code - complexity. Several options are available on the command line; see - documentation. Below, the following command will produce an HTML report in a - subdirectory called "jscomplexity":: - - plato -q -x common/static/js/vendor/ -t common -e .eslintrc.json -r -d jscomplexity common/static/js/ - -Other Testing Tips -================== - -Connecting to Browser ---------------------- - -If you want to see the browser being automated for JavaScript, -you can connect to the container running it via VNC. - -+------------------------+----------------------+ -| Browser | VNC connection | -+========================+======================+ -| Firefox (Default) | vnc://0.0.0.0:25900 | -+------------------------+----------------------+ -| Chrome (via Selenium) | vnc://0.0.0.0:15900 | -+------------------------+----------------------+ - -On macOS, enter the VNC connection string in Safari to connect via VNC. 
The VNC -passwords for both browsers are randomly generated and logged at container -startup, and can be found by running ``make vnc-passwords``. - -Most tests are run in Firefox by default. To use Chrome for tests that normally -use Firefox instead, prefix the test command with -``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome`` - -Factories ---------- - -Many tests delegate set-up to a "factory" class. For example, there are -factories for creating courses, problems, and users. This encapsulates -set-up logic from tests. - -Factories are often implemented using `FactoryBoy`_. - -In general, factories should be located close to the code they use. For -example, the factory for creating problem XML definitions is located in -``xmodule/capa/tests/response_xml_factory.py`` because the -``capa`` package handles problem XML. - -.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/ +Finally, if you want to pass any options to the underlying ``node`` invocation +for Karma+Jasmine tests, you can run one of these specific commands, and put +your arguments after the ``--`` separator:: -Running Tests on Paver Scripts ------------------------------- + npm run test-cms-vanilla -- --your --args --here + npm run test-cms-require -- --your --args --here + npm run test-cms-webpack -- --your --args --here + npm run test-lms-webpack -- --your --args --here + npm run test-xmodule-vanilla -- --your --args --here + npm run test-xmodule-webpack -- --your --args --here + npm run test-common-vanilla -- --your --args --here + npm run test-common-require -- --your --args --here -To run tests on the scripts that power the various Paver commands, use the following command:: - pytest pavelib +Code Quality +************ -Testing using queue servers ---------------------------- +We use several tools to analyze code quality. The full set of them is:: -When testing problems that use a queue server on AWS (e.g. 
-sandbox-xqueue.edx.org), you'll need to run your server on your public IP, like so:: + mypy $PATHS... + pycodestyle $PATHS... + pylint $PATHS... + lint-imports + scripts/verify-dunder-init.sh + make xsslint + make pii_check + make check_keywords + npm run lint - ./manage.py lms runserver 0.0.0.0:8000 +Where ``$PATHS...`` is a list of folders and files to analyze, or nothing if +you would like to analyze the entire codebase (which can take a while). -When you connect to the LMS, you need to use the public ip. Use -``ifconfig`` to figure out the number, and connect e.g. to -``http://18.3.4.5:8000/`` diff --git a/docs/concepts/frontend/static_assets.rst b/docs/decisions/0000-static-asset-plan.rst similarity index 90% rename from docs/concepts/frontend/static_assets.rst rename to docs/decisions/0000-static-asset-plan.rst index fd7f04e954ad..eb64df858f60 100644 --- a/docs/concepts/frontend/static_assets.rst +++ b/docs/decisions/0000-static-asset-plan.rst @@ -1,6 +1,22 @@ -####################################### -edx-platform Static Asset Pipeline Plan -####################################### +0. edx-platform Static Asset Pipeline Plan +########################################## + +Status +****** + +Accepted ~2017 +Partially adopted 2017-2024 + +This was an old plan for modernizing Open edX's frontend assets. We've +retroactively turned it into an ADR because it has some valuable insights. +Although most of these improvements weren't applied as written, these ideas +(particularly, separating Python concerns from frontend tooling concerns) were +applied to both legacy edx-platform assets as well as the Micro-Frontend +framework that was developed 2017-2019. + +Context, Decision, Consequences +******************************* + Static asset handling in edx-platform has evolved in a messy way over the years. This has led to a lot of complexity and inconsistencies. This is a proposal for @@ -9,20 +25,9 @@ this is not a detailed guide for how to write React or Bootstrap code. 
This is instead going to talk about conventions for how we arrange, extract, and compile static assets. -Big Open Questions (TODO) -************************* - -This document is a work in progress, as the design for some of this is still in -flux, particularly around extensibility. - -* Pluggable third party apps and Webpack packaging. -* Keep the Django i18n mechanism? -* Stance on HTTP/2 and bundling granularity. -* Optimizing theme assets. -* Tests Requirements -************ +============ Any proposed solution must support: @@ -35,7 +40,7 @@ Any proposed solution must support: * Other kinds of pluggability??? Assumptions -*********** +=========== Some assumptions/opinions that this proposal is based on: @@ -54,8 +59,8 @@ Some assumptions/opinions that this proposal is based on: * It should be possible to pre-build static assets and deploy them onto S3 or similar. -Where We Are Today -****************** +Where We Are Today (2017) +========================= We have a static asset pipeline that is mostly driven by Django's built-in staticfiles finders and the collectstatic process. We use the popular @@ -95,9 +100,9 @@ places (typically ``/edx/var/edxapp/staticfiles`` for LMS and ``/edx/var/edxapp/staticfiles/studio`` for Studio) and can be collected separately. However in practice they're always run together because we deploy them from the same commits and to the same servers. - + Django vs. Webpack Conventions -****************************** +============================== The Django convention for having an app with bundled assets is to namespace them locally with the app name so that they get their own directories when they are @@ -112,7 +117,7 @@ the root of edx-platform, which would specify all bundles in the project. TODO: The big, "pluggable Webpack components" question. 
Proposed Repo Structure -*********************** +======================= All assets that are in common spaces like ``common/static``, ``lms/static``, and ``cms/static`` would be moved to be under the Django apps that they are a @@ -122,7 +127,7 @@ part of and follow the Django naming convention (e.g. any client-side templates will be put in ``static/{appname}/templates``. Proposed Compiled Structure -*************************** +=========================== This is meant to be a sample of the different types of things we'd have, not a full list: @@ -150,14 +155,14 @@ full list: /theming/themes/open-edx /red-theme /edx.org - + # XBlocks still collect their assets into a common space (/xmodule goes away) # We consider this to be the XBlock Runtime's app, and it collects static # assets from installed XBlocks. /xblock Django vs. Webpack Roles -************************ +======================== Rule of thumb: Django/Python still serves static assets, Webpack processes and optimizes them. diff --git a/docs/decisions/0021-fixing-quality-and-js-checks.rst b/docs/decisions/0021-fixing-quality-and-js-checks.rst new file mode 100644 index 000000000000..7d80039a8cb0 --- /dev/null +++ b/docs/decisions/0021-fixing-quality-and-js-checks.rst @@ -0,0 +1,143 @@ +Fixing the Quality and JS checks +################################ + +Status +****** + +Accepted + +Implemented by https://github.com/openedx/edx-platform/pull/35159 + +Context +******* + +edx-platform PRs need to pass a series of CI checks before merging, including +but not limited to: a CLA check, various unit tests, and various code quality +tests. Of these checks, two checks were implemented using the "Paver" Python +package, a scripting library `which we have been trying to move off of`_. These +two checks and their steps were: + +* **Check: Quality others** + + * **pii_check**: Ensure that Django models have PII annotations as + described in `OEP-30`_, with a minimum threshold of **94.5%** of models + annotated. 
+  * **stylelint**: Statically check sass stylesheets for common errors. +  * **pep8**: Run pycodestyle against Python code. +  * **eslint**: Statically check javascript code for common errors. +  * **xsslint**: Check python & javascript for xss vulnerabilities. +  * **check_keywords**: Compare Django model field names against a denylist of +    reserved keywords. + +* **Check: JS** + +  * **test-js**: Run javascript unit tests. +  * **coverage-js**: Check that javascript test coverage has not dropped. + +As we worked to reimplement these checks without Paver, we unfortunately +noticed that four of those steps had bugs in their implementations, and thus +had not been enforcing what they promised to: + +* **pii_check**: Instead of just checking the result of the underlying +  code_annotations command, this check wrote an annotations report to a file, +  and then used regex to parse the report and determine whether the check +  should pass. However, the check failed to validate that the generation of the +  report itself was successful. So, when malformed annotations were introduced +  to the edx-proctoring repository, which edx-platform installs, the check +  began silently passing. + +* **stylelint**: At some point, the `stylelint` binary stopped being available +  on the runner's `$PATH`. Rather than causing the Quality Others check to +  fail, the Paver code quietly ignored the shell error, and considered the +  empty stylelint report file to indicate that there were not linting +  violations. + +* **test-js**: There are eight suites within test-js. Six of them work fine. +  But three of them--specifically the suites that test code built by Webpack-- +  have not been running for some unknown amount of time. The Webpack test build +  has been failing without signalling that the test suite should fail, +  both preventing the tests from running and preventing anyone from noticing +  that the tests weren't running. 
+ +* **coverage-js**: This check tried to use `diff-cover` in order to compare the + coverage report on the current branch with the coverage report on the master + branch. However, the coverage report does not exist on the master branch, and + it's not clear when it ever did. The coverage-js step failed to validate that + `diff-cover` ran successfully, and instead of raising an error, it allowed + the JS check to pass. + +Decision & Consequences +*********************** + +pii_check +========= + +We `fixed the malformed annotations`_ in edx-proctoring, allowing the pii_check +to once again check model coverage. We have ensured that any future failure of +the code_annotations command (due to, for example, future malformed +annotations) will cause the pii_check step and the overall Quality Others check +to fail. We have stopped trying to parse the result of the annotations report +in CI, as this was and is completely unnecessary. + +In order to keep "Quality others" passing on the edx-platform master branch, we +lowered the PII annotation coverage threshold to reflect the percentage of +then-annotated models: **71.6%**. After a timeboxed effort to add missing +annotations and expand the annotation allowlist as appropriate, we have managed +to raise the threshold to **85.3%**. It is not clear whether we will put in +further effort to raise the annotation threshold back to 94.5%. + +This was all already `announced on the forums`_. + +stylelint +========= + +We have removed the **stylelint** step entirely from the "Quality Others" +check. Sass code in the edx-platform repository will no longer be subject to +any static analysis. + +test-js +======= + +We have stopped running these Webpack-based suites in CI: + +* ``npm run test-lms-webpack`` +* ``npm run test-cms-webpack`` +* ``npm run test-xmodule-webpack`` + +We have created a new edx-platform backlog issue for +`fixing and re-enabling these suites`_. 
+It is not clear whether we will prioritize that issue, or instead prioritize +deprecation and removal of the code that those suites were supposed to be +testing. + +coverage-js +=========== + +We will remove the **coverage-js** step entirely from the "JS" check. +JavaScript code in the edx-platform repository will no longer be subject to any +unit test coverage checking. + +Rejected Alternatives +********************* + +* While it would be ideal to raise the pii_check threshold to 94.5% or even + 100%, we do not have the resources to promise this. + +* It would also be nice to institute a "ratcheting" mechanism for the PII + annotation coverage threshold. That is, every commit to master could save the + coverage percentage to a persisted artifact, allowing subsequent PRs to + ensure that the pii_check never returns lower than the current threshold. We + will put this in the Aximprovements backlog, but we cannot commit to + implementing it right now. + +* We will not fix or apply amnesty in order to re-enable stylelint or + coverage-js. That could take significant effort, which we believe would be + better spent completing the migration off of this legacy Sass and JS and onto + our modern React frontends. + + +.. _fixing and re-enabling these suites: https://github.com/openedx/edx-platform/issues/35956 +.. _which we have been trying to move off of: https://github.com/openedx/edx-platform/issues/34467 +.. _announced on the forums: https://discuss.openedx.org/t/checking-pii-annotations-with-a-lower-coverage-threshold/14254 +.. _OEP-30: https://docs.openedx.org/projects/openedx-proposals/en/latest/architectural-decisions/oep-0030-arch-pii-markup-and-auditing.html +.. 
_fix the malformed annotations: https://github.com/openedx/edx-proctoring/issues/1241 diff --git a/lms/djangoapps/bulk_email/signals.py b/lms/djangoapps/bulk_email/signals.py index fb8749bf45a9..8ada8760cdcf 100644 --- a/lms/djangoapps/bulk_email/signals.py +++ b/lms/djangoapps/bulk_email/signals.py @@ -1,6 +1,8 @@ """ Signal handlers for the bulk_email app """ +import logging + from django.dispatch import receiver from eventtracking import tracker @@ -10,6 +12,8 @@ from .models import Optout +log = logging.getLogger(__name__) + @receiver(USER_RETIRE_MAILINGS) def force_optout_all(sender, **kwargs): # lint-amnesty, pylint: disable=unused-argument @@ -43,7 +47,7 @@ def ace_email_sent_handler(sender, **kwargs): if not course_id: course_email = context.get('course_email', None) course_id = course_email.course_id if course_email else None - + log.info(f'Email sent for {message_name} for course {course_id} using channel {channel}') tracker.emit( 'edx.ace.message_sent', { diff --git a/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py b/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py index eea03bd79455..af48472886bb 100644 --- a/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py +++ b/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py @@ -1,6 +1,7 @@ """ Command to trigger sending reminder emails for learners to achieve their Course Goals """ +import time from datetime import date, datetime, timedelta from eventtracking import tracker import logging @@ -119,9 +120,9 @@ def send_ace_message(goal, session_id): with emulate_http_request(site, user): try: - start_time = datetime.now() + start_time = time.perf_counter() ace.send(msg) - end_time = datetime.now() + end_time = time.perf_counter() log.info(f"Goal Reminder for {user.id} for course {goal.course_key} sent in {end_time - start_time} " f"using {'SES' if is_ses_enabled else 'others'}") except Exception as exc: # pylint: 
disable=broad-except @@ -283,7 +284,7 @@ def handle_goal(goal, today, sunday_date, monday_date, session_id): 'uuid': session_id, 'timestamp': datetime.now(), 'reason': 'User time zone', - 'user_timezone': user_timezone, + 'user_timezone': str(user_timezone), 'now_in_users_timezone': now_in_users_timezone, } ) diff --git a/lms/djangoapps/discussion/rest_api/discussions_notifications.py b/lms/djangoapps/discussion/rest_api/discussions_notifications.py index bd12e82adc50..f6572c829c7a 100644 --- a/lms/djangoapps/discussion/rest_api/discussions_notifications.py +++ b/lms/djangoapps/discussion/rest_api/discussions_notifications.py @@ -117,6 +117,7 @@ def send_new_response_notification(self): context = { 'email_content': clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "new_response", extra_context=context) def _response_and_thread_has_same_creator(self) -> bool: @@ -156,6 +157,7 @@ def send_new_comment_notification(self): "author_pronoun": str(author_pronoun), "email_content": clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "new_comment", extra_context=context) def send_new_comment_on_response_notification(self): @@ -171,6 +173,7 @@ def send_new_comment_on_response_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification( [self.parent_response.user_id], "new_comment_on_response", @@ -216,12 +219,15 @@ def send_response_on_followed_post_notification(self): # Remove duplicate users from the list of users to send notification users = list(set(users)) if not self.parent_id: + context = { + "email_content": clean_thread_html_body(self.comment.body), + } + self._populate_context_with_ids_for_mobile(context) self._send_notification( users, "response_on_followed_post", - 
extra_context={ - "email_content": clean_thread_html_body(self.comment.body), - }) + extra_context=context + ) else: author_name = f"{self.parent_response.username}'s" # use 'their' if comment author is also response author. @@ -231,14 +237,16 @@ def send_response_on_followed_post_notification(self): if self._response_and_comment_has_same_creator() else f"{self.parent_response.username}'s" ) + context = { + "author_name": str(author_name), + "author_pronoun": str(author_pronoun), + "email_content": clean_thread_html_body(self.comment.body), + } + self._populate_context_with_ids_for_mobile(context) self._send_notification( users, "comment_on_followed_post", - extra_context={ - "author_name": str(author_name), - "author_pronoun": str(author_pronoun), - "email_content": clean_thread_html_body(self.comment.body), - } + extra_context=context ) def _create_cohort_course_audience(self): @@ -290,6 +298,7 @@ def send_response_endorsed_on_thread_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body) } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "response_endorsed_on_thread", extra_context=context) def send_response_endorsed_notification(self): @@ -299,6 +308,7 @@ def send_response_endorsed_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body) } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.creator.id], "response_endorsed", extra_context=context) def send_new_thread_created_notification(self): @@ -330,6 +340,7 @@ def send_new_thread_created_notification(self): 'post_title': self.thread.title, "email_content": clean_thread_html_body(self.thread.body), } + self._populate_context_with_ids_for_mobile(context) self._send_course_wide_notification(notification_type, audience_filters, context) def send_reported_content_notification(self): @@ -362,6 +373,12 @@ def send_reported_content_notification(self): ]} 
self._send_course_wide_notification("content_reported", audience_filters, context) + def _populate_context_with_ids_for_mobile(self, context): + context['thread_id'] = self.thread.id + context['topic_id'] = self.thread.commentable_id + context['comment_id'] = self.comment_id + context['parent_id'] = self.parent_id + def is_discussion_cohorted(course_key_str): """ diff --git a/lms/djangoapps/discussion/rest_api/tests/test_tasks.py b/lms/djangoapps/discussion/rest_api/tests/test_tasks.py index 6aff0673cc73..8171ca7e6a71 100644 --- a/lms/djangoapps/discussion/rest_api/tests/test_tasks.py +++ b/lms/djangoapps/discussion/rest_api/tests/test_tasks.py @@ -305,6 +305,8 @@ def setUp(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, + }) self._register_subscriptions_endpoint() @@ -354,7 +356,11 @@ def test_send_notification_to_thread_creator(self): 'post_title': 'test thread', 'email_content': self.comment.body, 'course_name': self.course.display_name, - 'sender_id': self.user_2.id + 'sender_id': self.user_2.id, + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args.context, expected_context) self.assertEqual( @@ -400,7 +406,11 @@ def test_send_notification_to_parent_threads(self): 'author_name': 'dummy\'s', 'author_pronoun': 'dummy\'s', 'course_name': self.course.display_name, - 'sender_id': self.user_3.id + 'sender_id': self.user_3.id, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args_comment.context, expected_context) self.assertEqual( @@ -417,7 +427,11 @@ def test_send_notification_to_parent_threads(self): 'post_title': self.thread.title, 'email_content': self.comment.body, 'course_name': self.course.display_name, - 'sender_id': self.user_3.id + 'sender_id': self.user_3.id, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } 
self.assertDictEqual(args_comment_on_response.context, expected_context) self.assertEqual( @@ -477,7 +491,11 @@ def test_comment_creators_own_response(self): 'author_pronoun': 'your', 'course_name': self.course.display_name, 'sender_id': self.user_3.id, - 'email_content': self.comment.body + 'email_content': self.comment.body, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args_comment.context, expected_context) self.assertEqual( @@ -522,6 +540,10 @@ def test_send_notification_to_followers(self, parent_id, notification_type): 'email_content': self.comment.body, 'course_name': self.course.display_name, 'sender_id': self.user_2.id, + 'parent_id': parent_id, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } if parent_id: expected_context['author_name'] = 'dummy\'s' @@ -624,6 +646,8 @@ def test_new_comment_notification(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, + }) self.register_get_comment_response({ 'id': response.id, @@ -707,6 +731,7 @@ def test_response_endorsed_notifications(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, }) self.register_get_comment_response({ 'id': 1, @@ -734,7 +759,11 @@ def test_response_endorsed_notifications(self): 'post_title': 'test thread', 'course_name': self.course.display_name, 'sender_id': int(self.user_2.id), - 'email_content': 'dummy' + 'email_content': 'dummy', + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 2, } self.assertDictEqual(notification_data.context, expected_context) self.assertEqual(notification_data.content_url, _get_mfe_url(self.course.id, thread.id)) @@ -752,7 +781,11 @@ def test_response_endorsed_notifications(self): 'post_title': 'test thread', 'course_name': self.course.display_name, 'sender_id': int(response.user_id), - 'email_content': 'dummy' + 
'email_content': 'dummy', + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 2, } self.assertDictEqual(notification_data.context, expected_context) self.assertEqual(notification_data.content_url, _get_mfe_url(self.course.id, thread.id)) diff --git a/lms/djangoapps/discussion/rest_api/tests/utils.py b/lms/djangoapps/discussion/rest_api/tests/utils.py index 27e34705f5df..496f8723acfb 100644 --- a/lms/djangoapps/discussion/rest_api/tests/utils.py +++ b/lms/djangoapps/discussion/rest_api/tests/utils.py @@ -675,13 +675,14 @@ class ThreadMock(object): A mock thread object """ - def __init__(self, thread_id, creator, title, parent_id=None, body=''): + def __init__(self, thread_id, creator, title, parent_id=None, body='', commentable_id=None): self.id = thread_id self.user_id = str(creator.id) self.username = creator.username self.title = title self.parent_id = parent_id self.body = body + self.commentable_id = commentable_id def url_with_id(self, params): return f"http://example.com/{params['id']}" diff --git a/lms/envs/common.py b/lms/envs/common.py index 4fcb0a8126bc..e354f75a8530 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -1059,18 +1059,6 @@ # .. toggle_creation_date: 2024-04-24 'ENABLE_COURSEWARE_SEARCH_VERIFIED_ENROLLMENT_REQUIRED': False, - # .. toggle_name: FEATURES['ENABLE_BLAKE2B_HASHING'] - # .. toggle_implementation: DjangoSetting - # .. toggle_default: False - # .. toggle_description: Enables the memcache to use the blake2b hash algorithm instead of depreciated md4 for keys - # exceeding 250 characters - # .. toggle_use_cases: open_edx - # .. toggle_creation_date: 2024-04-02 - # .. toggle_target_removal_date: 2024-12-09 - # .. toggle_warning: For consistency, keep the value in sync with the setting of the same name in the LMS and CMS. - # .. toggle_tickets: https://github.com/openedx/edx-platform/pull/34442 - 'ENABLE_BLAKE2B_HASHING': False, - # .. toggle_name: FEATURES['BADGES_ENABLED'] # .. 
toggle_implementation: DjangoSetting # .. toggle_default: False diff --git a/lms/envs/minimal.yml b/lms/envs/minimal.yml index 51d7bbf499c4..003bc243764f 100644 --- a/lms/envs/minimal.yml +++ b/lms/envs/minimal.yml @@ -2,7 +2,8 @@ # # This is the minimal settings you need to set to be able to get django to # load when using the production.py settings files. It's useful to point -# LMS_CFG and CMS_CFG to this file to be able to run various paver commands +# LMS_CFG and CMS_CFG to this file to be able to run various npm commands +# and make targets (e.g.: building assets, pulling translations, etc.) # without needing a full docker setup. # # Follow up work will likely be done as a part of diff --git a/lms/templates/courseware/static_tab.html b/lms/templates/courseware/static_tab.html index 38b08f85ee76..cf79fb8948f7 100644 --- a/lms/templates/courseware/static_tab.html +++ b/lms/templates/courseware/static_tab.html @@ -33,7 +33,6 @@ lang=${course.language} % endif > -
${HTML(fragment.body_html())}
diff --git a/openedx/core/djangoapps/content/learning_sequences/models.py b/openedx/core/djangoapps/content/learning_sequences/models.py index 5d2ee34bbc9f..924403f408aa 100644 --- a/openedx/core/djangoapps/content/learning_sequences/models.py +++ b/openedx/core/djangoapps/content/learning_sequences/models.py @@ -37,6 +37,8 @@ yourself to the LearningContext and LearningSequence models. Other tables are not guaranteed to stick around, and values may be deleted unexpectedly. """ +from __future__ import annotations + from django.db import models from model_utils.models import TimeStampedModel @@ -214,7 +216,7 @@ class CourseSection(CourseContentVisibilityMixin, TimeStampedModel): # What is our position within the Course? (starts with 0) ordering = models.PositiveIntegerField(null=False) - new_user_partition_groups = models.ManyToManyField( + new_user_partition_groups: models.ManyToManyField[UserPartitionGroup, models.Model] = models.ManyToManyField( UserPartitionGroup, db_index=True, related_name='sec_user_partition_groups', @@ -280,7 +282,7 @@ class CourseSectionSequence(CourseContentVisibilityMixin, TimeStampedModel): # sequences across 20 sections, the numbering here would be 0-199. 
ordering = models.PositiveIntegerField(null=False) - new_user_partition_groups = models.ManyToManyField( + new_user_partition_groups: models.ManyToManyField[UserPartitionGroup, models.Model] = models.ManyToManyField( UserPartitionGroup, db_index=True, related_name='secseq_user_partition_groups', diff --git a/openedx/core/djangoapps/content_libraries/serializers.py b/openedx/core/djangoapps/content_libraries/serializers.py index d639ed63afb0..c2a26220d4c7 100644 --- a/openedx/core/djangoapps/content_libraries/serializers.py +++ b/openedx/core/djangoapps/content_libraries/serializers.py @@ -276,7 +276,7 @@ class ContentLibraryCollectionUpdateSerializer(serializers.Serializer): description = serializers.CharField(allow_blank=True) -class UsageKeyV2Serializer(serializers.Serializer): +class UsageKeyV2Serializer(serializers.BaseSerializer): """ Serializes a UsageKeyV2. """ diff --git a/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py b/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py index 69f5cd2d797c..91d9556b01f1 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py @@ -63,6 +63,13 @@ def test_asset_filenames(self): file_name = "a////////b" self._set_library_block_asset(block_id, file_name, SVG_DATA, expect_response=400) + # Names with spaces are allowed but replaced with underscores + file_name_with_space = "o w o.svg" + self._set_library_block_asset(block_id, file_name_with_space, SVG_DATA) + file_name = "o_w_o.svg" + assert self._get_library_block_asset(block_id, file_name)['path'] == file_name + assert self._get_library_block_asset(block_id, file_name)['size'] == file_size + def test_video_transcripts(self): """ Test that video blocks can read transcript files out of learning core. 
diff --git a/openedx/core/djangoapps/content_libraries/views.py b/openedx/core/djangoapps/content_libraries/views.py index 94197f508775..c8c91d50331f 100644 --- a/openedx/core/djangoapps/content_libraries/views.py +++ b/openedx/core/djangoapps/content_libraries/views.py @@ -799,6 +799,7 @@ def put(self, request, usage_key_str, file_path): """ Replace a static asset file belonging to this block. """ + file_path = file_path.replace(" ", "_") # Messes up url/name correspondence due to URL encoding. usage_key = LibraryUsageLocatorV2.from_string(usage_key_str) api.require_permission_for_library_key( usage_key.lib_key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, diff --git a/openedx/core/djangoapps/notifications/email/utils.py b/openedx/core/djangoapps/notifications/email/utils.py index 9fd761785e5a..34c245308785 100644 --- a/openedx/core/djangoapps/notifications/email/utils.py +++ b/openedx/core/djangoapps/notifications/email/utils.py @@ -9,7 +9,7 @@ from django.contrib.auth import get_user_model from django.shortcuts import get_object_or_404 from pytz import utc -from waffle import get_waffle_flag_model # pylint: disable=invalid-django-waffle-import +from waffle import get_waffle_flag_model # pylint: disable=invalid-django-waffle-import from common.djangoapps.student.models import CourseEnrollment from lms.djangoapps.branding.api import get_logo_url_for_email @@ -29,7 +29,6 @@ from .notification_icons import NotificationTypeIcons - User = get_user_model() @@ -370,14 +369,6 @@ def is_name_match(name, param_name): """ return True if param_name is None else name == param_name - def is_editable(app_name, notification_type, channel): - """ - Returns if notification type channel is editable - """ - if notification_type == 'core': - return channel not in COURSE_NOTIFICATION_APPS[app_name]['non_editable'] - return channel not in COURSE_NOTIFICATION_TYPES[notification_type]['non_editable'] - def get_default_cadence_value(app_name, notification_type): """ Returns 
default email cadence value @@ -417,9 +408,18 @@ def get_updated_preference(pref): for channel in ['web', 'email', 'push']: if not is_name_match(channel, channel_value): continue - if is_editable(app_name, noti_type, channel): + if is_notification_type_channel_editable(app_name, noti_type, channel): type_prefs[channel] = pref_value if channel == 'email' and pref_value and type_prefs.get('email_cadence') == EmailCadence.NEVER: type_prefs['email_cadence'] = get_default_cadence_value(app_name, noti_type) preference.save() notification_preference_unsubscribe_event(user) + + +def is_notification_type_channel_editable(app_name, notification_type, channel): + """ + Returns if notification type channel is editable + """ + if notification_type == 'core': + return channel not in COURSE_NOTIFICATION_APPS[app_name]['non_editable'] + return channel not in COURSE_NOTIFICATION_TYPES[notification_type]['non_editable'] diff --git a/openedx/core/djangoapps/notifications/grouping_notifications.py b/openedx/core/djangoapps/notifications/grouping_notifications.py index 3c4688b5ed53..0e84ea3f109a 100644 --- a/openedx/core/djangoapps/notifications/grouping_notifications.py +++ b/openedx/core/djangoapps/notifications/grouping_notifications.py @@ -132,7 +132,8 @@ def get_user_existing_notifications(user_ids, notification_type, group_by_id, co user__in=user_ids, notification_type=notification_type, group_by_id=group_by_id, - course_id=course_id + course_id=course_id, + last_seen__isnull=True, ) notifications_mapping = {user_id: [] for user_id in user_ids} for notification in notifications: diff --git a/openedx/core/djangoapps/notifications/serializers.py b/openedx/core/djangoapps/notifications/serializers.py index 79c8c4af9d13..80b1577b6355 100644 --- a/openedx/core/djangoapps/notifications/serializers.py +++ b/openedx/core/djangoapps/notifications/serializers.py @@ -1,6 +1,7 @@ """ Serializers for the notifications API. 
""" + from django.core.exceptions import ValidationError from rest_framework import serializers @@ -9,9 +10,12 @@ from openedx.core.djangoapps.notifications.models import ( CourseNotificationPreference, Notification, - get_notification_channels, get_additional_notification_channel_settings + get_additional_notification_channel_settings, + get_notification_channels ) + from .base_notification import COURSE_NOTIFICATION_APPS, COURSE_NOTIFICATION_TYPES, EmailCadence +from .email.utils import is_notification_type_channel_editable from .utils import remove_preferences_with_no_access @@ -202,3 +206,113 @@ class Meta: 'last_seen', 'created', ) + + +def validate_email_cadence(email_cadence: str) -> str: + """ + Validate email cadence value. + """ + if EmailCadence.get_email_cadence_value(email_cadence) is None: + raise ValidationError(f'{email_cadence} is not a valid email cadence.') + return email_cadence + + +def validate_notification_app(notification_app: str) -> str: + """ + Validate notification app value. + """ + if not COURSE_NOTIFICATION_APPS.get(notification_app): + raise ValidationError(f'{notification_app} is not a valid notification app.') + return notification_app + + +def validate_notification_app_enabled(notification_app: str) -> str: + """ + Validate notification app is enabled. + """ + + if COURSE_NOTIFICATION_APPS.get(notification_app) and COURSE_NOTIFICATION_APPS.get(notification_app)['enabled']: + return notification_app + raise ValidationError(f'{notification_app} is not a valid notification app.') + + +def validate_notification_type(notification_type: str) -> str: + """ + Validate notification type value. + """ + if not COURSE_NOTIFICATION_TYPES.get(notification_type): + raise ValidationError(f'{notification_type} is not a valid notification type.') + return notification_type + + +def validate_notification_channel(notification_channel: str) -> str: + """ + Validate notification channel value. 
+ """ + valid_channels = set(get_notification_channels()) | set(get_additional_notification_channel_settings()) + if notification_channel not in valid_channels: + raise ValidationError(f'{notification_channel} is not a valid notification channel setting.') + return notification_channel + + +class UserNotificationPreferenceUpdateAllSerializer(serializers.Serializer): + """ + Serializer for user notification preferences update with custom field validators. + """ + notification_app = serializers.CharField( + required=True, + validators=[validate_notification_app, validate_notification_app_enabled] + ) + value = serializers.BooleanField(required=False) + notification_type = serializers.CharField( + required=True, + ) + notification_channel = serializers.CharField( + required=False, + validators=[validate_notification_channel] + ) + email_cadence = serializers.CharField( + required=False, + validators=[validate_email_cadence] + ) + + def validate(self, attrs): + """ + Cross-field validation for notification preference update. + """ + notification_app = attrs.get('notification_app') + notification_type = attrs.get('notification_type') + notification_channel = attrs.get('notification_channel') + email_cadence = attrs.get('email_cadence') + + # Validate email_cadence requirements + if email_cadence and not notification_type: + raise ValidationError({ + 'notification_type': 'notification_type is required for email_cadence.' + }) + + # Validate notification_channel requirements + if not email_cadence and notification_type and not notification_channel: + raise ValidationError({ + 'notification_channel': 'notification_channel is required for notification_type.' 
+ }) + + # Validate notification type + if all([not COURSE_NOTIFICATION_TYPES.get(notification_type), notification_type != "core"]): + raise ValidationError(f'{notification_type} is not a valid notification type.') + + # Validate notification type and channel is editable + if notification_channel and notification_type: + if not is_notification_type_channel_editable( + notification_app, + notification_type, + notification_channel + ): + raise ValidationError({ + 'notification_channel': ( + f'{notification_channel} is not editable for notification type ' + f'{notification_type}.' + ) + }) + + return attrs diff --git a/openedx/core/djangoapps/notifications/tasks.py b/openedx/core/djangoapps/notifications/tasks.py index 75ad3f1ecd0b..a1792d27c3c9 100644 --- a/openedx/core/djangoapps/notifications/tasks.py +++ b/openedx/core/djangoapps/notifications/tasks.py @@ -185,9 +185,12 @@ def send_notifications(user_ids, course_key: str, app_name, notification_type, c ) if grouping_enabled and existing_notifications.get(user_id, None): group_user_notifications(new_notification, existing_notifications[user_id]) + if not notifications_generated: + notifications_generated = True + notification_content = new_notification.content else: notifications.append(new_notification) - generated_notification_audience.append(user_id) + generated_notification_audience.append(user_id) # send notification to users but use bulk_create notification_objects = Notification.objects.bulk_create(notifications) diff --git a/openedx/core/djangoapps/notifications/tests/test_notification_grouping.py b/openedx/core/djangoapps/notifications/tests/test_notification_grouping.py index fea954a0eabb..a3aace632c8b 100644 --- a/openedx/core/djangoapps/notifications/tests/test_notification_grouping.py +++ b/openedx/core/djangoapps/notifications/tests/test_notification_grouping.py @@ -2,11 +2,13 @@ Tests for notification grouping module """ +import ddt import unittest from unittest.mock import MagicMock, patch from 
datetime import datetime from pytz import utc +from common.djangoapps.student.tests.factories import UserFactory from openedx.core.djangoapps.notifications.grouping_notifications import ( BaseNotificationGrouper, NotificationRegistry, @@ -15,6 +17,8 @@ get_user_existing_notifications, NewPostGrouper ) from openedx.core.djangoapps.notifications.models import Notification +from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase +from xmodule.modulestore.tests.factories import CourseFactory class TestNotificationRegistry(unittest.TestCase): @@ -143,7 +147,8 @@ def test_new_post_with_same_user(self): self.assertFalse(updated_context.get('grouped', False)) -class TestGroupUserNotifications(unittest.TestCase): +@ddt.ddt +class TestGroupUserNotifications(ModuleStoreTestCase): """ Tests for the group_user_notifications function """ @@ -179,6 +184,36 @@ def test_group_user_notifications_no_grouper(self): self.assertFalse(old_notification.save.called) + @ddt.data(datetime(2023, 1, 1, tzinfo=utc), None) + def test_not_grouped_when_notification_is_seen(self, last_seen): + """ + Notification is not grouped if the notification is marked as seen + """ + course = CourseFactory() + user = UserFactory() + notification_params = { + 'app_name': 'discussion', + 'notification_type': 'new_discussion_post', + 'course_id': course.id, + 'group_by_id': course.id, + 'content_url': 'http://example.com', + 'user': user, + 'last_seen': last_seen, + } + Notification.objects.create(content_context={ + 'username': 'User1', + 'post_title': ' Post title', + 'replier_name': 'User 1', + + }, **notification_params) + existing_notifications = get_user_existing_notifications( + [user.id], 'new_discussion_post', course.id, course.id + ) + if last_seen is None: + assert existing_notifications[user.id] is not None + else: + assert existing_notifications[user.id] is None + class TestGetUserExistingNotifications(unittest.TestCase): """ diff --git 
a/openedx/core/djangoapps/notifications/tests/test_utils.py b/openedx/core/djangoapps/notifications/tests/test_utils.py new file mode 100644 index 000000000000..e300c63c1d77 --- /dev/null +++ b/openedx/core/djangoapps/notifications/tests/test_utils.py @@ -0,0 +1,288 @@ +""" +Test cases for the notification utility functions. +""" +import unittest + +from openedx.core.djangoapps.notifications.utils import aggregate_notification_configs + + +class TestAggregateNotificationConfigs(unittest.TestCase): + """ + Test cases for the aggregate_notification_configs function. + """ + + def test_empty_configs_list_returns_default(self): + """ + If the configs list is empty, the default config should be returned. + """ + default_config = [{ + "grading": { + "enabled": False, + "non_editable": {}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }] + + result = aggregate_notification_configs(default_config) + assert result == default_config[0] + + def test_enable_notification_type(self): + """ + If a config enables a notification type, it should be enabled in the result. 
+ """ + + config_list = [ + { + "grading": { + "enabled": False, + "non_editable": {}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + } + } + }, + { + "grading": { + "enabled": True, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Weekly" + } + } + } + }] + + result = aggregate_notification_configs(config_list) + assert result["grading"]["enabled"] is True + assert result["grading"]["notification_types"]["core"]["web"] is True + assert result["grading"]["notification_types"]["core"]["push"] is True + assert result["grading"]["notification_types"]["core"]["email"] is True + # Use default email_cadence + assert result["grading"]["notification_types"]["core"]["email_cadence"] == "Weekly" + + def test_merge_core_notification_types(self): + """ + Core notification types should be merged across configs. + """ + + config_list = [ + { + "discussion": { + "enabled": True, + "core_notification_types": ["new_comment"], + "notification_types": {} + } + }, + { + "discussion": { + "core_notification_types": ["new_response", "new_comment"] + } + + }] + + result = aggregate_notification_configs(config_list) + assert set(result["discussion"]["core_notification_types"]) == { + "new_comment", "new_response" + } + + def test_multiple_configs_aggregate(self): + """ + Multiple configs should be aggregated together. 
+ """ + + config_list = [ + { + "updates": { + "enabled": False, + "notification_types": { + "course_updates": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + } + } + }, + { + "updates": { + "enabled": True, + "notification_types": { + "course_updates": { + "web": True, + "email_cadence": "Weekly" + } + } + } + }, + { + "updates": { + "notification_types": { + "course_updates": { + "push": True, + "email_cadence": "Weekly" + } + } + } + } + ] + + result = aggregate_notification_configs(config_list) + assert result["updates"]["enabled"] is True + assert result["updates"]["notification_types"]["course_updates"]["web"] is True + assert result["updates"]["notification_types"]["course_updates"]["push"] is True + assert result["updates"]["notification_types"]["course_updates"]["email"] is False + # Use default email_cadence + assert result["updates"]["notification_types"]["course_updates"]["email_cadence"] == "Weekly" + + def test_ignore_unknown_notification_types(self): + """ + Unknown notification types should be ignored. + """ + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }, + { + "grading": { + "notification_types": { + "unknown_type": { + "web": True, + "push": True, + "email": True + } + } + } + }] + + result = aggregate_notification_configs(config_list) + assert "unknown_type" not in result["grading"]["notification_types"] + assert result["grading"]["notification_types"]["core"]["web"] is False + + def test_ignore_unknown_categories(self): + """ + Unknown categories should be ignored. 
+ """ + + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": {} + } + }, + { + "unknown_category": { + "enabled": True, + "notification_types": {} + } + }] + + result = aggregate_notification_configs(config_list) + assert "unknown_category" not in result + assert result["grading"]["enabled"] is False + + def test_preserves_default_structure(self): + """ + The resulting config should have the same structure as the default config. + """ + + config_list = [ + { + "discussion": { + "enabled": False, + "non_editable": {"core": ["web"]}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + }, + "core_notification_types": [] + } + }, + { + "discussion": { + "enabled": True, + "extra_field": "should_not_appear" + } + } + ] + + result = aggregate_notification_configs(config_list) + assert set(result["discussion"].keys()) == { + "enabled", "non_editable", "notification_types", "core_notification_types" + } + assert "extra_field" not in result["discussion"] + + def test_if_email_cadence_has_diff_set_mix_as_value(self): + """ + If email_cadence is different in the configs, set it to "Mixed". 
+ """ + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }, + { + "grading": { + "enabled": True, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Weekly" + } + } + } + }, + { + "grading": { + "notification_types": { + "core": { + "email_cadence": "Monthly" + } + } + } + } + ] + + result = aggregate_notification_configs(config_list) + assert result["grading"]["notification_types"]["core"]["email_cadence"] == "Mixed" diff --git a/openedx/core/djangoapps/notifications/tests/test_views.py b/openedx/core/djangoapps/notifications/tests/test_views.py index 70e6fbc5739c..3e73c95be5be 100644 --- a/openedx/core/djangoapps/notifications/tests/test_views.py +++ b/openedx/core/djangoapps/notifications/tests/test_views.py @@ -2,11 +2,14 @@ Tests for the views in the notifications app. """ import json +from copy import deepcopy from datetime import datetime, timedelta from unittest import mock +from unittest.mock import patch import ddt from django.conf import settings +from django.contrib.auth import get_user_model from django.test.utils import override_settings from django.urls import reverse from edx_toggles.toggles.testutils import override_waffle_flag @@ -27,19 +30,21 @@ FORUM_ROLE_MODERATOR ) from openedx.core.djangoapps.notifications.config.waffle import ENABLE_NOTIFICATIONS +from openedx.core.djangoapps.notifications.email.utils import encrypt_object, encrypt_string from openedx.core.djangoapps.notifications.models import ( CourseNotificationPreference, Notification, get_course_notification_preference_config_version ) from openedx.core.djangoapps.notifications.serializers import NotificationCourseEnrollmentSerializer -from openedx.core.djangoapps.notifications.email.utils import encrypt_object, encrypt_string from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase 
from xmodule.modulestore.tests.factories import CourseFactory from ..base_notification import COURSE_NOTIFICATION_APPS, COURSE_NOTIFICATION_TYPES, NotificationAppManager from ..utils import get_notification_types_with_visibility_settings +User = get_user_model() + @ddt.ddt class CourseEnrollmentListViewTest(ModuleStoreTestCase): @@ -903,6 +908,7 @@ class UpdatePreferenceFromEncryptedDataView(ModuleStoreTestCase): """ Tests if preference is updated when encrypted url is hit """ + def setUp(self): """ Setup test case @@ -968,3 +974,310 @@ def remove_notifications_with_visibility_settings(expected_response): notification_type ) return expected_response + + +class UpdateAllNotificationPreferencesViewTests(APITestCase): + """ + Tests for the UpdateAllNotificationPreferencesView. + """ + + def setUp(self): + # Create test user + self.user = User.objects.create_user( + username='testuser', + password='testpass123' + ) + self.client = APIClient() + self.client.force_authenticate(user=self.user) + self.url = reverse('update-all-notification-preferences') + + # Complex notification config structure + self.base_config = { + "grading": { + "enabled": True, + "non_editable": {}, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "ora_staff_notification": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [] + }, + "updates": { + "enabled": True, + "non_editable": {}, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "course_updates": { + "web": True, + "push": True, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [] + }, + "discussion": { + "enabled": True, + "non_editable": { + "core": ["web"] + }, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "content_reported": { + 
"web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "new_question_post": { + "web": True, + "push": False, + "email": False, + "email_cadence": "Daily" + }, + "new_discussion_post": { + "web": True, + "push": False, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [ + "new_comment_on_response", + "new_comment", + "new_response", + "response_on_followed_post", + "comment_on_followed_post", + "response_endorsed_on_thread", + "response_endorsed" + ] + } + } + + # Create test notification preferences + self.preferences = [] + for i in range(3): + pref = CourseNotificationPreference.objects.create( + user=self.user, + course_id=f'course-v1:TestX+Test{i}+2024', + notification_preference_config=deepcopy(self.base_config), + is_active=True + ) + self.preferences.append(pref) + + # Create an inactive preference + self.inactive_pref = CourseNotificationPreference.objects.create( + user=self.user, + course_id='course-v1:TestX+Inactive+2024', + notification_preference_config=deepcopy(self.base_config), + is_active=False + ) + + def test_update_discussion_notification(self): + """ + Test updating discussion notification settings + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 'content_reported', + 'notification_channel': 'push', + 'value': False + } + + response = self.client.post(self.url, data, format='json') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + self.assertEqual(response.data['data']['total_updated'], 3) + + # Verify database updates + for pref in CourseNotificationPreference.objects.filter(is_active=True): + self.assertFalse( + pref.notification_preference_config['discussion']['notification_types']['content_reported']['push'] + ) + + def test_update_non_editable_field(self): + """ + Test attempting to update a non-editable field + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 
'core', + 'notification_channel': 'web', + 'value': False + } + + response = self.client.post(self.url, data, format='json') + + # Should fail because 'web' is non-editable for 'core' in discussion + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data['status'], 'error') + + # Verify database remains unchanged + for pref in CourseNotificationPreference.objects.filter(is_active=True): + self.assertTrue( + pref.notification_preference_config['discussion']['notification_types']['core']['web'] + ) + + def test_update_email_cadence(self): + """ + Test updating email cadence setting + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 'content_reported', + 'email_cadence': 'Weekly' + } + + response = self.client.post(self.url, data, format='json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + + # Verify database updates + for pref in CourseNotificationPreference.objects.filter(is_active=True): + notification_type = pref.notification_preference_config['discussion']['notification_types'][ + 'content_reported'] + self.assertEqual( + notification_type['email_cadence'], + 'Weekly' + ) + + @patch.dict('openedx.core.djangoapps.notifications.serializers.COURSE_NOTIFICATION_APPS', { + **COURSE_NOTIFICATION_APPS, + 'grading': { + 'enabled': False, + 'core_info': 'Notifications for submission grading.', + 'core_web': True, + 'core_email': True, + 'core_push': True, + 'core_email_cadence': 'Daily', + 'non_editable': [] + } + }) + def test_update_disabled_app(self): + """ + Test updating notification for a disabled app + """ + # Disable the grading app in all preferences + for pref in self.preferences: + config = pref.notification_preference_config + config['grading']['enabled'] = False + pref.notification_preference_config = config + pref.save() + + data = { + 'notification_app': 'grading', + 'notification_type': 'core', + 
'notification_channel': 'email', + 'value': False + } + response = self.client.post(self.url, data, format='json') + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data['status'], 'error') + + def test_invalid_serializer_data(self): + """ + Test handling of invalid input data + """ + test_cases = [ + { + 'notification_app': 'invalid_app', + 'notification_type': 'core', + 'notification_channel': 'push', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'invalid_type', + 'notification_channel': 'push', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'core', + 'notification_channel': 'invalid_channel', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'core', + 'notification_channel': 'email_cadence', + 'value': 'Invalid_Cadence' + } + ] + + for test_case in test_cases: + response = self.client.post(self.url, test_case, format='json') + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + +class GetAggregateNotificationPreferencesTest(APITestCase): + """ + Tests for the GetAggregateNotificationPreferences API view. 
+ """ + + def setUp(self): + # Set up a user and API client + self.user = User.objects.create_user(username='testuser', password='testpass') + self.client = APIClient() + self.client.force_authenticate(user=self.user) + self.url = reverse('notification-preferences-aggregated') # Adjust with the actual name + + def test_no_active_notification_preferences(self): + """ + Test case: No active notification preferences found for the user + """ + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data['status'], 'error') + self.assertEqual(response.data['message'], 'No active notification preferences found') + + @patch('openedx.core.djangoapps.notifications.views.aggregate_notification_configs') + def test_with_active_notification_preferences(self, mock_aggregate): + """ + Test case: Active notification preferences found for the user + """ + # Mock aggregate_notification_configs for a controlled output + mock_aggregate.return_value = {'mocked': 'data'} + + # Create active notification preferences for the user + CourseNotificationPreference.objects.create( + user=self.user, + is_active=True, + notification_preference_config={'example': 'config'} + ) + + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + self.assertEqual(response.data['message'], 'Notification preferences retrieved') + self.assertEqual(response.data['data'], {'mocked': 'data'}) + + def test_unauthenticated_user(self): + """ + Test case: Request without authentication + """ + # Test case: Request without authentication + self.client.logout() # Remove authentication + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) diff --git a/openedx/core/djangoapps/notifications/urls.py b/openedx/core/djangoapps/notifications/urls.py index 7f611bc2c4ca..9892fa72de0d 100644 --- 
a/openedx/core/djangoapps/notifications/urls.py +++ b/openedx/core/djangoapps/notifications/urls.py @@ -11,13 +11,13 @@ NotificationCountView, NotificationListAPIView, NotificationReadAPIView, + UpdateAllNotificationPreferencesView, UserNotificationPreferenceView, - preference_update_from_encrypted_username_view, + preference_update_from_encrypted_username_view, AggregatedNotificationPreferences ) router = routers.DefaultRouter() - urlpatterns = [ path('enrollments/', CourseEnrollmentListView.as_view(), name='enrollment-list'), re_path( @@ -25,6 +25,11 @@ UserNotificationPreferenceView.as_view(), name='notification-preferences' ), + path( + 'configurations/', + AggregatedNotificationPreferences.as_view(), + name='notification-preferences-aggregated' + ), path('', NotificationListAPIView.as_view(), name='notifications-list'), path('count/', NotificationCountView.as_view(), name='notifications-count'), path( @@ -35,6 +40,11 @@ path('read/', NotificationReadAPIView.as_view(), name='notifications-read'), path('preferences/update///', preference_update_from_encrypted_username_view, name='preference_update_from_encrypted_username_view'), + path( + 'preferences/update-all/', + UpdateAllNotificationPreferencesView.as_view(), + name='update-all-notification-preferences' + ), ] urlpatterns += router.urls diff --git a/openedx/core/djangoapps/notifications/utils.py b/openedx/core/djangoapps/notifications/utils.py index fa948dcf425e..34f9d71cb04b 100644 --- a/openedx/core/djangoapps/notifications/utils.py +++ b/openedx/core/djangoapps/notifications/utils.py @@ -1,11 +1,12 @@ """ Utils function for notifications app """ -from typing import Dict, List +import copy +from typing import Dict, List, Set from common.djangoapps.student.models import CourseAccessRole, CourseEnrollment from openedx.core.djangoapps.django_comment_common.models import Role -from openedx.core.djangoapps.notifications.config.waffle import ENABLE_NOTIFICATIONS, ENABLE_NEW_NOTIFICATION_VIEW +from 
openedx.core.djangoapps.notifications.config.waffle import ENABLE_NEW_NOTIFICATION_VIEW, ENABLE_NOTIFICATIONS from openedx.core.lib.cache_utils import request_cached @@ -158,3 +159,113 @@ def clean_arguments(kwargs): if kwargs.get('created', {}): clean_kwargs.update(kwargs.get('created')) return clean_kwargs + + +def update_notification_types( + app_config: Dict, + user_app_config: Dict, +) -> None: + """ + Update notification types for a specific category configuration. + """ + if "notification_types" not in user_app_config: + return + + for type_key, type_config in user_app_config["notification_types"].items(): + if type_key not in app_config["notification_types"]: + continue + + update_notification_fields( + app_config["notification_types"][type_key], + type_config, + ) + + +def update_notification_fields( + target_config: Dict, + source_config: Dict, +) -> None: + """ + Update individual notification fields (web, push, email) and email_cadence. + """ + for field in ["web", "push", "email"]: + if field in source_config: + target_config[field] |= source_config[field] + if "email_cadence" in source_config: + if not target_config.get("email_cadence") or isinstance(target_config.get("email_cadence"), str): + target_config["email_cadence"] = set() + + target_config["email_cadence"].add(source_config["email_cadence"]) + + +def update_core_notification_types(app_config: Dict, user_config: Dict) -> None: + """ + Update core notification types by merging existing and new types. + """ + if "core_notification_types" not in user_config: + return + + existing_types: Set = set(app_config.get("core_notification_types", [])) + existing_types.update(user_config["core_notification_types"]) + app_config["core_notification_types"] = list(existing_types) + + +def process_app_config( + app_config: Dict, + user_config: Dict, + app: str, + default_config: Dict, +) -> None: + """ + Process a single category configuration against another config. 
+ """ + if app not in user_config: + return + + user_app_config = user_config[app] + + # Update enabled status + app_config["enabled"] |= user_app_config.get("enabled", False) + + # Update core notification types + update_core_notification_types(app_config, user_app_config) + + # Update notification types + update_notification_types(app_config, user_app_config) + + +def aggregate_notification_configs(existing_user_configs: List[Dict]) -> Dict: + """ + Update default notification config with values from other configs. + Rules: + 1. Start with default config as base + 2. If any value is True in other configs, make it True + 3. Set email_cadence to "Mixed" if different cadences found, else use default + + Args: + existing_user_configs: List of notification config dictionaries to apply + + Returns: + Updated config following the same structure + """ + if not existing_user_configs: + return {} + + result_config = copy.deepcopy(existing_user_configs[0]) + apps = result_config.keys() + + for app in apps: + app_config = result_config[app] + + for user_config in existing_user_configs: + process_app_config(app_config, user_config, app, existing_user_configs[0]) + + # if email_cadence is mixed, set it to "Mixed" + for app in result_config: + for type_key, type_config in result_config[app]["notification_types"].items(): + if len(type_config.get("email_cadence", [])) > 1: + result_config[app]["notification_types"][type_key]["email_cadence"] = "Mixed" + else: + result_config[app]["notification_types"][type_key]["email_cadence"] = ( + result_config[app]["notification_types"][type_key]["email_cadence"].pop()) + return result_config diff --git a/openedx/core/djangoapps/notifications/views.py b/openedx/core/djangoapps/notifications/views.py index e87274088f84..8e41b11554c8 100644 --- a/openedx/core/djangoapps/notifications/views.py +++ b/openedx/core/djangoapps/notifications/views.py @@ -1,9 +1,11 @@ """ Views for the notifications API. 
""" +import copy from datetime import datetime, timedelta from django.conf import settings +from django.db import transaction from django.db.models import Count from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as _ @@ -17,10 +19,7 @@ from common.djangoapps.student.models import CourseEnrollment from openedx.core.djangoapps.notifications.email.utils import update_user_preferences_from_patch -from openedx.core.djangoapps.notifications.models import ( - CourseNotificationPreference, - get_course_notification_preference_config_version -) +from openedx.core.djangoapps.notifications.models import get_course_notification_preference_config_version from openedx.core.djangoapps.notifications.permissions import allow_any_authenticated_user from .base_notification import COURSE_NOTIFICATION_APPS @@ -32,14 +31,15 @@ notification_tray_opened_event, notifications_app_all_read_event ) -from .models import Notification +from .models import CourseNotificationPreference, Notification from .serializers import ( NotificationCourseEnrollmentSerializer, NotificationSerializer, UserCourseNotificationPreferenceSerializer, - UserNotificationPreferenceUpdateSerializer, + UserNotificationPreferenceUpdateAllSerializer, + UserNotificationPreferenceUpdateSerializer ) -from .utils import get_show_notifications_tray, get_is_new_notification_view_enabled +from .utils import get_is_new_notification_view_enabled, get_show_notifications_tray, aggregate_notification_configs @allow_any_authenticated_user() @@ -444,3 +444,144 @@ def preference_update_from_encrypted_username_view(request, username, patch): """ update_user_preferences_from_patch(username, patch) return Response({"result": "success"}, status=status.HTTP_200_OK) + + +@allow_any_authenticated_user() +class UpdateAllNotificationPreferencesView(APIView): + """ + API view for updating all notification preferences for the current user. 
+ """ + + def post(self, request): + """ + Update all notification preferences for the current user. + """ + # check if request have required params + serializer = UserNotificationPreferenceUpdateAllSerializer(data=request.data) + if not serializer.is_valid(): + return Response({ + 'status': 'error', + 'message': serializer.errors + }, status=status.HTTP_400_BAD_REQUEST) + # check if required config is not editable + try: + with transaction.atomic(): + # Get all active notification preferences for the current user + notification_preferences = ( + CourseNotificationPreference.objects + .select_for_update() + .filter( + user=request.user, + is_active=True + ) + ) + + if not notification_preferences.exists(): + return Response({ + 'status': 'error', + 'message': 'No active notification preferences found' + }, status=status.HTTP_404_NOT_FOUND) + + data = serializer.validated_data + app = data['notification_app'] + email_cadence = data.get('email_cadence', None) + channel = data.get('notification_channel', 'email_cadence' if email_cadence else None) + notification_type = data['notification_type'] + value = data.get('value', email_cadence if email_cadence else None) + + updated_courses = [] + errors = [] + + # Update each preference + for preference in notification_preferences: + try: + # Create a deep copy of the current config + updated_config = copy.deepcopy(preference.notification_preference_config) + + # Check if the path exists and update the value + if ( + updated_config.get(app, {}) + .get('notification_types', {}) + .get(notification_type, {}) + .get(channel) + ) is not None: + + # Update the specific setting in the config + updated_config[app]['notification_types'][notification_type][channel] = value + + # Update the notification preference + preference.notification_preference_config = updated_config + preference.save() + + updated_courses.append({ + 'course_id': str(preference.course_id), + 'current_setting': 
updated_config[app]['notification_types'][notification_type] + }) + else: + errors.append({ + 'course_id': str(preference.course_id), + 'error': f'Invalid path: {app}.notification_types.{notification_type}.{channel}' + }) + + except Exception as e: + errors.append({ + 'course_id': str(preference.course_id), + 'error': str(e) + }) + + response_data = { + 'status': 'success' if updated_courses else 'partial_success' if errors else 'error', + 'message': 'Notification preferences update completed', + 'data': { + 'updated_value': value, + 'notification_type': notification_type, + 'channel': channel, + 'app': app, + 'successfully_updated_courses': updated_courses, + 'total_updated': len(updated_courses), + 'total_courses': notification_preferences.count() + } + } + + if errors: + response_data['errors'] = errors + + return Response( + response_data, + status=status.HTTP_200_OK if updated_courses else status.HTTP_400_BAD_REQUEST + ) + + except Exception as e: + return Response({ + 'status': 'error', + 'message': str(e) + }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +@allow_any_authenticated_user() +class AggregatedNotificationPreferences(APIView): + """ + API view for getting the aggregate notification preferences for the current user. + """ + + def get(self, request): + """ + API view for getting the aggregate notification preferences for the current user. 
+ """ + notification_preferences = CourseNotificationPreference.objects.filter(user=request.user, is_active=True) + + if not notification_preferences.exists(): + return Response({ + 'status': 'error', + 'message': 'No active notification preferences found' + }, status=status.HTTP_404_NOT_FOUND) + notification_configs = notification_preferences.values_list('notification_preference_config', flat=True) + notification_configs = aggregate_notification_configs( + notification_configs + ) + + return Response({ + 'status': 'success', + 'message': 'Notification preferences retrieved', + 'data': notification_configs + }, status=status.HTTP_200_OK) diff --git a/openedx/core/djangoapps/schedules/resolvers.py b/openedx/core/djangoapps/schedules/resolvers.py index a07f9c5de7f5..40d565962b0c 100644 --- a/openedx/core/djangoapps/schedules/resolvers.py +++ b/openedx/core/djangoapps/schedules/resolvers.py @@ -382,6 +382,13 @@ def send(self, msg_type): language, context, ) + LOG.info( + 'Sending email to user: {} for Instructor-paced course with course-key: {} and language: {}'.format( + user.username, + self.course_id, + language + ) + ) with function_trace('enqueue_send_task'): self.async_send_task.apply_async((self.site.id, str(msg)), retry=False) # pylint: disable=no-member @@ -469,6 +476,13 @@ def send(self): # lint-amnesty, pylint: disable=arguments-differ self.course_id ) ) + LOG.info( + 'Sending email to user: {} for Self-paced course with course-key: {} and language: {}'.format( + user.username, + self.course_id, + language + ) + ) with function_trace('enqueue_send_task'): self.async_send_task.apply_async((self.site.id, str(msg)), retry=False) diff --git a/openedx/core/djangoapps/theming/management/commands/compile_sass.py b/openedx/core/djangoapps/theming/management/commands/compile_sass.py deleted file mode 100644 index fbfdd2f222a4..000000000000 --- a/openedx/core/djangoapps/theming/management/commands/compile_sass.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Management 
command for compiling sass. - -DEPRECATED in favor of `npm run compile-sass`. -""" -import shlex - -from django.core.management import BaseCommand -from django.conf import settings - -from pavelib.assets import run_deprecated_command_wrapper - - -class Command(BaseCommand): - """ - Compile theme sass and collect theme assets. - """ - - help = "DEPRECATED. Use 'npm run compile-sass' instead." - - # NOTE (CCB): This allows us to compile static assets in Docker containers without database access. - requires_system_checks = [] - - def add_arguments(self, parser): - """ - Add arguments for compile_sass command. - - Args: - parser (django.core.management.base.CommandParser): parsed for parsing command line arguments. - """ - parser.add_argument( - 'system', type=str, nargs='*', default=["lms", "cms"], - help="lms or studio", - ) - - # Named (optional) arguments - parser.add_argument( - '--theme-dirs', - dest='theme_dirs', - type=str, - nargs='+', - default=None, - help="List of dirs where given themes would be looked.", - ) - - parser.add_argument( - '--themes', - type=str, - nargs='+', - default=["all"], - help="List of themes whose sass need to compiled. Or 'no'/'all' to compile for no/all themes.", - ) - - # Named (optional) arguments - parser.add_argument( - '--force', - action='store_true', - default=False, - help="DEPRECATED. Full recompilation is now always forced.", - ) - parser.add_argument( - '--debug', - action='store_true', - default=False, - help="Disable Sass compression", - ) - - def handle(self, *args, **options): - """ - Handle compile_sass command. 
- """ - systems = set( - {"lms": "lms", "cms": "cms", "studio": "cms"}[sys] - for sys in options.get("system", ["lms", "cms"]) - ) - theme_dirs = options.get("theme_dirs") or settings.COMPREHENSIVE_THEME_DIRS or [] - themes_option = options.get("themes") or [] # '[]' means 'all' - if not settings.ENABLE_COMPREHENSIVE_THEMING: - compile_themes = False - themes = [] - elif "no" in themes_option: - compile_themes = False - themes = [] - elif "all" in themes_option: - compile_themes = True - themes = [] - else: - compile_themes = True - themes = themes_option - run_deprecated_command_wrapper( - old_command="./manage.py [lms|cms] compile_sass", - ignored_old_flags=list(set(["force"]) & set(options)), - new_command=shlex.join([ - "npm", - "run", - ("compile-sass-dev" if options.get("debug") else "compile-sass"), - "--", - *(["--skip-lms"] if "lms" not in systems else []), - *(["--skip-cms"] if "cms" not in systems else []), - *(["--skip-themes"] if not compile_themes else []), - *( - arg - for theme_dir in theme_dirs - for arg in ["--theme-dir", str(theme_dir)] - ), - *( - arg - for theme in themes - for arg in ["--theme", theme] - ), - ]), - ) diff --git a/openedx/core/djangoapps/util/management/commands/print_setting.py b/openedx/core/djangoapps/util/management/commands/print_setting.py index d90a17b9eb42..c53f49d23a19 100644 --- a/openedx/core/djangoapps/util/management/commands/print_setting.py +++ b/openedx/core/djangoapps/util/management/commands/print_setting.py @@ -3,7 +3,11 @@ ============= Django command to output a single Django setting. -Useful when paver or a shell script needs such a value. +Originally used by "paver" scripts before we removed them. +Still useful when a shell script needs such a value. +Keep in mind that the LMS/CMS startup time is slow, so if you invoke this +Django management multiple times in a command that gets run often, you are +going to be sad. 
This handles the one specific use case of the "print_settings" command from django-extensions that we were actually using. diff --git a/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py b/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py index dde2084e54cd..44dedcf42874 100644 --- a/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py +++ b/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py @@ -6,6 +6,7 @@ import logging from collections import defaultdict from datetime import datetime, timezone +from urllib.parse import unquote from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.db.transaction import atomic @@ -446,9 +447,20 @@ def _lookup_asset_url(self, block: XBlock, asset_path: str) -> str | None: .get(key=f"static/{asset_path}") ) except ObjectDoesNotExist: - # This means we see a path that _looks_ like it should be a static - # asset for this Component, but that static asset doesn't really - # exist. - return None + try: + # Retry with unquoted path. We don't always unquote because it would not + # be backwards-compatible, but we need to try both. + asset_path = unquote(asset_path) + content = ( + component_version + .componentversioncontent_set + .filter(content__has_file=True) + .get(key=f"static/{asset_path}") + ) + except ObjectDoesNotExist: + # This means we see a path that _looks_ like it should be a static + # asset for this Component, but that static asset doesn't really + # exist. 
+ return None return self._absolute_url_for_asset(component_version, asset_path) diff --git a/package.json b/package.json index 2f09f8a7df90..8ed93a322633 100644 --- a/package.json +++ b/package.json @@ -14,24 +14,25 @@ "watch-webpack": "npm run webpack-dev -- --watch", "watch-sass": "scripts/watch_sass.sh", "lint": "python scripts/eslint.py", - "test": "npm run test-cms && npm run test-lms && npm run test-xmodule && npm run test-common && npm run test-jest", - "test-kind-vanilla": "npm run test-cms-vanilla && npm run test-xmodule-vanilla && npm run test-common-vanilla", - "test-kind-require": "npm run test-cms-require && npm run test-common-require", - "test-kind-webpack": "npm run test-cms-webpack && npm run test-lms-webpack && npm run test-xmodule-webpack", - "test-cms": "npm run test-cms-vanilla && npm run test-cms-require", - "test-cms-vanilla": "npm run test-suite -- cms/static/karma_cms.conf.js", - "test-cms-require": "npm run test-suite -- cms/static/karma_cms_squire.conf.js", - "test-cms-webpack": "npm run test-suite -- cms/static/karma_cms_webpack.conf.js", - "test-lms": "echo 'WARNING: Webpack JS tests are disabled. No LMS JS tests will be run. See https://github.com/openedx/edx-platform/issues/35956 for details.'", - "test-lms-webpack": "npm run test-suite -- lms/static/karma_lms.conf.js", - "test-xmodule": "npm run test-xmodule-vanilla", - "test-xmodule-vanilla": "npm run test-suite -- xmodule/js/karma_xmodule.conf.js", - "test-xmodule-webpack": "npm run test-suite -- xmodule/js/karma_xmodule_webpack.conf.js", + "test": "npm run test-jest && npm run test-karma", + "test-jest": "jest", + "test-karma": "npm run test-karma-vanilla && npm run test-karma-require && echo 'WARNING: Skipped broken webpack tests. 
For details, see: https://github.com/openedx/edx-platform/issues/35956'", + "test-karma-vanilla": "npm run test-cms-vanilla && npm run test-xmodule-vanilla && npm run test-common-vanilla", + "test-karma-require": "npm run test-cms-require && npm run test-common-require", + "test-karma-webpack": "npm run test-cms-webpack && npm run test-lms-webpack && npm run test-xmodule-webpack", + "test-karma-conf": "${NODE_WRAPPER:-xvfb-run --auto-servernum} node --max_old_space_size=4096 node_modules/.bin/karma start --single-run=true --capture-timeout=60000 --browsers=FirefoxNoUpdates", + "test-cms": "npm run test-cms-vanilla && npm run test-cms-require && npm run test-cms-webpack", + "test-cms-vanilla": "npm run test-karma-conf -- cms/static/karma_cms.conf.js", + "test-cms-require": "npm run test-karma-conf -- cms/static/karma_cms_squire.conf.js", + "test-cms-webpack": "npm run test-karma-conf -- cms/static/karma_cms_webpack.conf.js", + "test-lms": "npm run test-jest && npm run test-lms-webpack", + "test-lms-webpack": "npm run test-karma-conf -- lms/static/karma_lms.conf.js", + "test-xmodule": "npm run test-xmodule-vanilla && npm run test-xmodule-webpack", + "test-xmodule-vanilla": "npm run test-karma-conf -- xmodule/js/karma_xmodule.conf.js", + "test-xmodule-webpack": "npm run test-karma-conf -- xmodule/js/karma_xmodule_webpack.conf.js", "test-common": "npm run test-common-vanilla && npm run test-common-require", - "test-common-vanilla": "npm run test-suite -- common/static/karma_common.conf.js", - "test-common-require": "npm run test-suite -- common/static/karma_common_requirejs.conf.js", - "test-suite": "${NODE_WRAPPER:-xvfb-run --auto-servernum} node --max_old_space_size=4096 node_modules/.bin/karma start --single-run=true --capture-timeout=60000 --browsers=FirefoxNoUpdates", - "test-jest": "jest" + "test-common-vanilla": "npm run test-karma-conf -- common/static/karma_common.conf.js", + "test-common-require": "npm run test-karma-conf -- 
common/static/karma_common_requirejs.conf.js" }, "dependencies": { "@babel/core": "7.26.0", diff --git a/pavelib/__init__.py b/pavelib/__init__.py deleted file mode 100644 index 24f05618bdd7..000000000000 --- a/pavelib/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -""" # lint-amnesty, pylint: disable=django-not-configured -paver commands -""" - - -from . import assets diff --git a/pavelib/assets.py b/pavelib/assets.py deleted file mode 100644 index f437b6427f93..000000000000 --- a/pavelib/assets.py +++ /dev/null @@ -1,506 +0,0 @@ -""" -Asset compilation and collection. - -This entire module is DEPRECATED. In Redwood, it exists just as a collection of temporary compatibility wrappers. -In Sumac, this module will be deleted. To migrate, follow the advice in the printed warnings and/or read the -instructions on the DEPR ticket: https://github.com/openedx/edx-platform/issues/31895 -""" - -import argparse -import glob -import json -import shlex -import traceback -from functools import wraps -from threading import Timer - -from paver import tasks -from paver.easy import call_task, cmdopts, consume_args, needs, no_help, sh, task -from watchdog.events import PatternMatchingEventHandler -from watchdog.observers import Observer # pylint: disable=unused-import # Used by Tutor. Remove after Sumac cut. - -from .utils.cmd import django_cmd -from .utils.envs import Env -from .utils.timer import timed - - -SYSTEMS = { - 'lms': 'lms', - 'cms': 'cms', - 'studio': 'cms', -} - -WARNING_SYMBOLS = "⚠️ " * 50 # A row of 'warning' emoji to catch CLI users' attention - - -def run_deprecated_command_wrapper(*, old_command, ignored_old_flags, new_command): - """ - Run the new version of shell command, plus a warning that the old version is deprecated. - """ - depr_warning = ( - "\n" + - f"{WARNING_SYMBOLS}\n" + - "\n" + - f"WARNING: '{old_command}' is DEPRECATED! 
It will be removed before Sumac.\n" + - "The command you ran is now just a temporary wrapper around a new,\n" + - "supported command, which you should use instead:\n" + - "\n" + - f"\t{new_command}\n" + - "\n" + - "Details: https://github.com/openedx/edx-platform/issues/31895\n" + - "".join( - f" WARNING: ignored deprecated paver flag '{flag}'\n" - for flag in ignored_old_flags - ) + - f"{WARNING_SYMBOLS}\n" + - "\n" - ) - # Print deprecation warning twice so that it's more likely to be seen in the logs. - print(depr_warning) - sh(new_command) - print(depr_warning) - - -def debounce(seconds=1): - """ - Prevents the decorated function from being called more than every `seconds` - seconds. Waits until calls stop coming in before calling the decorated - function. - - This is DEPRECATED. It exists in Redwood just to ease the transition for Tutor. - """ - def decorator(func): - func.timer = None - - @wraps(func) - def wrapper(*args, **kwargs): - def call(): - func(*args, **kwargs) - func.timer = None - if func.timer: - func.timer.cancel() - func.timer = Timer(seconds, call) - func.timer.start() - - return wrapper - return decorator - - -class SassWatcher(PatternMatchingEventHandler): - """ - Watches for sass file changes - - This is DEPRECATED. It exists in Redwood just to ease the transition for Tutor. - """ - ignore_directories = True - patterns = ['*.scss'] - - def register(self, observer, directories): - """ - register files with observer - Arguments: - observer (watchdog.observers.Observer): sass file observer - directories (list): list of directories to be register for sass watcher. 
- """ - for dirname in directories: - paths = [] - if '*' in dirname: - paths.extend(glob.glob(dirname)) - else: - paths.append(dirname) - - for obs_dirname in paths: - observer.schedule(self, obs_dirname, recursive=True) - - @debounce() - def on_any_event(self, event): - print('\tCHANGED:', event.src_path) - try: - compile_sass() # pylint: disable=no-value-for-parameter - except Exception: # pylint: disable=broad-except - traceback.print_exc() - - -@task -@no_help -@cmdopts([ - ('system=', 's', 'The system to compile sass for (defaults to all)'), - ('theme-dirs=', '-td', 'Theme dirs containing all themes (defaults to None)'), - ('themes=', '-t', 'The theme to compile sass for (defaults to None)'), - ('debug', 'd', 'Whether to use development settings'), - ('force', '', 'DEPRECATED. Full recompilation is now always forced.'), -]) -@timed -def compile_sass(options): - """ - Compile Sass to CSS. If command is called without any arguments, it will - only compile lms, cms sass for the open source theme. And none of the comprehensive theme's sass would be compiled. - - If you want to compile sass for all comprehensive themes you will have to run compile_sass - specifying all the themes that need to be compiled.. - - The following is a list of some possible ways to use this command. - - Command: - paver compile_sass - Description: - compile sass files for both lms and cms. If command is called like above (i.e. without any arguments) it will - only compile lms, cms sass for the open source theme. None of the theme's sass will be compiled. 
- - Command: - paver compile_sass --theme-dirs /edx/app/edxapp/edx-platform/themes --themes=red-theme - Description: - compile sass files for both lms and cms for 'red-theme' present in '/edx/app/edxapp/edx-platform/themes' - - Command: - paver compile_sass --theme-dirs=/edx/app/edxapp/edx-platform/themes --themes red-theme stanford-style - Description: - compile sass files for both lms and cms for 'red-theme' and 'stanford-style' present in - '/edx/app/edxapp/edx-platform/themes'. - - Command: - paver compile_sass --system=cms - --theme-dirs /edx/app/edxapp/edx-platform/themes /edx/app/edxapp/edx-platform/common/test/ - --themes red-theme stanford-style test-theme - Description: - compile sass files for cms only for 'red-theme', 'stanford-style' and 'test-theme' present in - '/edx/app/edxapp/edx-platform/themes' and '/edx/app/edxapp/edx-platform/common/test/'. - - This is a DEPRECATED COMPATIBILITY WRAPPER. Use `npm run compile-sass` instead. - """ - systems = [SYSTEMS[sys] for sys in get_parsed_option(options, 'system', ['lms', 'cms'])] # normalize studio->cms - run_deprecated_command_wrapper( - old_command="paver compile_sass", - ignored_old_flags=(set(["--force"]) & set(options)), - new_command=shlex.join([ - "npm", - "run", - ("compile-sass-dev" if options.get("debug") else "compile-sass"), - "--", - *(["--dry"] if tasks.environment.dry_run else []), - *(["--skip-lms"] if "lms" not in systems else []), - *(["--skip-cms"] if "cms" not in systems else []), - *( - arg - for theme_dir in get_parsed_option(options, 'theme_dirs', []) - for arg in ["--theme-dir", str(theme_dir)] - ), - *( - arg - for theme in get_parsed_option(options, "themes", []) - for arg in ["--theme", theme] - ), - ]), - ) - - -def _compile_sass(system, theme, debug, force, _timing_info): - """ - This is a DEPRECATED COMPATIBILITY WRAPPER - - It exists to ease the transition for Tutor in Redwood, which directly imported and used this function. 
- """ - run_deprecated_command_wrapper( - old_command="pavelib.assets:_compile_sass", - ignored_old_flags=(set(["--force"]) if force else set()), - new_command=[ - "npm", - "run", - ("compile-sass-dev" if debug else "compile-sass"), - "--", - *(["--dry"] if tasks.environment.dry_run else []), - *( - ["--skip-default", "--theme-dir", str(theme.parent), "--theme", str(theme.name)] - if theme - else [] - ), - ("--skip-cms" if system == "lms" else "--skip-lms"), - ] - ) - - -def process_npm_assets(): - """ - Process vendor libraries installed via NPM. - - This is a DEPRECATED COMPATIBILITY WRAPPER. It is now handled as part of `npm clean-install`. - If you need to invoke it explicitly, you can run `npm run postinstall`. - """ - run_deprecated_command_wrapper( - old_command="pavelib.assets:process_npm_assets", - ignored_old_flags=[], - new_command=shlex.join(["npm", "run", "postinstall"]), - ) - - -@task -@no_help -def process_xmodule_assets(): - """ - Process XModule static assets. - - This is a DEPRECATED COMPATIBILITY STUB. Refrences to it should be deleted. - """ - print( - "\n" + - f"{WARNING_SYMBOLS}", - "\n" + - "WARNING: 'paver process_xmodule_assets' is DEPRECATED! It will be removed before Sumac.\n" + - "\n" + - "Starting with Quince, it is no longer necessary to post-process XModule assets, so \n" + - "'paver process_xmodule_assets' is a no-op. Please simply remove it from your build scripts.\n" + - "\n" + - "Details: https://github.com/openedx/edx-platform/issues/31895\n" + - f"{WARNING_SYMBOLS}", - ) - - -def collect_assets(systems, settings, **kwargs): - """ - Collect static assets, including Django pipeline processing. - `systems` is a list of systems (e.g. 'lms' or 'studio' or both) - `settings` is the Django settings module to use. - `**kwargs` include arguments for using a log directory for collectstatic output. Defaults to /dev/null. 
- - This is a DEPRECATED COMPATIBILITY WRAPPER - - It exists to ease the transition for Tutor in Redwood, which directly imported and used this function. - """ - run_deprecated_command_wrapper( - old_command="pavelib.asset:collect_assets", - ignored_old_flags=[], - new_command=" && ".join( - "( " + - shlex.join( - ["./manage.py", SYSTEMS[sys], f"--settings={settings}", "collectstatic", "--noinput"] - ) + ( - "" - if "collect_log_dir" not in kwargs else - " > /dev/null" - if kwargs["collect_log_dir"] is None else - f"> {kwargs['collect_log_dir']}/{SYSTEMS[sys]}-collectstatic.out" - ) + - " )" - for sys in systems - ), - ) - - -def execute_compile_sass(args): - """ - Construct django management command compile_sass (defined in theming app) and execute it. - Args: - args: command line argument passed via update_assets command - - This is a DEPRECATED COMPATIBILITY WRAPPER. Use `npm run compile-sass` instead. - """ - for sys in args.system: - options = "" - options += " --theme-dirs " + " ".join(args.theme_dirs) if args.theme_dirs else "" - options += " --themes " + " ".join(args.themes) if args.themes else "" - options += " --debug" if args.debug else "" - - sh( - django_cmd( - sys, - args.settings, - "compile_sass {system} {options}".format( - system='cms' if sys == 'studio' else sys, - options=options, - ), - ), - ) - - -@task -@cmdopts([ - ('settings=', 's', "Django settings (defaults to devstack)"), - ('watch', 'w', "DEPRECATED. This flag never did anything anyway."), -]) -@timed -def webpack(options): - """ - Run a Webpack build. - - This is a DEPRECATED COMPATIBILITY WRAPPER. Use `npm run webpack` instead. 
- """ - settings = getattr(options, 'settings', Env.DEVSTACK_SETTINGS) - result = Env.get_django_settings(['STATIC_ROOT', 'WEBPACK_CONFIG_PATH'], "lms", settings=settings) - static_root_lms, config_path = result - static_root_cms, = Env.get_django_settings(["STATIC_ROOT"], "cms", settings=settings) - js_env_extra_config_setting, = Env.get_django_json_settings(["JS_ENV_EXTRA_CONFIG"], "cms", settings=settings) - js_env_extra_config = json.dumps(js_env_extra_config_setting or "{}") - node_env = "development" if config_path == 'webpack.dev.config.js' else "production" - run_deprecated_command_wrapper( - old_command="paver webpack", - ignored_old_flags=(set(["watch"]) & set(options)), - new_command=' '.join([ - f"WEBPACK_CONFIG_PATH={config_path}", - f"NODE_ENV={node_env}", - f"STATIC_ROOT_LMS={static_root_lms}", - f"STATIC_ROOT_CMS={static_root_cms}", - f"JS_ENV_EXTRA_CONFIG={js_env_extra_config}", - "npm", - "run", - "webpack", - ]), - ) - - -def get_parsed_option(command_opts, opt_key, default=None): - """ - Extract user command option and parse it. - Arguments: - command_opts: Command line arguments passed via paver command. - opt_key: name of option to get and parse - default: if `command_opt_value` not in `command_opts`, `command_opt_value` will be set to default. - Returns: - list or None - """ - command_opt_value = getattr(command_opts, opt_key, default) - if command_opt_value: - command_opt_value = listfy(command_opt_value) - - return command_opt_value - - -def listfy(data): - """ - Check and convert data to list. - Arguments: - data: data structure to be converted. - """ - - if isinstance(data, str): - data = data.split(',') - elif not isinstance(data, list): - data = [data] - - return data - - -@task -@cmdopts([ - ('background', 'b', 'DEPRECATED. Use shell tools like & to run in background if needed.'), - ('settings=', 's', "DEPRECATED. 
Django is not longer invoked to compile JS/Sass."), - ('theme-dirs=', '-td', 'The themes dir containing all themes (defaults to None)'), - ('themes=', '-t', 'DEPRECATED. All themes in --theme-dirs are now watched.'), - ('wait=', '-w', 'DEPRECATED. Watchdog\'s default wait time is now used.'), -]) -@timed -def watch_assets(options): - """ - Watch for changes to asset files, and regenerate js/css - - This is a DEPRECATED COMPATIBILITY WRAPPER. Use `npm run watch` instead. - """ - # Don't watch assets when performing a dry run - if tasks.environment.dry_run: - return - - theme_dirs = ':'.join(get_parsed_option(options, 'theme_dirs', [])) - run_deprecated_command_wrapper( - old_command="paver watch_assets", - ignored_old_flags=(set(["debug", "themes", "settings", "background"]) & set(options)), - new_command=shlex.join([ - *( - ["env", f"COMPREHENSIVE_THEME_DIRS={theme_dirs}"] - if theme_dirs else [] - ), - "npm", - "run", - "watch", - ]), - ) - - -@task -@needs( - 'pavelib.prereqs.install_node_prereqs', - 'pavelib.prereqs.install_python_prereqs', -) -@consume_args -@timed -def update_assets(args): - """ - Compile Sass, then collect static assets. - - This is a DEPRECATED COMPATIBILITY WRAPPER around other DEPRECATED COMPATIBILITY WRAPPERS. 
- The aggregate affect of this command can be achieved with this sequence of commands instead: - - * pip install -r requirements/edx/assets.txt # replaces install_python_prereqs - * npm clean-install # replaces install_node_prereqs - * npm run build # replaces execute_compile_sass and webpack - * ./manage.py lms collectstatic --noinput # replaces collect_assets (for LMS) - * ./manage.py cms collectstatic --noinput # replaces collect_assets (for CMS) - """ - parser = argparse.ArgumentParser(prog='paver update_assets') - parser.add_argument( - 'system', type=str, nargs='*', default=["lms", "studio"], - help="lms or studio", - ) - parser.add_argument( - '--settings', type=str, default=Env.DEVSTACK_SETTINGS, - help="Django settings module", - ) - parser.add_argument( - '--debug', action='store_true', default=False, - help="Enable all debugging", - ) - parser.add_argument( - '--debug-collect', action='store_true', default=False, - help="Disable collect static", - ) - parser.add_argument( - '--skip-collect', dest='collect', action='store_false', default=True, - help="Skip collection of static assets", - ) - parser.add_argument( - '--watch', action='store_true', default=False, - help="Watch files for changes", - ) - parser.add_argument( - '--theme-dirs', dest='theme_dirs', type=str, nargs='+', default=None, - help="base directories where themes are placed", - ) - parser.add_argument( - '--themes', type=str, nargs='+', default=None, - help="list of themes to compile sass for. ignored when --watch is used; all themes are watched.", - ) - parser.add_argument( - '--collect-log', dest="collect_log_dir", default=None, - help="When running collectstatic, direct output to specified log directory", - ) - parser.add_argument( - '--wait', type=float, default=0.0, - help="DEPRECATED. 
Watchdog's default wait time is now used.", - ) - args = parser.parse_args(args) - - # Build Webpack - call_task('pavelib.assets.webpack', options={'settings': args.settings}) - - # Compile sass for themes and system - execute_compile_sass(args) - - if args.collect: - if args.collect_log_dir: - collect_log_args = {"collect_log_dir": args.collect_log_dir} - elif args.debug or args.debug_collect: - collect_log_args = {"collect_log_dir": None} - else: - collect_log_args = {} - - collect_assets(args.system, args.settings, **collect_log_args) - - if args.watch: - call_task( - 'pavelib.assets.watch_assets', - options={ - 'background': not args.debug, - 'settings': args.settings, - 'theme_dirs': args.theme_dirs, - 'themes': args.themes, - 'wait': [float(args.wait)] - }, - ) diff --git a/pavelib/paver_tests/__init__.py b/pavelib/paver_tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/pavelib/paver_tests/pylint_test_list.json b/pavelib/paver_tests/pylint_test_list.json deleted file mode 100644 index d0e8b43aa93d..000000000000 --- a/pavelib/paver_tests/pylint_test_list.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - "foo/bar.py:192: [C0111(missing-docstring), Bliptv] Missing docstring", - "foo/bar/test.py:74: [C0322(no-space-before-operator)] Operator not preceded by a space", - "ugly/string/test.py:16: [C0103(invalid-name)] Invalid name \"whats up\" for type constant (should match (([A-Z_][A-Z0-9_]*)|(__.*__)|log|urlpatterns)$)", - "multiple/lines/test.py:72: [C0322(no-space-before-operator)] Operator not preceded by a space\nFOO_BAR='pipeline.storage.NonPackagingPipelineStorage'\n ^" -] diff --git a/pavelib/paver_tests/test_assets.py b/pavelib/paver_tests/test_assets.py deleted file mode 100644 index f7100a7f03c3..000000000000 --- a/pavelib/paver_tests/test_assets.py +++ /dev/null @@ -1,130 +0,0 @@ -"""Unit tests for the Paver asset tasks.""" - -import json -import os -from pathlib import Path -from unittest import TestCase -from 
unittest.mock import patch - -import ddt -import paver.easy -from paver import tasks - -import pavelib.assets -from pavelib.assets import Env - - -REPO_ROOT = Path(__file__).parent.parent.parent - -LMS_SETTINGS = { - "WEBPACK_CONFIG_PATH": "webpack.fake.config.js", - "STATIC_ROOT": "/fake/lms/staticfiles", - -} -CMS_SETTINGS = { - "WEBPACK_CONFIG_PATH": "webpack.fake.config", - "STATIC_ROOT": "/fake/cms/staticfiles", - "JS_ENV_EXTRA_CONFIG": json.dumps({"key1": [True, False], "key2": {"key2.1": 1369, "key2.2": "1369"}}), -} - - -def _mock_get_django_settings(django_settings, system, settings=None): # pylint: disable=unused-argument - return [(LMS_SETTINGS if system == "lms" else CMS_SETTINGS)[s] for s in django_settings] - - -@ddt.ddt -@patch.object(Env, 'get_django_settings', _mock_get_django_settings) -@patch.object(Env, 'get_django_json_settings', _mock_get_django_settings) -class TestDeprecatedPaverAssets(TestCase): - """ - Simple test to ensure that the soon-to-be-removed Paver commands are correctly translated into the new npm-run - commands. 
- """ - def setUp(self): - super().setUp() - self.maxDiff = None - os.environ['NO_PREREQ_INSTALL'] = 'true' - tasks.environment = tasks.Environment() - - def tearDown(self): - super().tearDown() - del os.environ['NO_PREREQ_INSTALL'] - - @ddt.data( - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={}, - expected=["npm run compile-sass --"], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"system": "lms,studio"}, - expected=["npm run compile-sass --"], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"debug": True}, - expected=["npm run compile-sass-dev --"], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"system": "lms"}, - expected=["npm run compile-sass -- --skip-cms"], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"system": "studio"}, - expected=["npm run compile-sass -- --skip-lms"], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"system": "cms", "theme_dirs": f"{REPO_ROOT}/common/test,{REPO_ROOT}/themes"}, - expected=[ - "npm run compile-sass -- --skip-lms " + - f"--theme-dir {REPO_ROOT}/common/test --theme-dir {REPO_ROOT}/themes" - ], - ), - dict( - task_name='pavelib.assets.compile_sass', - args=[], - kwargs={"theme_dirs": f"{REPO_ROOT}/common/test,{REPO_ROOT}/themes", "themes": "red-theme,test-theme"}, - expected=[ - "npm run compile-sass -- " + - f"--theme-dir {REPO_ROOT}/common/test --theme-dir {REPO_ROOT}/themes " + - "--theme red-theme --theme test-theme" - ], - ), - dict( - task_name='pavelib.assets.update_assets', - args=["lms", "studio", "--settings=fake.settings"], - kwargs={}, - expected=[ - ( - "WEBPACK_CONFIG_PATH=webpack.fake.config.js " + - "NODE_ENV=production " + - "STATIC_ROOT_LMS=/fake/lms/staticfiles " + - "STATIC_ROOT_CMS=/fake/cms/staticfiles " + - 'JS_ENV_EXTRA_CONFIG=' + - '"{\\"key1\\": [true, false], \\"key2\\": {\\"key2.1\\": 1369, \\"key2.2\\": \\"1369\\"}}" ' + - 
"npm run webpack" - ), - "python manage.py lms --settings=fake.settings compile_sass lms ", - "python manage.py cms --settings=fake.settings compile_sass cms ", - ( - "( ./manage.py lms --settings=fake.settings collectstatic --noinput ) && " + - "( ./manage.py cms --settings=fake.settings collectstatic --noinput )" - ), - ], - ), - ) - @ddt.unpack - @patch.object(pavelib.assets, 'sh') - def test_paver_assets_wrapper_invokes_new_commands(self, mock_sh, task_name, args, kwargs, expected): - paver.easy.call_task(task_name, args=args, options=kwargs) - assert [call_args[0] for (call_args, call_kwargs) in mock_sh.call_args_list] == expected diff --git a/pavelib/paver_tests/utils.py b/pavelib/paver_tests/utils.py deleted file mode 100644 index 1db26cf76a4c..000000000000 --- a/pavelib/paver_tests/utils.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Unit tests for the Paver server tasks.""" - - -import os -from unittest import TestCase -from uuid import uuid4 - -from paver import tasks -from paver.easy import BuildFailure - - -class PaverTestCase(TestCase): - """ - Base class for Paver test cases. - """ - def setUp(self): - super().setUp() - - # Show full length diffs upon test failure - self.maxDiff = None # pylint: disable=invalid-name - - # Create a mock Paver environment - tasks.environment = MockEnvironment() - - # Don't run pre-reqs - os.environ['NO_PREREQ_INSTALL'] = 'true' - - def tearDown(self): - super().tearDown() - tasks.environment = tasks.Environment() - del os.environ['NO_PREREQ_INSTALL'] - - @property - def task_messages(self): - """Returns the messages output by the Paver task.""" - return tasks.environment.messages - - @property - def platform_root(self): - """Returns the current platform's root directory.""" - return os.getcwd() - - def reset_task_messages(self): - """Clear the recorded message""" - tasks.environment.messages = [] - - -class MockEnvironment(tasks.Environment): - """ - Mock environment that collects information about Paver commands. 
- """ - def __init__(self): - super().__init__() - self.dry_run = True - self.messages = [] - - def info(self, message, *args): - """Capture any messages that have been recorded""" - if args: - output = message % args - else: - output = message - if not output.startswith("--->"): - self.messages.append(str(output)) - - -def fail_on_eslint(*args, **kwargs): - """ - For our tests, we need the call for diff-quality running eslint reports - to fail, since that is what is going to fail when we pass in a - percentage ("p") requirement. - """ - if "eslint" in args[0]: # lint-amnesty, pylint: disable=no-else-raise - raise BuildFailure('Subprocess return code: 1') - else: - if kwargs.get('capture', False): - return uuid4().hex - else: - return - - -def fail_on_npm_install(): - """ - Used to simulate an error when executing "npm install" - """ - return 1 - - -def unexpected_fail_on_npm_install(*args, **kwargs): # pylint: disable=unused-argument - """ - For our tests, we need the call for diff-quality running pycodestyle reports to fail, since that is what - is going to fail when we pass in a percentage ("p") requirement. - """ - if ["npm", "install", "--verbose"] == args[0]: # lint-amnesty, pylint: disable=no-else-raise - raise BuildFailure('Subprocess return code: 50') - else: - return diff --git a/pavelib/prereqs.py b/pavelib/prereqs.py deleted file mode 100644 index 4453176c94da..000000000000 --- a/pavelib/prereqs.py +++ /dev/null @@ -1,351 +0,0 @@ -""" -Install Python and Node prerequisites. 
-""" - - -import hashlib -import os -import re -import subprocess -import sys -from distutils import sysconfig # pylint: disable=deprecated-module - -from paver.easy import sh, task # lint-amnesty, pylint: disable=unused-import - -from .utils.envs import Env -from .utils.timer import timed - -PREREQS_STATE_DIR = os.getenv('PREREQ_CACHE_DIR', Env.REPO_ROOT / '.prereqs_cache') -NO_PREREQ_MESSAGE = "NO_PREREQ_INSTALL is set, not installing prereqs" -NO_PYTHON_UNINSTALL_MESSAGE = 'NO_PYTHON_UNINSTALL is set. No attempts will be made to uninstall old Python libs.' -COVERAGE_REQ_FILE = 'requirements/edx/coverage.txt' - -# If you make any changes to this list you also need to make -# a corresponding change to circle.yml, which is how the python -# prerequisites are installed for builds on circleci.com -toxenv = os.environ.get('TOXENV') -if toxenv and toxenv != 'quality': - PYTHON_REQ_FILES = ['requirements/edx/testing.txt'] -else: - PYTHON_REQ_FILES = ['requirements/edx/development.txt'] - -# Developers can have private requirements, for local copies of github repos, -# or favorite debugging tools, etc. -PRIVATE_REQS = 'requirements/edx/private.txt' -if os.path.exists(PRIVATE_REQS): - PYTHON_REQ_FILES.append(PRIVATE_REQS) - - -def str2bool(s): - s = str(s) - return s.lower() in ('yes', 'true', 't', '1') - - -def no_prereq_install(): - """ - Determine if NO_PREREQ_INSTALL should be truthy or falsy. - """ - return str2bool(os.environ.get('NO_PREREQ_INSTALL', 'False')) - - -def no_python_uninstall(): - """ Determine if we should run the uninstall_python_packages task. """ - return str2bool(os.environ.get('NO_PYTHON_UNINSTALL', 'False')) - - -def create_prereqs_cache_dir(): - """Create the directory for storing the hashes, if it doesn't exist already.""" - try: - os.makedirs(PREREQS_STATE_DIR) - except OSError: - if not os.path.isdir(PREREQS_STATE_DIR): - raise - - -def compute_fingerprint(path_list): - """ - Hash the contents of all the files and directories in `path_list`. 
- Returns the hex digest. - """ - - hasher = hashlib.sha1() - - for path_item in path_list: - - # For directories, create a hash based on the modification times - # of first-level subdirectories - if os.path.isdir(path_item): - for dirname in sorted(os.listdir(path_item)): - path_name = os.path.join(path_item, dirname) - if os.path.isdir(path_name): - hasher.update(str(os.stat(path_name).st_mtime).encode('utf-8')) - - # For files, hash the contents of the file - if os.path.isfile(path_item): - with open(path_item, "rb") as file_handle: - hasher.update(file_handle.read()) - - return hasher.hexdigest() - - -def prereq_cache(cache_name, paths, install_func): - """ - Conditionally execute `install_func()` only if the files/directories - specified by `paths` have changed. - - If the code executes successfully (no exceptions are thrown), the cache - is updated with the new hash. - """ - # Retrieve the old hash - cache_filename = cache_name.replace(" ", "_") - cache_file_path = os.path.join(PREREQS_STATE_DIR, f"{cache_filename}.sha1") - old_hash = None - if os.path.isfile(cache_file_path): - with open(cache_file_path) as cache_file: - old_hash = cache_file.read() - - # Compare the old hash to the new hash - # If they do not match (either the cache hasn't been created, or the files have changed), - # then execute the code within the block. - new_hash = compute_fingerprint(paths) - if new_hash != old_hash: - install_func() - - # Update the cache with the new hash - # If the code executed within the context fails (throws an exception), - # then this step won't get executed. 
- create_prereqs_cache_dir() - with open(cache_file_path, "wb") as cache_file: - # Since the pip requirement files are modified during the install - # process, we need to store the hash generated AFTER the installation - post_install_hash = compute_fingerprint(paths) - cache_file.write(post_install_hash.encode('utf-8')) - else: - print(f'{cache_name} unchanged, skipping...') - - -def node_prereqs_installation(): - """ - Configures npm and installs Node prerequisites - """ - # Before July 2023, these directories were created and written to - # as root. Afterwards, they are created as being owned by the - # `app` user -- but also need to be deleted by that user (due to - # how npm runs post-install scripts.) Developers with an older - # devstack installation who are reprovisioning will see errors - # here if the files are still owned by root. Deleting the files in - # advance prevents this error. - # - # This hack should probably be left in place for at least a year. - # See ADR 17 for more background on the transition. - sh("rm -rf common/static/common/js/vendor/ common/static/common/css/vendor/") - # At the time of this writing, the js dir has git-versioned files - # but the css dir does not, so the latter would have been created - # as root-owned (in the process of creating the vendor - # subdirectory). Delete it only if empty, just in case - # git-versioned files are added later. - sh("rmdir common/static/common/css || true") - - # NPM installs hang sporadically. Log the installation process so that we - # determine if any packages are chronic offenders. - npm_log_file_path = f'{Env.GEN_LOG_DIR}/npm-install.log' - npm_log_file = open(npm_log_file_path, 'wb') # lint-amnesty, pylint: disable=consider-using-with - npm_command = 'npm ci --verbose'.split() - - # The implementation of Paver's `sh` function returns before the forked - # actually returns. 
Using a Popen object so that we can ensure that - # the forked process has returned - proc = subprocess.Popen(npm_command, stderr=npm_log_file) # lint-amnesty, pylint: disable=consider-using-with - retcode = proc.wait() - if retcode == 1: - raise Exception(f"npm install failed: See {npm_log_file_path}") - print("Successfully clean-installed NPM packages. Log found at {}".format( - npm_log_file_path - )) - - -def python_prereqs_installation(): - """ - Installs Python prerequisites - """ - # edx-platform installs some Python projects from within the edx-platform repo itself. - sh("pip install -e .") - for req_file in PYTHON_REQ_FILES: - pip_install_req_file(req_file) - - -def pip_install_req_file(req_file): - """Pip install the requirements file.""" - pip_cmd = 'pip install -q --disable-pip-version-check --exists-action w' - sh(f"{pip_cmd} -r {req_file}") - - -@task -@timed -def install_node_prereqs(): - """ - Installs Node prerequisites - """ - if no_prereq_install(): - print(NO_PREREQ_MESSAGE) - return - - prereq_cache("Node prereqs", ["package.json", "package-lock.json"], node_prereqs_installation) - - -# To add a package to the uninstall list, just add it to this list! No need -# to touch any other part of this file. -PACKAGES_TO_UNINSTALL = [ - "MySQL-python", # Because mysqlclient shares the same directory name - "South", # Because it interferes with Django 1.8 migrations. - "edxval", # Because it was bork-installed somehow. - "django-storages", - "django-oauth2-provider", # Because now it's called edx-django-oauth2-provider. 
- "edx-oauth2-provider", # Because it moved from github to pypi - "enum34", # Because enum34 is not needed in python>3.4 - "i18n-tools", # Because now it's called edx-i18n-tools - "moto", # Because we no longer use it and it conflicts with recent jsondiff versions - "python-saml", # Because python3-saml shares the same directory name - "pytest-faulthandler", # Because it was bundled into pytest - "djangorestframework-jwt", # Because now its called drf-jwt. -] - - -@task -@timed -def uninstall_python_packages(): - """ - Uninstall Python packages that need explicit uninstallation. - - Some Python packages that we no longer want need to be explicitly - uninstalled, notably, South. Some other packages were once installed in - ways that were resistant to being upgraded, like edxval. Also uninstall - them. - """ - - if no_python_uninstall(): - print(NO_PYTHON_UNINSTALL_MESSAGE) - return - - # So that we don't constantly uninstall things, use a hash of the packages - # to be uninstalled. Check it, and skip this if we're up to date. - hasher = hashlib.sha1() - hasher.update(repr(PACKAGES_TO_UNINSTALL).encode('utf-8')) - expected_version = hasher.hexdigest() - state_file_path = os.path.join(PREREQS_STATE_DIR, "Python_uninstall.sha1") - create_prereqs_cache_dir() - - if os.path.isfile(state_file_path): - with open(state_file_path) as state_file: - version = state_file.read() - if version == expected_version: - print('Python uninstalls unchanged, skipping...') - return - - # Run pip to find the packages we need to get rid of. Believe it or not, - # edx-val is installed in a way that it is present twice, so we have a loop - # to really really get rid of it. 
- for _ in range(3): - uninstalled = False - frozen = sh("pip freeze", capture=True) - - for package_name in PACKAGES_TO_UNINSTALL: - if package_in_frozen(package_name, frozen): - # Uninstall the pacakge - sh(f"pip uninstall --disable-pip-version-check -y {package_name}") - uninstalled = True - if not uninstalled: - break - else: - # We tried three times and didn't manage to get rid of the pests. - print("Couldn't uninstall unwanted Python packages!") - return - - # Write our version. - with open(state_file_path, "wb") as state_file: - state_file.write(expected_version.encode('utf-8')) - - -def package_in_frozen(package_name, frozen_output): - """Is this package in the output of 'pip freeze'?""" - # Look for either: - # - # PACKAGE-NAME== - # - # or: - # - # blah_blah#egg=package_name-version - # - pattern = r"(?mi)^{pkg}==|#egg={pkg_under}-".format( - pkg=re.escape(package_name), - pkg_under=re.escape(package_name.replace("-", "_")), - ) - return bool(re.search(pattern, frozen_output)) - - -@task -@timed -def install_coverage_prereqs(): - """ Install python prereqs for measuring coverage. """ - if no_prereq_install(): - print(NO_PREREQ_MESSAGE) - return - pip_install_req_file(COVERAGE_REQ_FILE) - - -@task -@timed -def install_python_prereqs(): - """ - Installs Python prerequisites. - """ - if no_prereq_install(): - print(NO_PREREQ_MESSAGE) - return - - uninstall_python_packages() - - # Include all of the requirements files in the fingerprint. - files_to_fingerprint = list(PYTHON_REQ_FILES) - - # Also fingerprint the directories where packages get installed: - # ("/edx/app/edxapp/venvs/edxapp/lib/python2.7/site-packages") - files_to_fingerprint.append(sysconfig.get_python_lib()) - - # In a virtualenv, "-e installs" get put in a src directory. 
- if Env.PIP_SRC: - src_dir = Env.PIP_SRC - else: - src_dir = os.path.join(sys.prefix, "src") - if os.path.isdir(src_dir): - files_to_fingerprint.append(src_dir) - - # Also fingerprint this source file, so that if the logic for installations - # changes, we will redo the installation. - this_file = __file__ - if this_file.endswith(".pyc"): - this_file = this_file[:-1] # use the .py file instead of the .pyc - files_to_fingerprint.append(this_file) - - prereq_cache("Python prereqs", files_to_fingerprint, python_prereqs_installation) - - -@task -@timed -def install_prereqs(): - """ - Installs Node and Python prerequisites - """ - if no_prereq_install(): - print(NO_PREREQ_MESSAGE) - return - - if not str2bool(os.environ.get('SKIP_NPM_INSTALL', 'False')): - install_node_prereqs() - install_python_prereqs() - log_installed_python_prereqs() - - -def log_installed_python_prereqs(): - """ Logs output of pip freeze for debugging. """ - sh("pip freeze > {}".format(Env.GEN_LOG_DIR + "/pip_freeze.log")) diff --git a/pavelib/utils/__init__.py b/pavelib/utils/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/pavelib/utils/cmd.py b/pavelib/utils/cmd.py deleted file mode 100644 index a350c90a6a96..000000000000 --- a/pavelib/utils/cmd.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Helper functions for constructing shell commands. -""" - - -def cmd(*args): - """ - Concatenate the arguments into a space-separated shell command. - """ - return " ".join(str(arg) for arg in args if arg) - - -def django_cmd(sys, settings, *args): - """ - Construct a Django management command. - - `sys` is either 'lms' or 'studio'. - `settings` is the Django settings module (such as "dev" or "test") - `args` are concatenated to form the rest of the command. 
- """ - # Maintain backwards compatibility with manage.py, - # which calls "studio" "cms" - sys = 'cms' if sys == 'studio' else sys - return cmd("python manage.py", sys, f"--settings={settings}", *args) diff --git a/pavelib/utils/envs.py b/pavelib/utils/envs.py deleted file mode 100644 index 7dc9870fbdea..000000000000 --- a/pavelib/utils/envs.py +++ /dev/null @@ -1,271 +0,0 @@ -""" -Helper functions for loading environment settings. -""" -import configparser -import json -import os -import sys -from time import sleep - -from lazy import lazy -from path import Path as path -from paver.easy import BuildFailure, sh - -from pavelib.utils.cmd import django_cmd - - -def repo_root(): - """ - Get the root of the git repository (edx-platform). - - This sometimes fails on Docker Devstack, so it's been broken - down with some additional error handling. It usually starts - working within 30 seconds or so; for more details, see - https://openedx.atlassian.net/browse/PLAT-1629 and - https://github.com/docker/for-mac/issues/1509 - """ - file_path = path(__file__) - attempt = 1 - while True: - try: - absolute_path = file_path.abspath() - break - except OSError: - print(f'Attempt {attempt}/180 to get an absolute path failed') - if attempt < 180: - attempt += 1 - sleep(1) - else: - print('Unable to determine the absolute path of the edx-platform repo, aborting') - raise - return absolute_path.parent.parent.parent - - -class Env: - """ - Load information about the execution environment. - """ - - # Root of the git repository (edx-platform) - REPO_ROOT = repo_root() - - # Reports Directory - REPORT_DIR = REPO_ROOT / 'reports' - METRICS_DIR = REPORT_DIR / 'metrics' - QUALITY_DIR = REPORT_DIR / 'quality_junitxml' - - # Generic log dir - GEN_LOG_DIR = REPO_ROOT / "test_root" / "log" - - # Python unittest dirs - PYTHON_COVERAGERC = REPO_ROOT / ".coveragerc" - - # Which Python version should be used in xdist workers? 
- PYTHON_VERSION = os.environ.get("PYTHON_VERSION", "2.7") - - # Directory that videos are served from - VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video" - - PRINT_SETTINGS_LOG_FILE = GEN_LOG_DIR / "print_settings.log" - - # Detect if in a Docker container, and if so which one - FRONTEND_TEST_SERVER_HOST = os.environ.get('FRONTEND_TEST_SERVER_HOSTNAME', '0.0.0.0') - USING_DOCKER = FRONTEND_TEST_SERVER_HOST != '0.0.0.0' - DEVSTACK_SETTINGS = 'devstack_docker' if USING_DOCKER else 'devstack' - TEST_SETTINGS = 'test' - - # Mongo databases that will be dropped before/after the tests run - MONGO_HOST = 'localhost' - - # Test Ids Directory - TEST_DIR = REPO_ROOT / ".testids" - - # Configured browser to use for the js test suites - SELENIUM_BROWSER = os.environ.get('SELENIUM_BROWSER', 'firefox') - if USING_DOCKER: - KARMA_BROWSER = 'ChromeDocker' if SELENIUM_BROWSER == 'chrome' else 'FirefoxDocker' - else: - KARMA_BROWSER = 'FirefoxNoUpdates' - - # Files used to run each of the js test suites - # TODO: We have [temporarily disabled] the three Webpack-based tests suites. They have been silently - # broken for a long time; after noticing they were broken, we added the DieHardPlugin to - # webpack.common.config.js to prevent future silent breakage, but have not yet been able to - # fix and re-enable the suites. Note that the LMS suite is all Webpack-based even though it's - # not in the name. 
- # Issue: https://github.com/openedx/edx-platform/issues/35956 - KARMA_CONFIG_FILES = [ - REPO_ROOT / 'cms/static/karma_cms.conf.js', - REPO_ROOT / 'cms/static/karma_cms_squire.conf.js', - ## [temporarily disabled] REPO_ROOT / 'cms/static/karma_cms_webpack.conf.js', - ## [temporarily disabled] REPO_ROOT / 'lms/static/karma_lms.conf.js', - REPO_ROOT / 'xmodule/js/karma_xmodule.conf.js', - ## [temporarily disabled] REPO_ROOT / 'xmodule/js/karma_xmodule_webpack.conf.js', - REPO_ROOT / 'common/static/karma_common.conf.js', - REPO_ROOT / 'common/static/karma_common_requirejs.conf.js', - ] - - JS_TEST_ID_KEYS = [ - 'cms', - 'cms-squire', - ## [temporarily-disabled] 'cms-webpack', - ## [temporarily-disabled] 'lms', - 'xmodule', - ## [temporarily-disabled] 'xmodule-webpack', - 'common', - 'common-requirejs', - 'jest-snapshot' - ] - - JS_REPORT_DIR = REPORT_DIR / 'javascript' - - # Directories used for pavelib/ tests - IGNORED_TEST_DIRS = ('__pycache__', '.cache', '.pytest_cache') - LIB_TEST_DIRS = [path("pavelib/paver_tests"), path("scripts/xsslint/tests")] - - # Directory for i18n test reports - I18N_REPORT_DIR = REPORT_DIR / 'i18n' - - # Directory for keeping src folder that comes with pip installation. - # Setting this is equivalent to passing `--src ` to pip directly. - PIP_SRC = os.environ.get("PIP_SRC") - - # Service variant (lms, cms, etc.) configured with an environment variable - # We use this to determine which envs.json file to load. 
- SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None) - - # If service variant not configured in env, then pass the correct - # environment for lms / cms - if not SERVICE_VARIANT: # this will intentionally catch ""; - if any(i in sys.argv[1:] for i in ('cms', 'studio')): - SERVICE_VARIANT = 'cms' - else: - SERVICE_VARIANT = 'lms' - - @classmethod - def get_django_settings(cls, django_settings, system, settings=None, print_setting_args=None): - """ - Interrogate Django environment for specific settings values - :param django_settings: list of django settings values to get - :param system: the django app to use when asking for the setting (lms | cms) - :param settings: the settings file to use when asking for the value - :param print_setting_args: the additional arguments to send to print_settings - :return: unicode value of the django setting - """ - if not settings: - settings = os.environ.get("EDX_PLATFORM_SETTINGS", "aws") - log_dir = os.path.dirname(cls.PRINT_SETTINGS_LOG_FILE) - if not os.path.exists(log_dir): - os.makedirs(log_dir) - settings_length = len(django_settings) - django_settings = ' '.join(django_settings) # parse_known_args makes a list again - print_setting_args = ' '.join(print_setting_args or []) - try: - value = sh( - django_cmd( - system, - settings, - "print_setting {django_settings} 2>{log_file} {print_setting_args}".format( - django_settings=django_settings, - print_setting_args=print_setting_args, - log_file=cls.PRINT_SETTINGS_LOG_FILE - ).strip() - ), - capture=True - ) - # else for cases where values are not found & sh returns one None value - return tuple(str(value).splitlines()) if value else tuple(None for _ in range(settings_length)) - except BuildFailure: - print(f"Unable to print the value of the {django_settings} setting:") - with open(cls.PRINT_SETTINGS_LOG_FILE) as f: - print(f.read()) - sys.exit(1) - - @classmethod - def get_django_json_settings(cls, django_settings, system, settings=None): - """ - Interrogate Django 
environment for specific settings value - :param django_settings: list of django settings values to get - :param system: the django app to use when asking for the setting (lms | cms) - :param settings: the settings file to use when asking for the value - :return: json string value of the django setting - """ - return cls.get_django_settings( - django_settings, - system, - settings=settings, - print_setting_args=["--json"], - ) - - @classmethod - def covered_modules(cls): - """ - List the source modules listed in .coveragerc for which coverage - will be measured. - """ - coveragerc = configparser.RawConfigParser() - coveragerc.read(cls.PYTHON_COVERAGERC) - modules = coveragerc.get('run', 'source') - result = [] - for module in modules.split('\n'): - module = module.strip() - if module: - result.append(module) - return result - - @lazy - def env_tokens(self): - """ - Return a dict of environment settings. - If we couldn't find the JSON file, issue a warning and return an empty dict. - """ - - # Find the env JSON file - if self.SERVICE_VARIANT: - env_path = self.REPO_ROOT.parent / f"{self.SERVICE_VARIANT}.env.json" - else: - env_path = path("env.json").abspath() - - # If the file does not exist, here or one level up, - # issue a warning and return an empty dict - if not env_path.isfile(): - env_path = env_path.parent.parent / env_path.basename() - if not env_path.isfile(): - print( - "Warning: could not find environment JSON file " - "at '{path}'".format(path=env_path), - file=sys.stderr, - ) - return {} - - # Otherwise, load the file as JSON and return the resulting dict - try: - with open(env_path) as env_file: - return json.load(env_file) - - except ValueError: - print( - "Error: Could not parse JSON " - "in {path}".format(path=env_path), - file=sys.stderr, - ) - sys.exit(1) - - @lazy - def feature_flags(self): - """ - Return a dictionary of feature flags configured by the environment. 
- """ - return self.env_tokens.get('FEATURES', {}) - - @classmethod - def rsync_dirs(cls): - """ - List the directories that should be synced during pytest-xdist - execution. Needs to include all modules for which coverage is - measured, not just the tests being run. - """ - result = set() - for module in cls.covered_modules(): - result.add(module.split('/')[0]) - return result diff --git a/pavelib/utils/process.py b/pavelib/utils/process.py deleted file mode 100644 index da2dafa8803d..000000000000 --- a/pavelib/utils/process.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -Helper functions for managing processes. -""" - - -import atexit -import os -import signal -import subprocess -import sys - -import psutil -from paver import tasks - - -def kill_process(proc): - """ - Kill the process `proc` created with `subprocess`. - """ - p1_group = psutil.Process(proc.pid) - child_pids = p1_group.children(recursive=True) - - for child_pid in child_pids: - os.kill(child_pid.pid, signal.SIGKILL) - - -def run_multi_processes(cmd_list, out_log=None, err_log=None): - """ - Run each shell command in `cmd_list` in a separate process, - piping stdout to `out_log` (a path) and stderr to `err_log` (also a path). - - Terminates the processes on CTRL-C and ensures the processes are killed - if an error occurs. - """ - kwargs = {'shell': True, 'cwd': None} - pids = [] - - if out_log: - out_log_file = open(out_log, 'w') # lint-amnesty, pylint: disable=consider-using-with - kwargs['stdout'] = out_log_file - - if err_log: - err_log_file = open(err_log, 'w') # lint-amnesty, pylint: disable=consider-using-with - kwargs['stderr'] = err_log_file - - # If the user is performing a dry run of a task, then just log - # the command strings and return so that no destructive operations - # are performed. 
- if tasks.environment.dry_run: - for cmd in cmd_list: - tasks.environment.info(cmd) - return - - try: - for cmd in cmd_list: - pids.extend([subprocess.Popen(cmd, **kwargs)]) - - # pylint: disable=unused-argument - def _signal_handler(*args): - """ - What to do when process is ended - """ - print("\nEnding...") - - signal.signal(signal.SIGINT, _signal_handler) - print("Enter CTL-C to end") - signal.pause() - print("Processes ending") - - # pylint: disable=broad-except - except Exception as err: - print(f"Error running process {err}", file=sys.stderr) - - finally: - for pid in pids: - kill_process(pid) - - -def run_process(cmd, out_log=None, err_log=None): - """ - Run the shell command `cmd` in a separate process, - piping stdout to `out_log` (a path) and stderr to `err_log` (also a path). - - Terminates the process on CTRL-C or if an error occurs. - """ - return run_multi_processes([cmd], out_log=out_log, err_log=err_log) - - -def run_background_process(cmd, out_log=None, err_log=None, cwd=None): - """ - Runs a command as a background process. Sends SIGINT at exit. - """ - - kwargs = {'shell': True, 'cwd': cwd} - if out_log: - out_log_file = open(out_log, 'w') # lint-amnesty, pylint: disable=consider-using-with - kwargs['stdout'] = out_log_file - - if err_log: - err_log_file = open(err_log, 'w') # lint-amnesty, pylint: disable=consider-using-with - kwargs['stderr'] = err_log_file - - proc = subprocess.Popen(cmd, **kwargs) # lint-amnesty, pylint: disable=consider-using-with - - def exit_handler(): - """ - Send SIGINT to the process's children. This is important - for running commands under coverage, as coverage will not - produce the correct artifacts if the child process isn't - killed properly. 
- """ - p1_group = psutil.Process(proc.pid) - child_pids = p1_group.children(recursive=True) - - for child_pid in child_pids: - os.kill(child_pid.pid, signal.SIGINT) - - # Wait for process to actually finish - proc.wait() - - atexit.register(exit_handler) diff --git a/pavelib/utils/test/__init__.py b/pavelib/utils/test/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/pavelib/utils/timer.py b/pavelib/utils/timer.py deleted file mode 100644 index fc6f3003736a..000000000000 --- a/pavelib/utils/timer.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -Tools for timing paver tasks -""" - - -import json -import logging -import os -import sys -import traceback -from datetime import datetime -from os.path import dirname, exists - -import wrapt - -LOGGER = logging.getLogger(__file__) -PAVER_TIMER_LOG = os.environ.get('PAVER_TIMER_LOG') - - -@wrapt.decorator -def timed(wrapped, instance, args, kwargs): # pylint: disable=unused-argument - """ - Log execution time for a function to a log file. - - Logging is only actually executed if the PAVER_TIMER_LOG environment variable - is set. That variable is expanded for the current user and current - environment variables. It also can have :meth:`~Datetime.strftime` format - identifiers which are substituted using the time when the task started. - - For example, ``PAVER_TIMER_LOG='~/.paver.logs/%Y-%d-%m.log'`` will create a new - log file every day containing reconds for paver tasks run that day, and - will put those log files in the ``.paver.logs`` directory inside the users - home. - - Must be earlier in the decorator stack than the paver task declaration. - """ - start = datetime.utcnow() - exception_info = {} - try: - return wrapped(*args, **kwargs) - except Exception as exc: - exception_info = { - 'exception': "".join(traceback.format_exception_only(type(exc), exc)).strip() - } - raise - finally: - end = datetime.utcnow() - - # N.B. 
This is intended to provide a consistent interface and message format - # across all of Open edX tooling, so it deliberately eschews standard - # python logging infrastructure. - if PAVER_TIMER_LOG is not None: - - log_path = start.strftime(PAVER_TIMER_LOG) - - log_message = { - 'python_version': sys.version, - 'task': f"{wrapped.__module__}.{wrapped.__name__}", - 'args': [repr(arg) for arg in args], - 'kwargs': {key: repr(value) for key, value in kwargs.items()}, - 'started_at': start.isoformat(' '), - 'ended_at': end.isoformat(' '), - 'duration': (end - start).total_seconds(), - } - log_message.update(exception_info) - - try: - log_dir = dirname(log_path) - if log_dir and not exists(log_dir): - os.makedirs(log_dir) - - with open(log_path, 'a') as outfile: - json.dump( - log_message, - outfile, - separators=(',', ':'), - sort_keys=True, - ) - outfile.write('\n') - except OSError: - # Squelch OSErrors, because we expect them and they shouldn't - # interrupt the rest of the process. - LOGGER.exception("Unable to write timing logs") diff --git a/pavement.py b/pavement.py deleted file mode 100644 index 41a6227dbfb5..000000000000 --- a/pavement.py +++ /dev/null @@ -1,12 +0,0 @@ -import sys # lint-amnesty, pylint: disable=django-not-configured, missing-module-docstring -import os - -# Ensure that we can import pavelib, and that our copy of pavelib -# takes precedence over anything else installed in the virtualenv. -# In local dev, we usually don't need to do this, because Python -# automatically puts the current working directory on the system path. -# Until we re-run pip install, the other copies of edx-platform could -# take precedence, leading to some strange results. 
-sys.path.insert(0, os.path.dirname(__file__)) - -from pavelib import * # lint-amnesty, pylint: disable=wildcard-import, wrong-import-position diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 24d822f08744..a2c90429c5b0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -60,12 +60,12 @@ django-webpack-loader==0.7.0 # Adding pin to avoid any major upgrade djangorestframework<3.15.0 -# Date: 2023-07-19 -# The version of django-stubs we can use depends on which Django release we're using -# 1.16.0 works with Django 3.2 through 4.1 -# Issue for unpinning: https://github.com/openedx/edx-platform/issues/35275 -django-stubs==1.16.0 -djangorestframework-stubs==3.14.0 # Pinned to match django-stubs. Remove this when we can remove the above pin. +# Date: 2024-07-19 +# Generally speaking, the major version of django-stubs should match the major version of django. +# Specifically, we need to perpetually constrain django-stubs to a compatible version based on: +# https://github.com/typeddjango/django-stubs?tab=readme-ov-file#version-compatibility +# Issue: https://github.com/openedx/edx-platform/issues/35275 +django-stubs<5 # Date: 2024-07-23 # django-storages==1.14.4 breaks course imports @@ -78,7 +78,7 @@ django-storages<1.14.4 # The team that owns this package will manually bump this package rather than having it pulled in automatically. # This is to allow them to better control its deployment and to do it in a process that works better # for them. -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # Date: 2024-05-09 # This has to be constrained as well because newer versions of edx-i18n-tools need the diff --git a/requirements/edx-sandbox/py38.txt b/requirements/edx-sandbox/py38.txt deleted file mode 100644 index 5164c3975a12..000000000000 --- a/requirements/edx-sandbox/py38.txt +++ /dev/null @@ -1,4 +0,0 @@ -# This file is a temporary compatibility wrapper around quince.txt. -# It will be removed before Sumac. 
- --r releases/quince.txt diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index 6f942d00b096..7778afbb0f47 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -103,7 +103,6 @@ celery==5.4.0 # openedx-learning certifi==2024.8.30 # via - # -r requirements/edx/paver.txt # elasticsearch # py2neo # requests @@ -118,7 +117,6 @@ chardet==5.2.0 charset-normalizer==2.0.12 # via # -c requirements/edx/../constraints.txt - # -r requirements/edx/paver.txt # requests # snowflake-connector-python chem==1.3.0 @@ -393,9 +391,7 @@ djangorestframework==3.14.0 djangorestframework-xml==2.0.0 # via edx-enterprise dnspython==2.7.0 - # via - # -r requirements/edx/paver.txt - # pymongo + # via pymongo done-xblock==2.4.0 # via -r requirements/edx/bundled.in drf-jwt==1.19.2 @@ -472,7 +468,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in @@ -493,7 +489,6 @@ edx-name-affirmation==3.0.1 edx-opaque-keys[django]==2.11.0 # via # -r requirements/edx/kernel.in - # -r requirements/edx/paver.txt # edx-bulk-grades # edx-ccx-keys # edx-completion @@ -652,7 +647,6 @@ icalendar==6.1.0 # via -r requirements/edx/kernel.in idna==3.10 # via - # -r requirements/edx/paver.txt # optimizely-sdk # requests # snowflake-connector-python @@ -704,15 +698,10 @@ laboratory==1.0.2 # via -r requirements/edx/kernel.in lazy==1.6 # via - # -r requirements/edx/paver.txt # acid-xblock # lti-consumer-xblock # ora2 # xblock -libsass==0.10.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/paver.txt loremipsum==1.0.5 # via ora2 lti-consumer-xblock==9.12.0 @@ -750,7 +739,6 @@ markdown==3.3.7 # xblock-poll markupsafe==3.0.2 # via - # -r requirements/edx/paver.txt # chem # jinja2 # mako @@ -762,8 +750,6 @@ meilisearch==0.33.0 # via # -r requirements/edx/kernel.in # edx-search -mock==5.1.0 - # via -r 
requirements/edx/paver.txt mongoengine==0.29.1 # via -r requirements/edx/kernel.in monotonic==1.6 @@ -869,7 +855,6 @@ path==16.11.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in - # -r requirements/edx/paver.txt # edx-i18n-tools # path-py path-py==12.5.0 @@ -877,12 +862,8 @@ path-py==12.5.0 # edx-enterprise # ora2 # staff-graded-xblock -paver==1.3.4 - # via -r requirements/edx/paver.txt pbr==6.1.0 - # via - # -r requirements/edx/paver.txt - # stevedore + # via stevedore pgpy==0.6.0 # via edx-enterprise piexif==1.1.3 @@ -917,7 +898,7 @@ protobuf==5.29.1 # proto-plus psutil==6.1.0 # via - # -r requirements/edx/paver.txt + # -r requirements/edx/kernel.in # edx-django-utils py2neo @ https://github.com/overhangio/py2neo/releases/download/2021.2.3/py2neo-2021.2.3.tar.gz # via @@ -945,9 +926,7 @@ pydantic==2.10.3 pydantic-core==2.27.1 # via pydantic pygments==2.18.0 - # via - # -r requirements/edx/bundled.in - # py2neo + # via py2neo pyjwkest==1.4.2 # via # -r requirements/edx/kernel.in @@ -970,12 +949,11 @@ pylatexenc==2.10 pylti1p3==2.0.0 # via -r requirements/edx/kernel.in pymemcache==4.0.0 - # via -r requirements/edx/paver.txt + # via -r requirements/edx/kernel.in pymongo==4.4.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in - # -r requirements/edx/paver.txt # edx-opaque-keys # event-tracking # mongoengine @@ -1017,8 +995,6 @@ python-dateutil==2.9.0.post0 # xblock python-ipware==3.0.0 # via django-ipware -python-memcached==1.62 - # via -r requirements/edx/paver.txt python-slugify==8.0.4 # via code-annotations python-swiftclient==4.6.0 @@ -1074,7 +1050,6 @@ regex==2024.11.6 # via nltk requests==2.32.3 # via - # -r requirements/edx/paver.txt # algoliasearch # analytics-python # cachecontrol @@ -1138,7 +1113,6 @@ simplejson==3.19.3 six==1.17.0 # via # -r requirements/edx/kernel.in - # -r requirements/edx/paver.txt # analytics-python # codejail-includes # crowdsourcehinter-xblock @@ -1154,9 +1128,7 @@ 
six==1.17.0 # fs-s3fs # html5lib # interchange - # libsass # optimizely-sdk - # paver # py2neo # pyjwkest # python-dateutil @@ -1194,7 +1166,6 @@ staff-graded-xblock==2.3.0 stevedore==5.4.0 # via # -r requirements/edx/kernel.in - # -r requirements/edx/paver.txt # code-annotations # edx-ace # edx-django-utils @@ -1218,7 +1189,6 @@ tqdm==4.67.1 # openai typing-extensions==4.12.2 # via - # -r requirements/edx/paver.txt # django-countries # edx-opaque-keys # jwcrypto @@ -1242,7 +1212,6 @@ uritemplate==4.1.1 # google-api-python-client urllib3==2.2.3 # via - # -r requirements/edx/paver.txt # botocore # elasticsearch # py2neo @@ -1258,8 +1227,6 @@ voluptuous==0.15.2 # via ora2 walrus==0.9.4 # via edx-event-bus-redis -watchdog==6.0.0 - # via -r requirements/edx/paver.txt wcwidth==0.2.13 # via prompt-toolkit web-fragments==2.2.0 @@ -1280,7 +1247,7 @@ webob==1.8.9 # -r requirements/edx/kernel.in # xblock wrapt==1.17.0 - # via -r requirements/edx/paver.txt + # via -r requirements/edx/kernel.in xblock[django]==5.1.0 # via # -r requirements/edx/kernel.in diff --git a/requirements/edx/bundled.in b/requirements/edx/bundled.in index a9394b809f55..61f6007aa507 100644 --- a/requirements/edx/bundled.in +++ b/requirements/edx/bundled.in @@ -25,7 +25,6 @@ # Follow up issue to remove this fork: https://github.com/openedx/edx-platform/issues/33456 https://github.com/overhangio/py2neo/releases/download/2021.2.3/py2neo-2021.2.3.tar.gz -pygments # Used to support colors in paver command output # i18n_tool is needed at build time for pulling translations edx-i18n-tools>=0.4.6 # Commands for developers and translators to extract, compile and validate translations diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 97b74c737b6f..2b51f6a979cf 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -577,7 +577,7 @@ django-storages==1.14.3 # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt # edxval 
-django-stubs==1.16.0 +django-stubs==4.2.7 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/development.in @@ -625,10 +625,8 @@ djangorestframework==3.14.0 # openedx-learning # ora2 # super-csv -djangorestframework-stubs==3.14.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/development.in +djangorestframework-stubs==3.14.5 + # via -r requirements/edx/development.in djangorestframework-xml==2.0.0 # via # -r requirements/edx/doc.txt @@ -748,7 +746,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/doc.txt @@ -1184,8 +1182,6 @@ libsass==0.10.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/assets.txt - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt loremipsum==1.0.5 # via # -r requirements/edx/doc.txt @@ -1263,9 +1259,7 @@ mistune==3.0.2 # -r requirements/edx/doc.txt # sphinx-mdinclude mock==5.1.0 - # via - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt + # via -r requirements/edx/testing.txt mongoengine==0.29.1 # via # -r requirements/edx/doc.txt @@ -1301,8 +1295,6 @@ mypy==1.11.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/development.in - # django-stubs - # djangorestframework-stubs mypy-extensions==1.0.0 # via mypy mysqlclient==2.2.6 @@ -1458,10 +1450,6 @@ path-py==12.5.0 # edx-enterprise # ora2 # staff-graded-xblock -paver==1.3.4 - # via - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt pbr==6.1.0 # via # -r requirements/edx/doc.txt @@ -1764,10 +1752,6 @@ python-ipware==3.0.0 # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt # django-ipware -python-memcached==1.62 - # via - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt python-slugify==8.0.4 # via # -r requirements/edx/doc.txt @@ -1962,7 +1946,6 @@ six==1.17.0 # libsass # optimizely-sdk # 
pact-python - # paver # py2neo # pyjwkest # python-dateutil @@ -2121,8 +2104,6 @@ tinycss2==1.4.0 # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt # bleach -tomli==2.2.1 - # via django-stubs tomlkit==0.13.2 # via # -r requirements/edx/doc.txt @@ -2228,10 +2209,7 @@ walrus==0.9.4 # -r requirements/edx/testing.txt # edx-event-bus-redis watchdog==6.0.0 - # via - # -r requirements/edx/development.in - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt + # via -r requirements/edx/development.in wcwidth==0.2.13 # via # -r requirements/edx/doc.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index 4261f47c2be7..4315eb8f300c 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -555,7 +555,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt @@ -855,10 +855,6 @@ lazy==1.6 # xblock lazy-object-proxy==1.10.0 # via astroid -libsass==0.10.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/base.txt loremipsum==1.0.5 # via # -r requirements/edx/base.txt @@ -916,8 +912,6 @@ meilisearch==0.33.0 # edx-search mistune==3.0.2 # via sphinx-mdinclude -mock==5.1.0 - # via -r requirements/edx/base.txt mongoengine==0.29.1 # via -r requirements/edx/base.txt monotonic==1.6 @@ -1052,8 +1046,6 @@ path-py==12.5.0 # edx-enterprise # ora2 # staff-graded-xblock -paver==1.3.4 - # via -r requirements/edx/base.txt pbr==6.1.0 # via # -r requirements/edx/base.txt @@ -1228,8 +1220,6 @@ python-ipware==3.0.0 # via # -r requirements/edx/base.txt # django-ipware -python-memcached==1.62 - # via -r requirements/edx/base.txt python-slugify==8.0.4 # via # -r requirements/edx/base.txt @@ -1384,9 +1374,7 @@ six==1.17.0 # fs-s3fs # html5lib # interchange - # libsass # optimizely-sdk - # paver # py2neo # pyjwkest # python-dateutil @@ -1559,8 +1547,6 @@ walrus==0.9.4 # via # -r 
requirements/edx/base.txt # edx-event-bus-redis -watchdog==6.0.0 - # via -r requirements/edx/base.txt wcwidth==0.2.13 # via # -r requirements/edx/base.txt diff --git a/requirements/edx/kernel.in b/requirements/edx/kernel.in index 60f49c5917e1..d2ec04314801 100644 --- a/requirements/edx/kernel.in +++ b/requirements/edx/kernel.in @@ -3,7 +3,6 @@ -c ../constraints.txt -r github.in # Forks and other dependencies not yet on PyPI --r paver.txt # Requirements for running paver commands # DON'T JUST ADD NEW DEPENDENCIES!!! # Please follow these guidelines whenever you change this file: @@ -126,6 +125,7 @@ openedx-django-wiki path piexif # Exif image metadata manipulation, used in the profile_images app Pillow # Image manipulation library; used for course assets, profile images, invoice PDFs, etc. +psutil # Library for retrieving information on running processes and system utilization pycountry pycryptodomex pyjwkest @@ -133,6 +133,7 @@ pyjwkest # PyJWT 1.6.3 contains PyJWTError, which is required by Apple auth in social-auth-core PyJWT>=1.6.3 pylti1p3 # Required by content_libraries core library to support LTI 1.3 launches +pymemcache # Python interface to the memcached memory cache daemon pymongo # MongoDB driver pynliner # Inlines CSS styles into HTML for email notifications python-dateutil @@ -159,5 +160,6 @@ unicodecsv # Easier support for CSV files with unicode user-util # Functionality for retiring users (GDPR compliance) webob web-fragments # Provides the ability to render fragments of web pages +wrapt # Better functools.wraps. TODO: functools has since improved, maybe we can switch?
XBlock[django] # Courseware component architecture xss-utils # https://github.com/openedx/edx-platform/pull/20633 Fix XSS via Translations diff --git a/requirements/edx/paver.in b/requirements/edx/paver.in deleted file mode 100644 index 6987ede82275..000000000000 --- a/requirements/edx/paver.in +++ /dev/null @@ -1,27 +0,0 @@ -# Requirements to run and test Paver -# -# DON'T JUST ADD NEW DEPENDENCIES!!! -# -# If you open a pull request that adds a new dependency, you should: -# * verify that the dependency has a license compatible with AGPLv3 -# * confirm that it has no system requirements beyond what we already install -# * run "make upgrade" to update the detailed requirements files -# - --c ../constraints.txt - -edx-opaque-keys # Create and introspect course and xblock identities -lazy # Lazily-evaluated attributes for Python objects -libsass # Python bindings for the LibSass CSS compiler -markupsafe # XML/HTML/XHTML Markup safe strings -mock # Stub out code with mock objects and make assertions about how they have been used -path # Easier manipulation of filesystem paths -paver # Build, distribution and deployment scripting tool -psutil # Library for retrieving information on running processes and system utilization -pymongo # via edx-opaque-keys -python-memcached # Python interface to the memcached memory cache daemon -pymemcache # Python interface to the memcached memory cache daemon -requests # Simple interface for making HTTP requests -stevedore # Support for runtime plugins, used for XBlocks and edx-platform Django app plugins -watchdog # Used in paver watch_assets -wrapt # Decorator utilities used in the @timed paver task decorator diff --git a/requirements/edx/paver.txt b/requirements/edx/paver.txt deleted file mode 100644 index c9ee8f3aff49..000000000000 --- a/requirements/edx/paver.txt +++ /dev/null @@ -1,65 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# make upgrade -# -certifi==2024.8.30 - # 
via requests -charset-normalizer==2.0.12 - # via - # -c requirements/edx/../constraints.txt - # requests -dnspython==2.7.0 - # via pymongo -edx-opaque-keys==2.11.0 - # via -r requirements/edx/paver.in -idna==3.10 - # via requests -lazy==1.6 - # via -r requirements/edx/paver.in -libsass==0.10.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/paver.in -markupsafe==3.0.2 - # via -r requirements/edx/paver.in -mock==5.1.0 - # via -r requirements/edx/paver.in -path==16.11.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/paver.in -paver==1.3.4 - # via -r requirements/edx/paver.in -pbr==6.1.0 - # via stevedore -psutil==6.1.0 - # via -r requirements/edx/paver.in -pymemcache==4.0.0 - # via -r requirements/edx/paver.in -pymongo==4.4.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/paver.in - # edx-opaque-keys -python-memcached==1.62 - # via -r requirements/edx/paver.in -requests==2.32.3 - # via -r requirements/edx/paver.in -six==1.17.0 - # via - # libsass - # paver -stevedore==5.4.0 - # via - # -r requirements/edx/paver.in - # edx-opaque-keys -typing-extensions==4.12.2 - # via edx-opaque-keys -urllib3==2.2.3 - # via requests -watchdog==6.0.0 - # via -r requirements/edx/paver.in -wrapt==1.17.0 - # via -r requirements/edx/paver.in diff --git a/requirements/edx/testing.in b/requirements/edx/testing.in index b903768f4de6..cf57aeb0fc46 100644 --- a/requirements/edx/testing.in +++ b/requirements/edx/testing.in @@ -28,6 +28,7 @@ freezegun # Allows tests to mock the output of assorted datetime httpretty # Library for mocking HTTP requests, used in many tests import-linter # Tool for making assertions about which modules can import which others isort # For checking and fixing the order of imports +mock # Deprecated alias to standard library `unittest.mock` pycodestyle # Checker for compliance with the Python style guide (PEP 8) polib # Library for manipulating gettext translation files, used to test 
paver i18n commands pyquery # jQuery-like API for retrieving fragments of HTML and XML files in tests diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index dbc2ea7613f5..a5faae04ccd8 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -576,7 +576,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt @@ -898,10 +898,6 @@ lazy==1.6 # xblock lazy-object-proxy==1.10.0 # via astroid -libsass==0.10.0 - # via - # -c requirements/edx/../constraints.txt - # -r requirements/edx/base.txt loremipsum==1.0.5 # via # -r requirements/edx/base.txt @@ -962,7 +958,7 @@ meilisearch==0.33.0 # -r requirements/edx/base.txt # edx-search mock==5.1.0 - # via -r requirements/edx/base.txt + # via -r requirements/edx/testing.in mongoengine==0.29.1 # via -r requirements/edx/base.txt monotonic==1.6 @@ -1101,8 +1097,6 @@ path-py==12.5.0 # edx-enterprise # ora2 # staff-graded-xblock -paver==1.3.4 - # via -r requirements/edx/base.txt pbr==6.1.0 # via # -r requirements/edx/base.txt @@ -1341,8 +1335,6 @@ python-ipware==3.0.0 # via # -r requirements/edx/base.txt # django-ipware -python-memcached==1.62 - # via -r requirements/edx/base.txt python-slugify==8.0.4 # via # -r requirements/edx/base.txt @@ -1498,10 +1490,8 @@ six==1.17.0 # fs-s3fs # html5lib # interchange - # libsass # optimizely-sdk # pact-python - # paver # py2neo # pyjwkest # python-dateutil @@ -1647,8 +1637,6 @@ walrus==0.9.4 # via # -r requirements/edx/base.txt # edx-event-bus-redis -watchdog==6.0.0 - # via -r requirements/edx/base.txt wcwidth==0.2.13 # via # -r requirements/edx/base.txt diff --git a/scripts/eslint.py b/scripts/eslint.py index 3723ada6e219..ad5fbf3f3f1b 100644 --- a/scripts/eslint.py +++ b/scripts/eslint.py @@ -49,20 +49,25 @@ def run_eslint(): ) print(result.stdout) - last_line = result.stdout.strip().splitlines()[-1] if 
result.stdout.strip().splitlines() else "" - regex = r'^\d+' - try: - num_violations = int(re.search(regex, last_line).group(0)) if last_line else 0 - # Fail if number of violations is greater than the limit - if num_violations > violations_limit: - fail_quality( - "FAILURE: Too many eslint violations ({count}).\nThe limit is {violations_limit}.".format(count=num_violations, violations_limit=violations_limit)) - else: - print(f"successfully run eslint with '{num_violations}' violations") + if result.returncode == 0: + fail_quality("No eslint violations found. This is unexpected... are you sure eslint is running correctly?") + elif result.returncode == 1: + last_line = result.stdout.strip().splitlines()[-1] if result.stdout.strip().splitlines() else "" + regex = r'^\d+' + try: + num_violations = int(re.search(regex, last_line).group(0)) if last_line else 0 + # Fail if number of violations is greater than the limit + if num_violations > violations_limit: + fail_quality("FAILURE: Too many eslint violations ({count}).\nThe limit is {violations_limit}.".format(count=num_violations, violations_limit=violations_limit)) + else: + print(f"successfully run eslint with '{num_violations}' violations") - # An AttributeError will occur if the regex finds no matches. - except (AttributeError, ValueError): - fail_quality(f"FAILURE: Number of eslint violations could not be found in '{last_line}'") + # An AttributeError will occur if the regex finds no matches. 
+ except (AttributeError, ValueError): + fail_quality(f"FAILURE: Number of eslint violations could not be found in '{last_line}'") + else: + print(f"Unexpected ESLint failure with exit code {result.returncode}.") + fail_quality(f"Unexpected error: {result.stderr.strip()}") if __name__ == "__main__": diff --git a/scripts/paver_autocomplete.sh b/scripts/paver_autocomplete.sh deleted file mode 100644 index 8b4e8111411c..000000000000 --- a/scripts/paver_autocomplete.sh +++ /dev/null @@ -1,89 +0,0 @@ -# shellcheck disable=all -# ^ Paver in edx-platform is on the way out -# (https://github.com/openedx/edx-platform/issues/31798) -# so we're not going to bother fixing these shellcheck -# violations. - -# Courtesy of Gregory Nicholas - -_subcommand_opts() -{ - local awkfile command cur usage - command=$1 - cur=${COMP_WORDS[COMP_CWORD]} - awkfile=/tmp/paver-option-awkscript-$$.awk - echo ' -BEGIN { - opts = ""; -} - -{ - for (i = 1; i <= NF; i = i + 1) { - # Match short options (-a, -S, -3) - # or long options (--long-option, --another_option) - # in output from paver help [subcommand] - if ($i ~ /^(-[A-Za-z0-9]|--[A-Za-z][A-Za-z0-9_-]*)/) { - opt = $i; - # remove trailing , and = characters. 
- match(opt, "[,=]"); - if (RSTART > 0) { - opt = substr(opt, 0, RSTART); - } - opts = opts " " opt; - } - } -} - -END { - print opts -}' > $awkfile - - usage=`paver help $command` - options=`echo "$usage"|awk -f $awkfile` - - COMPREPLY=( $(compgen -W "$options" -- "$cur") ) -} - - -_paver() -{ - local cur prev - COMPREPLY=() - # Variable to hold the current word - cur="${COMP_WORDS[COMP_CWORD]}" - prev="${COMP_WORDS[COMP_CWORD - 1]}" - - # Build a list of the available tasks from: `paver --help --quiet` - local cmds=$(paver -hq | awk '/^ ([a-zA-Z][a-zA-Z0-9_]+)/ {print $1}') - - subcmd="${COMP_WORDS[1]}" - # Generate possible matches and store them in the - # array variable COMPREPLY - - if [[ -n $subcmd ]] - then - - if [[ ${#COMP_WORDS[*]} == 3 ]] - then - _subcommand_opts $subcmd - return 0 - else - if [[ "$cur" == -* ]] - then - _subcommand_opts $subcmd - return 0 - else - COMPREPLY=( $(compgen -o nospace -- "$cur") ) - fi - fi - fi - - if [[ ${#COMP_WORDS[*]} == 2 ]] - then - COMPREPLY=( $(compgen -W "${cmds}" -- "$cur") ) - fi -} - -# Assign the auto-completion function for our command. 
- -complete -F _paver -o default paver diff --git a/xmodule/js/spec/video/video_control_spec.js b/xmodule/js/spec/video/video_control_spec.js index 248c3c31d2be..49326290716b 100644 --- a/xmodule/js/spec/video/video_control_spec.js +++ b/xmodule/js/spec/video/video_control_spec.js @@ -186,6 +186,26 @@ }); describe('constructor with end-time', function() { + it('displays the correct time when startTime and endTime are specified', function(done) { + state = jasmine.initializePlayer({ + start: 10, + end: 20 + }); + spyOn(state.videoPlayer, 'duration').and.returnValue(60); + + state.videoControl.updateVcrVidTime({ + time: 15, + duration: 60 + }); + + jasmine.waitUntil(function() { + var expectedValue = $('.video-controls').find('.vidtime'); + return expectedValue.text().indexOf('0:05 / 0:20') !== -1; // Expecting 15 seconds - 10 seconds = 5 seconds + }).then(function() { + expect($('.video-controls').find('.vidtime')).toHaveText('0:05 / 0:20'); + }).always(done); + }); + it( 'saved position is 0, timer slider and VCR set to 0:00 ' + 'and ending at specified end-time', diff --git a/xmodule/js/spec/video/video_events_plugin_spec.js b/xmodule/js/spec/video/video_events_plugin_spec.js index 860cfb5e07ba..ac809d054edc 100644 --- a/xmodule/js/spec/video/video_events_plugin_spec.js +++ b/xmodule/js/spec/video/video_events_plugin_spec.js @@ -226,6 +226,55 @@ import '../helper.js'; destroy: plugin.destroy }); }); + + describe('getCurrentTime method', function() { + it('returns current time adjusted by startTime if video starts from a subsection', function() { + spyOn(state.videoPlayer, 'currentTime', 'get').and.returnValue(120); + state.config.startTime = 30; + expect(state.videoEventsPlugin.getCurrentTime()).toBe(90); // 120 - 30 = 90 + }); + + it('returns 0 if currentTime is undefined', function() { + spyOn(state.videoPlayer, 'currentTime', 'get').and.returnValue(undefined); + state.config.startTime = 30; // Start time is irrelevant since current time is undefined + 
expect(state.videoEventsPlugin.getCurrentTime()).toBe(0); + }); + + it('returns unadjusted current time if startTime is not defined', function() { + spyOn(state.videoPlayer, 'currentTime', 'get').and.returnValue(60); + expect(state.videoEventsPlugin.getCurrentTime()).toBe(60); // Returns current time as is + }); + }); + + describe('log method', function() { + it('logs event with adjusted duration when startTime and endTime are defined', function() { + state.config.startTime = 30; + state.config.endTime = 150; + state.duration = 200; + + state.videoEventsPlugin.log('test_event', {}); + + expect(Logger.log).toHaveBeenCalledWith('test_event', { + id: 'id', + code: this.code, + duration: 120, // 150 - 30 = 120 + }); + }); + + it('logs event with full duration when startTime and endTime are not defined', function() { + state.config.startTime = undefined; + state.config.endTime = undefined; + state.duration = 200; + + state.videoEventsPlugin.log('test_event', {}); + + expect(Logger.log).toHaveBeenCalledWith('test_event', { + id: 'id', + code: this.code, + duration: 200 // Full duration as no start/end time adjustment is needed + }); + }); + }); }); describe('VideoPlayer Events plugin', function() { diff --git a/xmodule/js/src/video/04_video_control.js b/xmodule/js/src/video/04_video_control.js index b8cec53cae6f..c6f2ba240745 100644 --- a/xmodule/js/src/video/04_video_control.js +++ b/xmodule/js/src/video/04_video_control.js @@ -152,10 +152,17 @@ } function updateVcrVidTime(params) { - var endTime = (this.config.endTime !== null) ? this.config.endTime : params.duration; + var endTime = (this.config.endTime !== null) ? this.config.endTime : params.duration, + startTime, currentTime; // in case endTime is accidentally specified as being greater than the video endTime = Math.min(endTime, params.duration); - this.videoControl.vidTimeEl.text(Time.format(params.time) + ' / ' + Time.format(endTime)); + startTime = this.config.startTime > 0 ? 
this.config.startTime : 0; + // if it's a subsection of video, use the clip duration as endTime + if (startTime && this.config.endTime) { + endTime = this.config.endTime - startTime; + } + currentTime = startTime ? params.time - startTime : params.time; + this.videoControl.vidTimeEl.text(Time.format(currentTime) + ' / ' + Time.format(endTime)); } } ); diff --git a/xmodule/js/src/video/09_events_plugin.js b/xmodule/js/src/video/09_events_plugin.js index d407534c983e..6116f26e22c5 100644 --- a/xmodule/js/src/video/09_events_plugin.js +++ b/xmodule/js/src/video/09_events_plugin.js @@ -143,8 +143,15 @@ }, getCurrentTime: function() { - var player = this.state.videoPlayer; - return player ? player.currentTime : 0; + var player = this.state.videoPlayer, + startTime = this.state.config.startTime, + currentTime; + currentTime = player ? player.currentTime : 0; + // if video didn't start from 0(it's a subsection of video), subtract the additional time at start + if (startTime) { + currentTime = currentTime ? currentTime - startTime : 0; + } + return currentTime; }, getCurrentLanguage: function() { @@ -153,11 +160,15 @@ }, log: function(eventName, data) { + // use startTime and endTime to calculate the duration to handle the case where only a subsection of video is used + var endTime = this.state.config.endTime || this.state.duration, + startTime = this.state.config.startTime; + var logInfo = _.extend({ id: this.state.id, // eslint-disable-next-line no-nested-ternary code: this.state.isYoutubeType() ? this.state.youtubeId() : this.state.canPlayHLS ? 'hls' : 'html5', - duration: this.state.duration + duration: endTime - startTime }, data, this.options.data); Logger.log(eventName, logInfo); } diff --git a/xmodule/tests/__init__.py b/xmodule/tests/__init__.py index b2cdd67b71ba..786836c050b5 100644 --- a/xmodule/tests/__init__.py +++ b/xmodule/tests/__init__.py @@ -1,10 +1,5 @@ """ unittests for xmodule - -Run like this: - - paver test_lib -l ./xmodule - """