diff --git a/.deepsource.toml b/.deepsource.toml deleted file mode 100644 index 133b3d6963..0000000000 --- a/.deepsource.toml +++ /dev/null @@ -1,15 +0,0 @@ -version = 1 - -[[analyzers]] -name = "javascript" -enabled = true - - [analyzers.meta] - plugins = ["vue"] - -[[analyzers]] -name = "python" -enabled = true - - [analyzers.meta] - runtime_version = "3.x.x" diff --git a/.github/workflows/frontendtest.yml b/.github/workflows/frontendtest.yml index 142160a076..b9998ca44e 100644 --- a/.github/workflows/frontendtest.yml +++ b/.github/workflows/frontendtest.yml @@ -38,4 +38,4 @@ jobs: yarn --frozen-lockfile npm rebuild node-sass - name: Run tests - run: yarn run test-jest + run: yarn run test diff --git a/bin/run_minio.py b/bin/run_minio.py index 4578a754bf..42adf31562 100755 --- a/bin/run_minio.py +++ b/bin/run_minio.py @@ -19,15 +19,18 @@ run_type = os.getenv("MINIO_RUN_TYPE") - assert run_type in MINIO_RUN_TYPES, "MINIO_RUN_TYPE must be one of {}".format(MINIO_RUN_TYPES) + if run_type not in MINIO_RUN_TYPES: + raise AssertionError("MINIO_RUN_TYPE must be one of {}".format(MINIO_RUN_TYPES)) if run_type == "LOCAL": cmd = ["minio", "server", "-C", str(MINIO_CONFIG_DIR), str(MINIO_LOCAL_HOME_STORAGE)] elif run_type == "GCS_PROXY": - assert os.path.exists(GOOGLE_APPLICATION_CREDENTIALS_PATH), "the env var GOOGLE_APPLICATION_CREDENTIALS must be defined," " and pointing to a credentials file for your project." + if not os.path.exists(GOOGLE_APPLICATION_CREDENTIALS_PATH): + raise AssertionError("the env var GOOGLE_APPLICATION_CREDENTIALS must be defined," " and pointing to a credentials file for your project.") - assert GOOGLE_GCS_PROJECT_ID, "$GOOGLE_GCS_PROJECT_ID must be defined with the project" " id where you store your objects." + if not GOOGLE_GCS_PROJECT_ID: + raise AssertionError("$GOOGLE_GCS_PROJECT_ID must be defined with the project" " id where you store your objects.") cmd = ["minio", "gateway", "gcs", GOOGLE_GCS_PROJECT_ID] else: raise Exception("Unhandled run_type type: {}".format(run_type)) diff --git a/contentcuration/contentcuration/frontend/administration/mixins.js b/contentcuration/contentcuration/frontend/administration/mixins.js index 04f3314083..529c75ffae 100644 --- a/contentcuration/contentcuration/frontend/administration/mixins.js +++ b/contentcuration/contentcuration/frontend/administration/mixins.js @@ -84,7 +84,7 @@ export function generateFilterMixin(filterMap) { const query = transform( params, (result, value, key) => { - if (value != null) { + if (value !== null) { result[key] = value; } }, diff --git a/contentcuration/contentcuration/frontend/administration/pages/Users/UserDetails.vue b/contentcuration/contentcuration/frontend/administration/pages/Users/UserDetails.vue index d95801a513..686674af36 100644 --- a/contentcuration/contentcuration/frontend/administration/pages/Users/UserDetails.vue +++ b/contentcuration/contentcuration/frontend/administration/pages/Users/UserDetails.vue @@ -326,7 +326,10 @@ }, featureFlagValue() { return function(key) { - return this.loading ? false : this.details.feature_flags[key] || false; + return this.loading + ? 
false + : (this.details && this.details.feature_flags && this.details.feature_flags[key]) || + false; }; }, }, diff --git a/contentcuration/contentcuration/frontend/channelEdit/__tests__/CurrentTopicView.spec.js b/contentcuration/contentcuration/frontend/channelEdit/__tests__/CurrentTopicView.spec.js index c8ac555b62..7745554d28 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/__tests__/CurrentTopicView.spec.js +++ b/contentcuration/contentcuration/frontend/channelEdit/__tests__/CurrentTopicView.spec.js @@ -50,13 +50,12 @@ const makeWrapper = ({ store, topicId = TOPIC.id }) => { localVue, router, store, + stubs: { + NodePanel: true, + }, }); }; -function getNodeListItems(wrapper) { - return wrapper.findAll('[data-test="node-list-item"]'); -} - function hasEditSelectedBtn(wrapper) { return wrapper.contains('[data-test="edit-selected-btn"]'); } @@ -77,11 +76,8 @@ function hasDeleteSelectedBtn(wrapper) { return wrapper.contains('[data-test="delete-selected-btn"]'); } -function selectNode(wrapper, nodeIdx) { - const nodeCheckbox = getNodeListItems(wrapper) - .at(nodeIdx) - .find('input[type="checkbox"]'); - nodeCheckbox.setChecked(); +function selectNode(wrapper) { + wrapper.vm.selected = [NODE_1.id]; } describe('CurrentTopicView', () => { @@ -99,9 +95,6 @@ describe('CurrentTopicView', () => { global.CHANNEL_EDIT_GLOBAL.channel_id = CHANNEL.id; const storeConfig = cloneDeep(STORE_CONFIG); - // `loadChildren` call needs to be resolved for NodePanel - // to finish loading (see NodePanel's `created` hook) - jest.spyOn(storeConfig.modules.contentNode.actions, 'loadChildren').mockResolvedValue(); store = storeFactory(storeConfig); store.commit('channel/ADD_CHANNEL', CHANNEL); @@ -117,14 +110,6 @@ describe('CurrentTopicView', () => { jest.resetAllMocks(); }); - it('should display all nodes of a topic', () => { - const nodeListItems = getNodeListItems(wrapper); - - expect(nodeListItems.length).toBe(2); - expect(nodeListItems.at(0).text()).toContain('Test node 1'); - expect(nodeListItems.at(1).text()).toContain('Test node 2'); - }); - it("shouldn't display any nodes operations buttons when no nodes are selected", () => { expect(hasEditSelectedBtn(wrapper)).toBe(false); expect(hasCopySelectedToClipboardBtn(wrapper)).toBe(false); @@ -135,7 +120,7 @@ describe('CurrentTopicView', () => { describe("when a user can't edit a channel", () => { it('should display only copy to clipboard button when some nodes are selected', () => { - selectNode(wrapper, 0); + selectNode(wrapper); expect(hasCopySelectedToClipboardBtn(wrapper)).toBe(true); expect(hasEditSelectedBtn(wrapper)).toBe(false); @@ -151,7 +136,7 @@ describe('CurrentTopicView', () => { }); it('should display all nodes operations buttons when some nodes are selected', () => { - selectNode(wrapper, 0); + selectNode(wrapper); expect(hasCopySelectedToClipboardBtn(wrapper)).toBe(true); expect(hasEditSelectedBtn(wrapper)).toBe(true); diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/AddRelatedResourcesModal.vue b/contentcuration/contentcuration/frontend/channelEdit/components/AddRelatedResourcesModal.vue index e55ed69205..0957336c96 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/AddRelatedResourcesModal.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/components/AddRelatedResourcesModal.vue @@ -85,7 +85,7 @@ diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue 
b/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue index fdc9f3b151..0d5596bc89 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue @@ -343,10 +343,10 @@ }; } - function generateExtraFieldsGetterSetter(key) { + function generateExtraFieldsGetterSetter(key, defaultValue) { return { get() { - return this.getExtraFieldsValueFromNodes(key); + return this.getExtraFieldsValueFromNodes(key, defaultValue); }, set(value) { this.updateExtraFields({ [key]: value }); @@ -415,7 +415,7 @@ /* FORM FIELDS */ title: generateGetterSetter('title'), description: generateGetterSetter('description'), - randomizeOrder: generateExtraFieldsGetterSetter('randomize'), + randomizeOrder: generateExtraFieldsGetterSetter('randomize', true), author: generateGetterSetter('author'), provider: generateGetterSetter('provider'), aggregator: generateGetterSetter('aggregator'), @@ -621,14 +621,14 @@ let results = uniq(this.nodes.map(node => node[key] || null)); return getValueFromResults(results); }, - getExtraFieldsValueFromNodes(key) { + getExtraFieldsValueFromNodes(key, defaultValue = null) { if ( Object.prototype.hasOwnProperty.call(this.diffTracker, 'extra_fields') && Object.prototype.hasOwnProperty.call(this.diffTracker.extra_fields, key) ) { return this.diffTracker.extra_fields[key]; } - let results = uniq(this.nodes.map(node => node.extra_fields[key] || null)); + let results = uniq(this.nodes.map(node => node.extra_fields[key] || defaultValue)); return getValueFromResults(results); }, getPlaceholder(field) { diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue b/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue index ee30724ef3..c6922d2a50 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue @@ -205,7 +205,7 @@ currentLocationId() { // If opening modal from inside TrashModal, begin navigation at root node if (this.movingFromTrash) { - return this.currentChannel.root_id; + return this.currentChannel && this.currentChannel.root_id; } const contentNode = this.getContentNode(this.moveNodeIds[0]); return contentNode && contentNode.parent; diff --git a/contentcuration/contentcuration/frontend/channelEdit/utils.js b/contentcuration/contentcuration/frontend/channelEdit/utils.js index d4b3323535..4c6b168c45 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/utils.js +++ b/contentcuration/contentcuration/frontend/channelEdit/utils.js @@ -125,7 +125,9 @@ export function updateAnswersToQuestionType(questionType, answers) { } export function isImportedContent(node) { - return !!(node && node.original_source_node_id && node.node_id !== node.original_source_node_id); + return Boolean( + node && node.original_source_node_id && node.node_id !== node.original_source_node_id + ); } export function importedChannelLink(node, router) { diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js b/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js index dd5c796b0e..a25c4e24ec 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js +++ 
b/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js @@ -55,6 +55,7 @@ function makeWrapper(items) { methods: { loadContentNodes: jest.fn(), loadAncestors: jest.fn(), + loadChildren: jest.fn(() => Promise.resolve()), }, stubs: { ResourceDrawer: true, diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js index f83efa438e..11e8c7d606 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js @@ -20,6 +20,12 @@ describe('contentNode actions', () => { return ContentNode.put(contentNodeDatum).then(newId => { id = newId; contentNodeDatum.id = newId; + jest + .spyOn(ContentNode, 'fetchCollection') + .mockImplementation(() => Promise.resolve([contentNodeDatum])); + jest + .spyOn(ContentNode, 'fetchModel') + .mockImplementation(() => Promise.resolve(contentNodeDatum)); return ContentNode.put({ title: 'notatest', parent: newId, lft: 2 }).then(() => { store = storeFactory({ modules: { @@ -34,6 +40,7 @@ describe('contentNode actions', () => { }); }); afterEach(() => { + jest.restoreAllMocks(); return ContentNode.table.toCollection().delete(); }); describe('loadContentNodes action', () => { diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js index 030e13a700..21cee26437 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js @@ -17,7 +17,7 @@ export function fetchResourceSearchResults(context, params) { export function loadChannels(context, params) { // Used for search channel filter dropdown params.page_size = 25; - return Channel.requestCollection({ deleted: false, ...params }).then(channelPage => { + return Channel.fetchCollection({ deleted: false, ...params }).then(channelPage => { return channelPage; }); } diff --git a/contentcuration/contentcuration/frontend/channelList/views/ChannelSet/__tests__/channelSetModal.spec.js b/contentcuration/contentcuration/frontend/channelList/views/ChannelSet/__tests__/channelSetModal.spec.js index 0a1d1efb2c..c63e71f18e 100644 --- a/contentcuration/contentcuration/frontend/channelList/views/ChannelSet/__tests__/channelSetModal.spec.js +++ b/contentcuration/contentcuration/frontend/channelList/views/ChannelSet/__tests__/channelSetModal.spec.js @@ -45,6 +45,13 @@ const NEW_CHANNEL_SET = { [NEW_OBJECT]: true, }; +const loadChannelSetMock = (cs, store) => { + return jest.fn().mockImplementation(() => { + store.commit('channelSet/ADD_CHANNELSET', cs); + return Promise.resolve(cs); + }); +}; + const makeWrapper = ({ store, channelSetId }) => { if (router.currentRoute.name !== RouteNames.CHANNEL_SET_DETAILS) { router.push({ @@ -55,21 +62,24 @@ const makeWrapper = ({ store, channelSetId }) => { }); } - return mount(ChannelSetModal, { + const loadChannelSet = loadChannelSetMock(CHANNEL_SET, store); + const loadChannelList = jest.fn().mockImplementation(() => Promise.resolve(CHANNEL_SET.channels)); + + const wrapper = mount(ChannelSetModal, { propsData: { channelSetId, }, + methods: { + loadChannelSet, + loadChannelList, + }, router, localVue, store, }); -}; - 
-const loadChannelSetMock = channelSet => { - return jest.fn().mockImplementation(({ commit }) => { - commit('ADD_CHANNELSET', channelSet); - return Promise.resolve(channelSet); - }); + wrapper.loadChannelSet = loadChannelSet; + wrapper.loadChannelList = loadChannelList; + return wrapper; }; const getCollectionNameInput = wrapper => { @@ -110,54 +120,47 @@ describe('ChannelSetModal', () => { }); describe('if there are no data for a channel set yet', () => { - let loadChannelSet, loadChannelList; - + let wrapper; beforeEach(() => { const storeConfig = cloneDeep(STORE_CONFIG); - loadChannelSet = loadChannelSetMock(CHANNEL_SET); - loadChannelList = jest.fn(); - storeConfig.modules.channelSet.actions.loadChannelSet = loadChannelSet; - storeConfig.modules.channel.actions.loadChannelList = loadChannelList; - const store = storeFactory(storeConfig); - makeWrapper({ store, channelSetId: CHANNEL_SET.id }); + wrapper = makeWrapper({ store, channelSetId: CHANNEL_SET.id }); }); it('should load the channel set', () => { - expect(loadChannelSet).toHaveBeenCalledTimes(1); - expect(loadChannelSet.mock.calls[0][1]).toBe(CHANNEL_SET.id); + expect(wrapper.loadChannelSet).toHaveBeenCalledTimes(1); + expect(wrapper.loadChannelSet.mock.calls[0][0]).toBe(CHANNEL_SET.id); }); it('should load channels of the channel set', () => { - expect(loadChannelList).toHaveBeenCalledTimes(1); - expect(loadChannelList.mock.calls[0][1]).toEqual({ id__in: [CHANNEL_1.id, CHANNEL_2.id] }); + expect(wrapper.loadChannelList).toHaveBeenCalledTimes(1); + expect(wrapper.loadChannelList.mock.calls[0][0]).toEqual({ + id__in: [CHANNEL_1.id, CHANNEL_2.id], + }); }); }); describe('if a channel set has been already loaded', () => { - let store, loadChannelSet, loadChannelList; + let store, wrapper; beforeEach(() => { const storeConfig = cloneDeep(STORE_CONFIG); - loadChannelSet = jest.fn(); - loadChannelList = jest.fn(); - storeConfig.modules.channelSet.actions.loadChannelSet = loadChannelSet; - storeConfig.modules.channel.actions.loadChannelList = loadChannelList; - store = storeFactory(storeConfig); store.commit('channelSet/ADD_CHANNELSET', CHANNEL_SET); - makeWrapper({ store, channelSetId: CHANNEL_SET.id }); + wrapper = makeWrapper({ store, channelSetId: CHANNEL_SET.id }); }); it("shouldn't load the channel set", () => { - expect(loadChannelSet).not.toHaveBeenCalled(); + expect(wrapper.loadChannelSet).not.toHaveBeenCalled(); }); it('should load channels from the channel set', () => { - expect(loadChannelList).toHaveBeenCalledTimes(1); - expect(loadChannelList.mock.calls[0][1]).toEqual({ id__in: [CHANNEL_1.id, CHANNEL_2.id] }); + expect(wrapper.loadChannelList).toHaveBeenCalledTimes(1); + expect(wrapper.loadChannelList.mock.calls[0][0]).toEqual({ + id__in: [CHANNEL_1.id, CHANNEL_2.id], + }); }); }); diff --git a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/actions.js b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/actions.js index e9abc9aefd..b31f68422e 100644 --- a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/actions.js +++ b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/actions.js @@ -8,7 +8,7 @@ export function searchCatalog(context, params) { params.published = true; let promise; if (context.rootGetters.loggedIn) { - promise = Channel.requestCollection(params); + promise = Channel.fetchCollection(params); } else { promise = Channel.searchCatalog(params); } diff --git a/contentcuration/contentcuration/frontend/shared/app.js 
b/contentcuration/contentcuration/frontend/shared/app.js index 178b1fe1c4..5b48f2486b 100644 --- a/contentcuration/contentcuration/frontend/shared/app.js +++ b/contentcuration/contentcuration/frontend/shared/app.js @@ -128,6 +128,9 @@ export default async function startApp({ store, router, index }) { } window.addEventListener('beforeunload', e => { + if (e.currentTarget.location.origin !== window.location.origin) { + return; + } const logoutConfirmed = window.sessionStorage.getItem('logoutConfirmed'); const areAllChangesSaved = store.getters['areAllChangesSaved']; diff --git a/contentcuration/contentcuration/frontend/shared/data/__mocks__/resources.js b/contentcuration/contentcuration/frontend/shared/data/__mocks__/resources.js index 8e3af22bfd..8b1e7a3a2c 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__mocks__/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/__mocks__/resources.js @@ -1,11 +1,11 @@ import * as resources from '../resources'; Object.values(resources).forEach(resource => { - if (resource.requestCollection) { - resource.requestCollection = () => Promise.resolve([]); + if (resource.fetchCollection) { + resource.fetchCollection = () => Promise.resolve([]); } - if (resource.requestModel) { - resource.requestModel = () => Promise.resolve({}); + if (resource.fetchModel) { + resource.fetchModel = () => Promise.resolve({}); } }); diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js index 87f22ee073..fe67e64708 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js @@ -625,7 +625,7 @@ describe('ContentNode methods', () => { describe('getByNodeIdChannelId method', () => { let node, collection, - requestCollection, + fetchCollection, table = {}; beforeEach(() => { @@ -640,7 +640,7 @@ describe('ContentNode methods', () => { }; collection = [Object.assign({}, node)]; - requestCollection = mockMethod('requestCollection', () => Promise.resolve(collection)); + fetchCollection = mockMethod('fetchCollection', () => Promise.resolve(collection)); mockProperty('table', table); }); @@ -650,17 +650,17 @@ describe('ContentNode methods', () => { node ); expect(table.get).toHaveBeenCalledWith({ '[node_id+channel_id]': [node_id, channel_id] }); - expect(requestCollection).not.toBeCalled(); + expect(fetchCollection).not.toBeCalled(); }); - it('should use call requestCollection when missing locally', async () => { + it('should use call fetchCollection when missing locally', async () => { const { node_id, channel_id } = node; node = null; await expect(ContentNode.getByNodeIdChannelId(node_id, channel_id)).resolves.toMatchObject( collection[0] ); expect(table.get).toHaveBeenCalledWith({ '[node_id+channel_id]': [node_id, channel_id] }); - expect(requestCollection).toHaveBeenCalledWith({ + expect(fetchCollection).toHaveBeenCalledWith({ _node_id_channel_id_: [node_id, channel_id], }); }); @@ -671,7 +671,7 @@ describe('ContentNode methods', () => { collection = []; await expect(ContentNode.getByNodeIdChannelId(node_id, channel_id)).resolves.toBeFalsy(); expect(table.get).toHaveBeenCalledWith({ '[node_id+channel_id]': [node_id, channel_id] }); - expect(requestCollection).toHaveBeenCalledWith({ + expect(fetchCollection).toHaveBeenCalledWith({ _node_id_channel_id_: [node_id, 
channel_id], }); }); diff --git a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js index 088bf4a6b1..ee792d2e77 100644 --- a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js +++ b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js @@ -31,18 +31,18 @@ function bulkUpdate(table, changes) { .anyOf(keys) .raw() .each((obj, cursor) => { - map[cursor.primaryKey + ''] = obj; + map[String(cursor.primaryKey)] = obj; }) .then(() => { // Filter away changes whose key wasn't found in the local database // (we can't update them if we do not know the existing values) let updatesThatApply = changes.filter(c => - Object.prototype.hasOwnProperty.call(map, c.key + '') + Object.prototype.hasOwnProperty.call(map, String(c.key)) ); // Apply modifications onto each existing object (in memory) // and generate array of resulting objects to put using bulkPut(): let objsToPut = updatesThatApply.map(c => { - let curr = map[c.key + '']; + let curr = map[String(c.key)]; applyMods(curr, c.mods); return curr; }); diff --git a/contentcuration/contentcuration/frontend/shared/data/constants.js b/contentcuration/contentcuration/frontend/shared/data/constants.js index 923f2dcd2e..3d5bad5b25 100644 --- a/contentcuration/contentcuration/frontend/shared/data/constants.js +++ b/contentcuration/contentcuration/frontend/shared/data/constants.js @@ -30,17 +30,6 @@ export const TABLE_NAMES = { CHANGE_LOCKS_TABLE, }; -export const MESSAGES = { - FETCH_COLLECTION: 'FETCH_COLLECTION', - FETCH_MODEL: 'FETCH_MODEL', - REQUEST_RESPONSE: 'REQUEST_RESPONSE', -}; - -export const STATUS = { - SUCCESS: 'SUCCESS', - FAILURE: 'FAILURE', -}; - export const APP_ID = 'KolibriStudio'; // Transaction sources diff --git a/contentcuration/contentcuration/frontend/shared/data/index.js b/contentcuration/contentcuration/frontend/shared/data/index.js index 7935ed0cdf..512f8f8bde 100644 --- a/contentcuration/contentcuration/frontend/shared/data/index.js +++ b/contentcuration/contentcuration/frontend/shared/data/index.js @@ -1,6 +1,6 @@ import Dexie from 'dexie'; import mapValues from 'lodash/mapValues'; -import { createLeaderElection } from './leaderElection'; +import { createLeaderElection } from 'broadcast-channel'; import channel from './broadcastChannel'; import { CHANGE_LOCKS_TABLE, CHANGES_TABLE, IGNORED_SOURCE, TABLE_NAMES } from './constants'; import db from './db'; @@ -45,11 +45,11 @@ if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { function runElection() { const elector = createLeaderElection(channel); - elector.awaitLeadership({ - success: startSyncing, - cleanup: stopSyncing, - }); - return elector.waitForLeader(); + elector.awaitLeadership().then(startSyncing); + elector.onduplicate = () => { + stopSyncing(); + elector.die.then(runElection); + }; } export function initializeDB() { diff --git a/contentcuration/contentcuration/frontend/shared/data/leaderElection.js b/contentcuration/contentcuration/frontend/shared/data/leaderElection.js deleted file mode 100644 index 8551ea6800..0000000000 --- a/contentcuration/contentcuration/frontend/shared/data/leaderElection.js +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Vendored and modified from the excellent: - * https://github.com/pubkey/broadcast-channel/blob/master/src/leader-election.js - * So that we can add functionality: - * 1) The ability to report back when a leader has been elected - * 2) The ability for a tab to report 
itself has a dictator, which will depose - any previously elected leader and will only relinquish power when it dies. - */ - -import { add } from 'unload'; -import uuidv4 from 'uuid/v4'; -import isFunction from 'lodash/isFunction'; - -function sleep(time) { - if (!time) time = 0; - return new Promise(res => setTimeout(res, time)); -} - -const MESSAGES = { - APPLY: 'APPLY', - DEATH: 'DEATH', - TELL: 'TELL', -}; - -const LEADER_CONTEXT = 'LEADER_ELECTION_CONTEXT'; - -// This is defined in the broadcast channel source -// for the postInternal method and should not be changed. -const INTERNAL_CHANNEL = 'internal'; - -const LeaderElection = function(channel, options) { - this._channel = channel; - this._options = options; - - this.isLeader = false; - this.isDead = false; - this.token = uuidv4(); - - // Track whether any leader has been elected - this._leaderExists = false; - // A place to track a waiting for leader promise - this._waitingForLeaderPromise = null; - // A place to track a resolve callback for a waiting - // for leader promise - this._waitingForLeaderFn = null; - - this._isApl = false; // _isApplying - this._reApply = false; - - // things to clean up - this._unl = []; // _unloads - this._lstns = []; // _listeners - this._invs = []; // _intervals -}; - -LeaderElection.prototype = { - applyOnce() { - if (this.isLeader) return Promise.resolve(false); - if (this.isDead) return Promise.resolve(false); - - // do nothing if already running - if (this._isApl) { - this._reApply = true; - return Promise.resolve(false); - } - this._isApl = true; - - let stopCriteria = false; - - const isDictator = this._options.dictator; - - const handleMessage = msg => { - if (msg.context === LEADER_CONTEXT && msg.token != this.token) { - const submit = !isDictator && msg.dictator; - const ignore = isDictator && !msg.dictator; - if (!ignore) { - // Ignore any messages from other non-dictatorial leaders if - // this is a dictatorial context. 
- if (msg.action === MESSAGES.APPLY) { - // other is applying - if (submit || msg.token > this.token) { - // other has higher token, or is a dictator and we are not - // stop applying - stopCriteria = true; - } - } - - if (msg.action === MESSAGES.TELL) { - // other is already leader - stopCriteria = true; - } - } - } - }; - this._channel.addEventListener(INTERNAL_CHANNEL, handleMessage); - - const ret = _sendMessage(this, MESSAGES.APPLY) // send out that this one is applying - .then(() => sleep(this._options.responseTime)) // let others time to respond - .then(() => { - if (stopCriteria) return Promise.reject(new Error()); - else return _sendMessage(this, MESSAGES.APPLY); - }) - .then(() => sleep(this._options.responseTime)) // let others time to respond - .then(() => { - if (stopCriteria) return Promise.reject(new Error()); - else return _sendMessage(this); - }) - .then(() => _beLeader(this)) // no one disagreed -> this one is now leader - .then(() => true) - .catch(() => false) // apply not successfull - .then(success => { - this._channel.removeEventListener(INTERNAL_CHANNEL, handleMessage); - this._isApl = false; - if (!success && this._reApply) { - this._reApply = false; - return this.applyOnce(); - } else return success; - }); - return ret; - }, - - awaitLeadership({ success = null, cleanup = null } = {}) { - this.electedCallback = success; - this.deposedCallback = cleanup; - if ( - /* _awaitLeadershipPromise */ - !this._aLP - ) { - this._aLP = _awaitLeadershipOnce(this); - } - return this._aLP; - }, - - get leaderExists() { - return this._leaderExists; - }, - - set leaderExists(exists) { - if (this._waitingForLeaderFn && exists) { - this._waitingForLeaderFn(true); - this._waitingForLeaderFn = null; - } else if (this._leaderExists && !exists) { - this._waitingForLeaderPromise = null; - this._waitingForLeaderFn = null; - } - this._leaderExists = exists; - }, - - /* - * A function to wait until anything has been elected leader. - */ - waitForLeader() { - if (!this._waitingForLeaderPromise) { - this._waitingForLeaderPromise = new Promise(resolve => { - if (this._leaderExists) { - resolve(true); - } else { - this._waitingForLeaderFn = resolve; - } - }); - } - return this._waitingForLeaderPromise; - }, - - depose() { - this.isLeader = false; - if (isFunction(this.deposedCallback)) { - this.deposedCallback(); - } - this._lstns.forEach(listener => this._channel.removeEventListener(INTERNAL_CHANNEL, listener)); - this._invs.forEach(interval => clearInterval(interval)); - this._unl.forEach(uFn => { - uFn.remove(); - }); - }, - - die() { - if (this.isDead) return; - this.isDead = true; - this.depose(); - - return _sendMessage(this, MESSAGES.DEATH); - }, -}; - -function _awaitLeadershipOnce(leaderElector) { - if (leaderElector.isLeader) return Promise.resolve(); - - return new Promise(res => { - let resolved = false; - - const finish = () => { - // applyOnce has resolved, hence there is - // now a leader. 
- leaderElector.leaderExists = true; - if (resolved) return; - if (leaderElector.isLeader) { - resolved = true; - clearInterval(interval); - leaderElector._channel.removeEventListener(INTERNAL_CHANNEL, whenDeathListener); - res(true); - } - }; - - // try once now - leaderElector.applyOnce().then(finish); - - // try on fallbackInterval - const interval = setInterval(() => { - leaderElector.applyOnce().then(finish); - }, leaderElector._options.fallbackInterval); - leaderElector._invs.push(interval); - - // try when other leader dies - const whenDeathListener = msg => { - if (msg.context === LEADER_CONTEXT && msg.action === MESSAGES.DEATH) { - // Leader has died, so there is now no leader. - leaderElector.leaderExists = false; - leaderElector.applyOnce().then(finish); - } - }; - leaderElector._channel.addEventListener(INTERNAL_CHANNEL, whenDeathListener); - leaderElector._lstns.push(whenDeathListener); - }); -} - -/** - * sends an internal message over the broadcast-channel - */ -function _sendMessage(leaderElector, action) { - const msgJson = { - context: LEADER_CONTEXT, - action, - token: leaderElector.token, - dictator: leaderElector._options.dictator, - }; - return leaderElector._channel.postInternal(msgJson); -} - -function _beLeader(leaderElector) { - if (!leaderElector.isLeader) { - leaderElector.isLeader = true; - if (isFunction(leaderElector.electedCallback)) { - leaderElector.electedCallback(); - } - const unloadFn = add(() => leaderElector.die()); - leaderElector._unl.push(unloadFn); - - const isLeaderListener = msg => { - if (msg.context === LEADER_CONTEXT && msg.action === MESSAGES.APPLY) { - _sendMessage(leaderElector, MESSAGES.TELL); - } - }; - const isDictator = this._options.dictator; - if (!isDictator) { - const coupListener = msg => { - if ( - msg.context === LEADER_CONTEXT && - msg.action === MESSAGES.APPLY && - !isDictator && - msg.dictator - ) { - leaderElector.depose(); - } - }; - leaderElector._channel.addEventListener(INTERNAL_CHANNEL, coupListener); - leaderElector._lstns.push(coupListener); - } - leaderElector._channel.addEventListener(INTERNAL_CHANNEL, isLeaderListener); - leaderElector._lstns.push(isLeaderListener); - return _sendMessage(leaderElector, MESSAGES.TELL); - } - return Promise.resolve(); -} - -function fillOptionsWithDefaults(options, channel) { - if (!options) options = {}; - options = JSON.parse(JSON.stringify(options)); - - if (!options.fallbackInterval) { - options.fallbackInterval = 3000; - } - - if (!options.responseTime) { - options.responseTime = channel.method.averageResponseTime(channel.options); - } - - if (!options.dictator) { - options.dictator = false; - } - - return options; -} - -export function createLeaderElection(channel, options) { - if (channel._leaderElector) { - throw new Error('BroadcastChannel already has a leader-elector'); - } - - options = fillOptionsWithDefaults(options, channel); - const elector = new LeaderElection(channel, options); - channel._befC.push(() => elector.die()); - - channel._leaderElector = elector; - return elector; -} diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index e29de0ed3d..91bfb21b0a 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -15,14 +15,11 @@ import uniq from 'lodash/uniq'; import uniqBy from 'lodash/uniqBy'; import uuidv4 from 'uuid/v4'; -import channel from './broadcastChannel'; import { 
CHANGE_TYPES, CHANGES_TABLE, IGNORED_SOURCE, - MESSAGES, RELATIVE_TREE_POSITIONS, - STATUS, TABLE_NAMES, COPYING_FLAG, TASK_ID, @@ -188,44 +185,6 @@ class APIResource { fetchCollection(params) { return client.get(this.collectionUrl(), { params }); } - - makeRequest(request) { - return new Promise((resolve, reject) => { - const messageId = uuidv4(); - function handler(msg) { - if (msg.messageId === messageId && msg.type === MESSAGES.REQUEST_RESPONSE) { - channel.removeEventListener('message', handler); - if (msg.status === STATUS.SUCCESS) { - return resolve(msg.data); - } else if (msg.status === STATUS.FAILURE && msg.err) { - return reject(msg.err); - } - // Otherwise something unspecified happened - return reject(); - } - } - channel.addEventListener('message', handler); - channel.postMessage({ - ...request, - urlName: this.urlName, - messageId, - }); - }); - } - - requestModel(id) { - return this.makeRequest({ - type: MESSAGES.FETCH_MODEL, - id, - }); - } - - requestCollection(params) { - return this.makeRequest({ - type: MESSAGES.FETCH_COLLECTION, - params, - }); - } } class IndexedDBResource { @@ -658,9 +617,12 @@ class Resource extends mix(APIResource, IndexedDBResource) { let pageData; if (Array.isArray(response.data)) { itemData = response.data; - } else { + } else if (response.data && response.data.results) { pageData = response.data; itemData = pageData.results; + } else { + console.error(`Unexpected response from ${this.urlName}`, response); + itemData = []; } return this.setData(itemData).then(data => { // setData also applies any outstanding local change events to the data @@ -688,7 +650,7 @@ class Resource extends mix(APIResource, IndexedDBResource) { * @return {Promise} */ where(params = {}, doRefresh = true) { - if (process.env.NODE_ENV !== 'production' && !process.env.TRAVIS) { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { /* eslint-disable no-console */ console.groupCollapsed(`Getting data for ${this.tableName} table with params: `, params); console.trace(); @@ -700,7 +662,7 @@ class Resource extends mix(APIResource, IndexedDBResource) { return []; } if (!objs.length && !objs.count) { - return this.requestCollection(params); + return this.fetchCollection(params); } if (doRefresh) { // Only fetch new updates if we've finished syncing the changes table @@ -710,7 +672,7 @@ class Resource extends mix(APIResource, IndexedDBResource) { .toArray() .then(pendingChanges => { if (pendingChanges.length === 0) { - this.requestCollection(params); + this.fetchCollection(params); } }); } @@ -767,7 +729,7 @@ class Resource extends mix(APIResource, IndexedDBResource) { return Promise.reject('Only string ID format is supported'); } - if (process.env.NODE_ENV !== 'production' && !process.env.TRAVIS) { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { /* eslint-disable no-console */ console.groupCollapsed(`Getting instance for ${this.tableName} table with id: ${id}`); console.trace(); @@ -776,7 +738,7 @@ class Resource extends mix(APIResource, IndexedDBResource) { } return this.table.get(id).then(obj => { if (!obj || doRefresh) { - const request = this.requestModel(id); + const request = this.fetchModel(id); if (!obj) { return request; } @@ -885,8 +847,8 @@ export const Session = new IndexedDBResource({ uuid: false, listeners: { [CHANGE_TYPES.DELETED]: function() { - if (!window.location.pathname.endsWith(window.Urls.accounts())) { - window.location = window.Urls.accounts(); + if 
(!window.location.pathname.endsWith(urls.accounts())) { + window.location = urls.accounts(); } }, }, @@ -1068,7 +1030,7 @@ export const ContentNode = new TreeResource({ }, getRequisites(id) { - if (process.env.NODE_ENV !== 'production' && !process.env.TRAVIS) { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { /* eslint-disable no-console */ console.groupCollapsed(`Getting prerequisite data for ${this.tableName} table with id: `, id); console.trace(); @@ -1255,7 +1217,7 @@ export const ContentNode = new TreeResource({ * @return {Promise} */ copy(id, target, position = RELATIVE_TREE_POSITIONS.LAST_CHILD, excluded_descendants = null) { - if (process.env.NODE_ENV !== 'production' && !process.env.TRAVIS) { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { /* eslint-disable no-console */ console.groupCollapsed(`Copying contentnode from ${id} with target ${target}`); console.trace(); @@ -1313,7 +1275,7 @@ export const ContentNode = new TreeResource({ } return [node]; } - return this.requestCollection({ ancestors_of: id }); + return this.fetchCollection({ ancestors_of: id }); }); }, @@ -1329,7 +1291,7 @@ export const ContentNode = new TreeResource({ const values = [nodeId, channelId]; return this.table.get({ '[node_id+channel_id]': values }).then(node => { if (!node) { - return this.requestCollection({ _node_id_channel_id_: values }).then(nodes => nodes[0]); + return this.fetchCollection({ _node_id_channel_id_: values }).then(nodes => nodes[0]); } return node; }); @@ -1462,11 +1424,11 @@ export const ChannelUser = new APIResource({ return Promise.all([editorCollection.toArray(), viewerCollection.toArray()]).then( ([editors, viewers]) => { if (!editors.length && !viewers.length) { - return this.requestCollection(params); + return this.fetchCollection(params); } if (objectsAreStale(editors) || objectsAreStale(viewers)) { // Do a synchronous refresh instead of background refresh here. - return this.requestCollection(params); + return this.fetchCollection(params); } const editorSet = new Set(editors.map(editor => editor.user)); const viewerSet = new Set(viewers.map(viewer => viewer.user)); diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 7c34ad3103..8e842d3760 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -2,19 +2,11 @@ import debounce from 'lodash/debounce'; import get from 'lodash/get'; import pick from 'lodash/pick'; import applyChanges from './applyRemoteChanges'; -import { createChannel } from './broadcastChannel'; import { hasActiveLocks, cleanupLocks } from './changes'; -import { - CHANGE_LOCKS_TABLE, - CHANGE_TYPES, - CHANGES_TABLE, - IGNORED_SOURCE, - MESSAGES, - STATUS, -} from './constants'; +import { CHANGE_LOCKS_TABLE, CHANGE_TYPES, CHANGES_TABLE, IGNORED_SOURCE } from './constants'; import db from './db'; import mergeAllChanges from './mergeChanges'; -import { API_RESOURCES, INDEXEDDB_RESOURCES } from './registry'; +import { INDEXEDDB_RESOURCES } from './registry'; import client from 'shared/client'; import urls from 'shared/urls'; @@ -25,76 +17,9 @@ const SYNC_BUFFER = 1000; // change being registered, sync changes! 
const SYNC_IF_NO_CHANGES_FOR = 2; -// In order to listen to messages being sent -// by all windows, including this one, for requests -// to fetch collections or models, we have to create -// a new channel instance, rather than using the one -// already instantiated in the broadcastChannel module. -const channel = createChannel(); - -// Stores last setTimeout in polling so we may clear it when we want -let unsyncedPollingTimeoutId; - // Flag to check if a sync is currently active. let syncActive = false; -function handleFetchMessages(msg) { - if (msg.type === MESSAGES.FETCH_COLLECTION && msg.urlName && msg.params) { - API_RESOURCES[msg.urlName] - .fetchCollection(msg.params) - .then(data => { - channel.postMessage({ - messageId: msg.messageId, - type: MESSAGES.REQUEST_RESPONSE, - status: STATUS.SUCCESS, - data, - }); - }) - .catch(err => { - try { - JSON.stringify(err); - } catch (e) { - // If can't convert err to JSON, postMessage will break - err = err.toString(); - } - channel.postMessage({ - messageId: msg.messageId, - type: MESSAGES.REQUEST_RESPONSE, - status: STATUS.FAILURE, - err, - }); - }); - } - if (msg.type === MESSAGES.FETCH_MODEL && msg.urlName && msg.id) { - API_RESOURCES[msg.urlName] - .fetchModel(msg.id) - .then(data => { - channel.postMessage({ - messageId: msg.messageId, - type: MESSAGES.REQUEST_RESPONSE, - status: STATUS.SUCCESS, - data, - }); - }) - .catch(err => { - channel.postMessage({ - messageId: msg.messageId, - type: MESSAGES.REQUEST_RESPONSE, - status: STATUS.FAILURE, - err, - }); - }); - } -} - -function startChannelFetchListener() { - channel.addEventListener('message', handleFetchMessages); -} - -function stopChannelFetchListener() { - channel.removeEventListener('message', handleFetchMessages); -} - function isSyncableChange(change) { const src = change.source || ''; @@ -301,9 +226,6 @@ const debouncedSyncChanges = debounce(() => { if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { window.forceServerSync = forceServerSync; - - window.stopPollingUnsyncedChanges = stopPollingUnsyncedChanges; - window.pollUnsyncedChanges = pollUnsyncedChanges; } async function handleChanges(changes) { @@ -332,47 +254,16 @@ async function handleChanges(changes) { } } -async function checkAndSyncChanges() { - // Get count of changes that we care about - const changes = await db[CHANGES_TABLE].toCollection() - // Only try to sync if we have at least one change that has - // not already errored on the backend. - .filter(c => !c.errors) - .count(); - - // If more than 0, sync the changes - if (changes > 0) { - debouncedSyncChanges(); - } -} - -async function pollUnsyncedChanges() { - await checkAndSyncChanges(); - unsyncedPollingTimeoutId = setTimeout(() => pollUnsyncedChanges(), SYNC_IF_NO_CHANGES_FOR * 1000); -} - -function stopPollingUnsyncedChanges() { - if (unsyncedPollingTimeoutId) { - clearTimeout(unsyncedPollingTimeoutId); - } -} - export function startSyncing() { - startChannelFetchListener(); cleanupLocks(); // Initiate a sync immediately in case any data // is left over in the database. 
debouncedSyncChanges(); - // Begin polling our CHANGES_TABLE - pollUnsyncedChanges(); db.on('changes', handleChanges); } export function stopSyncing() { - stopChannelFetchListener(); debouncedSyncChanges.cancel(); - // Stop pollUnsyncedChanges - stopPollingUnsyncedChanges(); // Dexie's slightly counterintuitive method for unsubscribing from events db.on('changes').unsubscribe(handleChanges); } diff --git a/contentcuration/contentcuration/frontend/shared/i18n/index.js b/contentcuration/contentcuration/frontend/shared/i18n/index.js index 3714f22b74..7dd2a57e53 100644 --- a/contentcuration/contentcuration/frontend/shared/i18n/index.js +++ b/contentcuration/contentcuration/frontend/shared/i18n/index.js @@ -202,11 +202,13 @@ function _setUpVueIntl() { } export function updateTabTitle(title) { - let site = titleStrings.$tr(window.libraryMode ? 'catalogTitle' : 'defaultTitle'); - if (title) { - document.title = titleStrings.$tr('tabTitle', { title, site }); - } else { - document.title = site; + if (document) { + let site = titleStrings.$tr(window.libraryMode ? 'catalogTitle' : 'defaultTitle'); + if (title) { + document.title = titleStrings.$tr('tabTitle', { title, site }); + } else { + document.title = site; + } } } diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue index 8cd0005fdb..71fae6553f 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue @@ -310,14 +310,12 @@ return ( el.previousSibling && el.previousSibling.textContent && - el.previousSibling.textContent.match(/\s$/) + /\s$/.test(el.previousSibling.textContent) ); }; const hasRightwardSpace = el => { return ( - el.nextSibling && - el.nextSibling.textContent && - el.nextSibling.textContent.match(/^\s/) + el.nextSibling && el.nextSibling.textContent && /^\s/.test(el.nextSibling.textContent) ); }; if (!hasLeftwardSpace(el)) { @@ -514,14 +512,14 @@ isCustomNode(getElementAtRelativeOffset(selection, -2)) && selection.startContainer.nodeType === document.TEXT_NODE && getCharacterAtRelativeOffset(selection, -1) && - !!getCharacterAtRelativeOffset(selection, -1).match(/^\s/); + /^\s/.test(getCharacterAtRelativeOffset(selection, -1)); const spacerAndCustomElementAreRightward = selection => selection && isCustomNode(getElementAtRelativeOffset(selection, 2)) && selection.startContainer.nodeType === document.TEXT_NODE && getCharacterAtRelativeOffset(selection, 0) && - !!getCharacterAtRelativeOffset(selection, 0).match(/\s$/); + /\s$/.test(getCharacterAtRelativeOffset(selection, 0)); const moveCursor = (selection, amount) => { let { element, offset } = squire.getSelectionInfoByOffset( diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js index 7f2a55cc89..5e1725b07f 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js @@ -14,7 +14,7 @@ export default VueComponent => { // Vue instance is created vueInstanceCreatedCallback() { // by default, `contenteditable` will 
be false - this.setAttribute('contenteditable', !!VueComponent.contentEditable); + this.setAttribute('contenteditable', Boolean(VueComponent.contentEditable)); // a hack to prevent squire from merging custom element spans // see here: https://github.com/nhn/tui.editor/blob/master/libs/squire/source/Node.js#L92-L101 diff --git a/contentcuration/contentcuration/frontend/shared/views/ResizableNavigationDrawer.vue b/contentcuration/contentcuration/frontend/shared/views/ResizableNavigationDrawer.vue index 377debb966..897fb7b0f0 100644 --- a/contentcuration/contentcuration/frontend/shared/views/ResizableNavigationDrawer.vue +++ b/contentcuration/contentcuration/frontend/shared/views/ResizableNavigationDrawer.vue @@ -85,7 +85,7 @@ return this.temporary ? this.maxWidth : this.width; }, drawerElement() { - return this.$refs.drawer.$el; + return this.$refs.drawer && this.$refs.drawer.$el; }, isRight() { return this.$isRTL ? !this.right : this.right; @@ -96,9 +96,11 @@ this.throttledUpdateWidth = animationThrottle((...args) => updateWidth(...args)); this.$nextTick(() => { - const drawerBorder = this.drawerElement.querySelector('.v-navigation-drawer__border'); - drawerBorder.addEventListener('mousedown', this.handleMouseDown, false); - document.addEventListener('mouseup', this.handleMouseUp, false); + if (this.drawerElement) { + const drawerBorder = this.drawerElement.querySelector('.v-navigation-drawer__border'); + drawerBorder.addEventListener('mousedown', this.handleMouseDown, false); + document.addEventListener('mouseup', this.handleMouseUp, false); + } }); }, methods: { @@ -135,7 +137,9 @@ }); if (event.offsetX < 12) { - this.drawerElement.style.transition = 'initial'; + if (this.drawerElement) { + this.drawerElement.style.transition = 'initial'; + } document.addEventListener('mousemove', this.resize, false); } }, @@ -147,8 +151,9 @@ this.dragging = false; this.throttledUpdateWidth.cancel(); this.updateWidth(event.clientX); - - this.drawerElement.style.transition = ''; + if (this.drawerElement) { + this.drawerElement.style.transition = ''; + } document.body.style.cursor = ''; document.body.style.pointerEvents = 'unset'; diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js index 919742e3fe..b61cd28b54 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js @@ -12,7 +12,6 @@ import { SharingPermissions } from 'shared/constants'; import storeFactory from 'shared/vuex/baseStore'; import client from 'shared/client'; -jest.mock('shared/client'); jest.mock('shared/vuex/connectionPlugin'); const userId = 'testId'; @@ -38,22 +37,26 @@ describe('channel actions', () => { }); describe('loadChannelList action', () => { it('should call Channel.where', () => { - const whereSpy = jest.spyOn(Channel, 'where'); + const whereSpy = jest.spyOn(Channel, 'where').mockImplementation(() => Promise.resolve([])); return store.dispatch('channel/loadChannelList').then(() => { expect(whereSpy).toHaveBeenCalledWith({}); whereSpy.mockRestore(); }); }); it('should call Channel.where with a specific listType', () => { - const whereSpy = jest.spyOn(Channel, 'where'); + const whereSpy = jest.spyOn(Channel, 'where').mockImplementation(() => Promise.resolve([])); return store.dispatch('channel/loadChannelList', { listType: 'edit' }).then(() => { 
expect(whereSpy).toHaveBeenCalledWith({ edit: true }); whereSpy.mockRestore(); }); }); it('should set the returned data to the channels', () => { + const whereSpy = jest + .spyOn(Channel, 'where') + .mockImplementation(() => Promise.resolve([channelDatum])); return store.dispatch('channel/loadChannelList').then(() => { expect(store.getters['channel/channels']).toEqual([channelDatum]); + whereSpy.mockRestore(); }); }); }); @@ -69,15 +72,21 @@ describe('channel actions', () => { }); }); it('should call Channel.get if user is logged in', () => { - const getSpy = jest.spyOn(Channel, 'get'); + const getSpy = jest + .spyOn(Channel, 'get') + .mockImplementation(() => Promise.resolve(channelDatum)); return store.dispatch('channel/loadChannel', id).then(() => { expect(getSpy).toHaveBeenCalledWith(id); getSpy.mockRestore(); }); }); it('should set the returned data to the channels', () => { + const getSpy = jest + .spyOn(Channel, 'get') + .mockImplementation(() => Promise.resolve(channelDatum)); return store.dispatch('channel/loadChannel', id).then(() => { expect(store.getters['channel/channels']).toEqual([channelDatum]); + getSpy.mockRestore(); }); }); }); @@ -85,7 +94,6 @@ describe('channel actions', () => { it('should call client.get on get_channel_details', () => { return store.dispatch('channel/loadChannelDetails', id).then(() => { expect(client.get).toHaveBeenCalledWith('get_channel_details'); - client.get.mockRestore(); }); }); }); @@ -237,6 +245,20 @@ describe('Channel sharing vuex', () => { }; beforeEach(() => { + jest.spyOn(Channel, 'fetchModel').mockImplementation(() => Promise.resolve(channelDatum)); + jest + .spyOn(Channel, 'fetchCollection') + .mockImplementation(() => Promise.resolve([channelDatum])); + jest + .spyOn(Invitation, 'fetchModel') + .mockImplementation(() => Promise.resolve(makeInvitations(channelId)[0])); + jest + .spyOn(Invitation, 'fetchCollection') + .mockImplementation(() => Promise.resolve(makeInvitations(channelId))); + jest.spyOn(ChannelUser, 'fetchModel').mockImplementation(() => Promise.resolve(testUser)); + jest + .spyOn(ChannelUser, 'fetchCollection') + .mockImplementation(() => Promise.resolve([testUser])); return Channel.put(channelDatum).then(newId => { channelId = newId; const user = { @@ -263,6 +285,7 @@ describe('Channel sharing vuex', () => { }); }); afterEach(() => { + jest.restoreAllMocks(); return Promise.all([ Channel.table.toCollection().delete(), ViewerM2M.table.toCollection().delete(), @@ -280,8 +303,6 @@ describe('Channel sharing vuex', () => { }); it('getChannelInvitations should return pending invitations with the given permission', () => { const getter = store.getters['channel/getChannelInvitations']; - console.log(store.state.channel.invitationsMap); - console.log(getter(channelId, SharingPermissions.EDIT)); expect(getter(channelId, SharingPermissions.EDIT)[0]).toEqual({ ...testInvitations[0], channel: channelId, diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js index 71885ec783..eabd5059a8 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js @@ -248,7 +248,7 @@ export async function sendInvitation(context, { channelId, email, shareMode }) { await Invitation.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { return Invitation.table.put(postedInvitation.data); }); - return await context.commit('ADD_INVITATION', 
postedInvitation.data); + context.commit('ADD_INVITATION', postedInvitation.data); } export function deleteInvitation(context, invitationId) { diff --git a/contentcuration/contentcuration/frontend/shared/vuex/connectionPlugin/index.js b/contentcuration/contentcuration/frontend/shared/vuex/connectionPlugin/index.js index 0b43834a76..7a098fe9d4 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/connectionPlugin/index.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/connectionPlugin/index.js @@ -29,5 +29,5 @@ const ConnectionPlugin = store => { export default ConnectionPlugin; function isNetworkError(err) { - return !!err.isAxiosError && !err.response; + return Boolean(err.isAxiosError) && !err.response; } diff --git a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js index 353a54e5fa..d9b74e6823 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js @@ -2,7 +2,6 @@ import storeFactory from 'shared/vuex/baseStore'; import { File } from 'shared/data/resources'; import client from 'shared/client'; -jest.mock('shared/client'); jest.mock('shared/vuex/connectionPlugin'); const contentnode = 'testnode'; @@ -23,6 +22,8 @@ describe('file store', () => { let store; let id; beforeEach(() => { + jest.spyOn(File, 'fetchCollection').mockImplementation(() => Promise.resolve([testFile])); + jest.spyOn(File, 'fetchModel').mockImplementation(() => Promise.resolve(testFile)); return File.put(testFile).then(newId => { id = newId; store = storeFactory(); @@ -31,6 +32,7 @@ describe('file store', () => { }); }); afterEach(() => { + jest.restoreAllMocks(); return File.table.toCollection().delete(); }); describe('file getters', () => { diff --git a/contentcuration/contentcuration/management/commands/garbage_collect.py b/contentcuration/contentcuration/management/commands/garbage_collect.py index 3c7f473183..39bca5c3c5 100644 --- a/contentcuration/contentcuration/management/commands/garbage_collect.py +++ b/contentcuration/contentcuration/management/commands/garbage_collect.py @@ -4,11 +4,18 @@ tree" (i.e. `settings.ORPHANAGE_ROOT_ID`). Also delete the associated Files in the database and in object storage. 
""" +import logging as logmodule + from django.core.management.base import BaseCommand from contentcuration.utils.garbage_collect import clean_up_contentnodes from contentcuration.utils.garbage_collect import clean_up_deleted_chefs from contentcuration.utils.garbage_collect import clean_up_feature_flags +from contentcuration.utils.garbage_collect import clean_up_tasks + + +logmodule.basicConfig(level=logmodule.INFO) +logging = logmodule.getLogger('command') class Command(BaseCommand): @@ -20,6 +27,11 @@ def handle(self, *args, **options): # clean up contentnodes, files and file objects on storage that are associated # with the orphan tree + logging.info("Cleaning up contentnodes from the orphan tree") clean_up_contentnodes() + logging.info("Cleaning up deleted chef nodes") clean_up_deleted_chefs() + logging.info("Cleaning up feature flags") clean_up_feature_flags() + logging.info("Cleaning up tasks") + clean_up_tasks() diff --git a/contentcuration/contentcuration/management/commands/mark_incomplete.py b/contentcuration/contentcuration/management/commands/mark_incomplete.py index f0b2962f86..95b4702834 100644 --- a/contentcuration/contentcuration/management/commands/mark_incomplete.py +++ b/contentcuration/contentcuration/management/commands/mark_incomplete.py @@ -25,7 +25,7 @@ def handle(self, *args, **options): # Mark invalid titles titlestart = time.time() logging.info('Marking blank titles...') - count = ContentNode.objects.exclude(complete=False).filter(title='').order_by().update(complete=False) + count = ContentNode.objects.exclude(complete=False).filter(title='', parent__isnull=False).order_by().update(complete=False) logging.info('Marked {} invalid titles (finished in {})'.format(count, time.time() - titlestart)) # Mark invalid licenses diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 3b718da5b5..ea8fae573b 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -606,8 +606,7 @@ def generate_new_token(cls): token = proquint.generate() if SecretToken.exists(token): continue - else: - break + break # after TRIALS attempts and we didn't get a unique token, # just raise an error. # See https://stackoverflow.com/a/9980160 on what for-else loop does. @@ -854,7 +853,8 @@ def on_create(self): ) # Ensure that locust or unit tests raise if there are any concurrency issues with tree ids. if settings.DEBUG: - assert ContentNode.objects.filter(parent=None, tree_id=self.main_tree.tree_id).count() == 1 + if ContentNode.objects.filter(parent=None, tree_id=self.main_tree.tree_id).count() != 1: + raise AssertionError if not self.trash_tree: self.trash_tree = ContentNode.objects.create( @@ -2124,8 +2124,8 @@ def clean(self, *args, **kwargs): if self.target_node == self.prerequisite: raise IntegrityError('Cannot self reference as prerequisite.') # immediate cyclic exception - elif PrerequisiteContentRelationship.objects.using(self._state.db) \ - .filter(target_node=self.prerequisite, prerequisite=self.target_node): + if PrerequisiteContentRelationship.objects.using(self._state.db) \ + .filter(target_node=self.prerequisite, prerequisite=self.target_node): raise IntegrityError( 'Note: Prerequisite relationship is directional! %s and %s cannot be prerequisite of each other!' 
% (self.target_node, self.prerequisite)) @@ -2157,8 +2157,8 @@ def save(self, *args, **kwargs): if self.contentnode_1 == self.contentnode_2: raise IntegrityError('Cannot self reference as related.') # handle immediate cyclic - elif RelatedContentRelationship.objects.using(self._state.db) \ - .filter(contentnode_1=self.contentnode_2, contentnode_2=self.contentnode_1): + if RelatedContentRelationship.objects.using(self._state.db) \ + .filter(contentnode_1=self.contentnode_2, contentnode_2=self.contentnode_1): return # silently cancel the save super(RelatedContentRelationship, self).save(*args, **kwargs) diff --git a/contentcuration/contentcuration/node_metadata/query.py b/contentcuration/contentcuration/node_metadata/query.py index e22d80683f..95b6d9f87f 100644 --- a/contentcuration/contentcuration/node_metadata/query.py +++ b/contentcuration/contentcuration/node_metadata/query.py @@ -59,7 +59,7 @@ def get(self, node_pk): for row in query: self.metadata.update({row.pop('id'): row}) - return self.metadata.get(node_pk, None) + return self.metadata.get(node_pk) def build(self): """ diff --git a/contentcuration/contentcuration/perftools/objective.py b/contentcuration/contentcuration/perftools/objective.py index f54aac085d..8777e5a089 100644 --- a/contentcuration/contentcuration/perftools/objective.py +++ b/contentcuration/contentcuration/perftools/objective.py @@ -61,7 +61,8 @@ def create_content_nodes(self, num_nodes=100): parent = node elapsed = time.time() - start - assert ContentNode.objects.count() == current_nodes + num_nodes + if ContentNode.objects.count() != current_nodes + num_nodes: + raise AssertionError return elapsed def create_files(self, num_files=100): @@ -78,7 +79,8 @@ def create_files(self, num_files=100): file_obj = File.objects.create() elapsed = time.time() - start - assert File.objects.count() == current_files + num_files + if File.objects.count() != current_files + num_files: + raise AssertionError return elapsed def get_object_creation_stats(self, object_type, num_objects=100, num_runs=10): diff --git a/contentcuration/contentcuration/settings.py b/contentcuration/contentcuration/settings.py index ec5b181457..134b8d907b 100644 --- a/contentcuration/contentcuration/settings.py +++ b/contentcuration/contentcuration/settings.py @@ -14,11 +14,11 @@ import os import re import sys -from datetime import datetime from datetime import timedelta from tempfile import gettempdir import pycountry +from django.utils.timezone import now from contentcuration.utils.incidents import INCIDENTS from contentcuration.utils.secretmanagement import get_secret @@ -364,7 +364,7 @@ def gettext(s): # When cleaning up orphan nodes, only clean up any that have been last modified # since this date # our default threshold is two weeks ago -TWO_WEEKS_AGO = datetime.now() - timedelta(days=14) +TWO_WEEKS_AGO = now() - timedelta(days=14) ORPHAN_DATE_CLEAN_UP_THRESHOLD = TWO_WEEKS_AGO # CLOUD STORAGE SETTINGS diff --git a/contentcuration/contentcuration/tasks.py b/contentcuration/contentcuration/tasks.py index e603aaedf3..e6dbe1530c 100644 --- a/contentcuration/contentcuration/tasks.py +++ b/contentcuration/contentcuration/tasks.py @@ -219,9 +219,12 @@ class CustomEmailMessage(EmailMessage): bytes when it comes to encoding the attachment as base64 """ def attach(self, filename=None, content=None, mimetype=None): - assert filename is not None - assert content is not None - assert mimetype is not None + if filename is None: + raise AssertionError + if content is None: + raise AssertionError + if mimetype is 
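A note on the settings change above: django.utils.timezone.now() returns an aware datetime when USE_TZ is enabled, and comparing an aware model timestamp against a naive threshold raises TypeError, which is presumably the motivation for dropping datetime.now(). A standard-library illustration:

from datetime import datetime, timedelta, timezone

aware_now = datetime.now(timezone.utc)  # analogous to django.utils.timezone.now()
naive_now = datetime.now()              # what the old settings code produced

try:
    naive_now < aware_now - timedelta(days=14)
except TypeError as err:
    print(err)  # can't compare offset-naive and offset-aware datetimes

Either way, TWO_WEEKS_AGO is still evaluated once at import time, so a long-running process keeps whatever threshold was computed at startup.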
None: + raise AssertionError self.attachments.append((filename, content, mimetype)) @@ -329,7 +332,7 @@ def get_or_create_async_task(task_name, user, **task_args): qs = Task.objects.filter( task_type=task_name, status__in=[STATE_QUEUED, states.PENDING, states.RECEIVED, states.STARTED], - channel_id=task_args.get("channel_id", None), + channel_id=task_args.get("channel_id"), metadata={"args": task_args}, ) @@ -373,7 +376,7 @@ def create_async_task(task_name, user, apply_async=True, **task_args): task_type=task_name, status=STATE_QUEUED, user=user, - channel_id=task_args.get("channel_id", None), + channel_id=task_args.get("channel_id"), metadata={"args": task_args}, ) task_sig = async_task.signature( diff --git a/contentcuration/contentcuration/tests/test_chef_pipeline.py b/contentcuration/contentcuration/tests/test_chef_pipeline.py index 9e93512770..3a0e0ea0c9 100644 --- a/contentcuration/contentcuration/tests/test_chef_pipeline.py +++ b/contentcuration/contentcuration/tests/test_chef_pipeline.py @@ -88,7 +88,7 @@ def test_authenticate_user_internal(self): response = self.post(self.authenticate_user_internal_url, None) assert response.status_code == 200 data = json.loads(response.content) - assert data["success"] + assert data["success"] is True assert data["username"] == user().email def test_check_version_bad_request(self): diff --git a/contentcuration/contentcuration/tests/test_createchannel.py b/contentcuration/contentcuration/tests/test_createchannel.py index 693d181ee2..854cb21276 100644 --- a/contentcuration/contentcuration/tests/test_createchannel.py +++ b/contentcuration/contentcuration/tests/test_createchannel.py @@ -10,7 +10,6 @@ from .base import BaseTestCase from .testdata import create_studio_file from contentcuration import models -from contentcuration import models as cc ### # Test helper functions @@ -60,8 +59,8 @@ def setUpClass(cls): def setUp(self): super(CreateChannelTestCase, self).setUp() - self.topic = cc.ContentKind.objects.get(kind="topic") - self.license = cc.License.objects.all()[0] + self.topic = models.ContentKind.objects.get(kind="topic") + self.license = models.License.objects.all()[0] self.fileinfo_audio = create_studio_file("abc", preset='audio', ext='mp3') self.fileinfo_video = create_studio_file("def", preset='high_res_video', ext='mp4') self.fileinfo_video_webm = create_studio_file("ghi", preset='high_res_video', ext='webm') diff --git a/contentcuration/contentcuration/tests/test_exportchannel.py b/contentcuration/contentcuration/tests/test_exportchannel.py index ecb3062e38..2184030af6 100644 --- a/contentcuration/contentcuration/tests/test_exportchannel.py +++ b/contentcuration/contentcuration/tests/test_exportchannel.py @@ -200,7 +200,7 @@ def test_convert_channel_thumbnail_encoding_valid(self): def test_convert_channel_thumbnail_encoding_invalid(self): with patch("contentcuration.utils.publish.get_thumbnail_encoding", return_value="this is a test"): channel = cc.Channel.objects.create(thumbnail="/content/kolibri_flapping_bird.png", thumbnail_encoding={}) - self.assertEquals("this is a test", convert_channel_thumbnail(channel)) + self.assertEqual("this is a test", convert_channel_thumbnail(channel)) def test_create_slideshow_manifest(self): content_channel = cc.Channel.objects.create() diff --git a/contentcuration/contentcuration/tests/test_garbage_collect.py b/contentcuration/contentcuration/tests/test_garbage_collect.py deleted file mode 100755 index 1c6f8694fc..0000000000 --- a/contentcuration/contentcuration/tests/test_garbage_collect.py +++ 
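For context on the many assert-to-raise conversions throughout this diff: bare assert statements are stripped entirely when Python runs under the -O flag, so guards that must always fire are rewritten as explicit raises. A small sketch of the difference:

def attach(filename=None):
    assert filename is not None, "filename is required"  # compiled away under `python -O`
    return filename

def attach_checked(filename=None):
    if filename is None:  # enforced regardless of interpreter flags
        raise AssertionError("filename is required")
    return filename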
/dev/null @@ -1,215 +0,0 @@ -#!/usr/bin/env python -from __future__ import absolute_import - -from builtins import range -from datetime import datetime -from datetime import timedelta - -import requests -from django.conf import settings -from django.core.files.base import ContentFile -from django.core.files.storage import default_storage -from le_utils.constants import content_kinds - -from .base import BaseTestCase -from .base import StudioTestCase -from contentcuration.models import ContentNode -from contentcuration.models import File -from contentcuration.utils.garbage_collect import clean_up_contentnodes -from contentcuration.utils.garbage_collect import clean_up_feature_flags - - -THREE_MONTHS_AGO = datetime.now() - timedelta(days=93) - - -def _create_expired_contentnode(creation_date=THREE_MONTHS_AGO): - c = ContentNode.objects.create( - kind_id=content_kinds.TOPIC, - title="test", - modified=creation_date, - created=creation_date, - parent_id=settings.ORPHANAGE_ROOT_ID, - ) - # Use q queryset.update() to bypass auto_now's forced setting of - # created to now() - ContentNode.objects.filter(pk=c.pk).update( - created=creation_date, - modified=creation_date, - ) - return c - - -class CleanUpContentNodesTestCase(StudioTestCase): - - def test_delete_all_contentnodes_in_orphanage_tree(self): - """ - Make sure that by default, all nodes created with a timestamp of 3 months - ago doesn't exist anymore. - """ - - # create our contentnodes that will go under our garbage tree - num_contentnodes = 3 - for _ in range(num_contentnodes): - _create_expired_contentnode() - - garbage_tree = ContentNode.objects.get(pk=settings.ORPHANAGE_ROOT_ID) - - # sanity check to see if we have X contentnodes under the garbage tree - assert garbage_tree.get_descendant_count() == num_contentnodes - - # now clean up our contentnodes, and check that our descendant count is indeed 0 now - clean_up_contentnodes() - garbage_tree.refresh_from_db() - assert garbage_tree.get_descendant_count() == 0 - - def test_deletes_associated_files(self): - c = _create_expired_contentnode() - f = File.objects.create( - contentnode_id=c.pk, - file_on_disk=ContentFile(b"test"), - checksum="aaa", - ) - - f.file_on_disk.save("aaa.jpg", ContentFile("aaa")) - file_url = f.file_on_disk.url - - # check that file_url exists before cleaning up - requests.head(file_url).raise_for_status() - clean_up_contentnodes() - - # there should be no file object in the DB - assert File.objects.count() == 0 - - def test_doesnt_delete_shared_files(self): - """ - Make sure that a file shared between two file objects doesn't - get deleted when one of the file objects gets deleted - """ - c = _create_expired_contentnode() - file_on_disk = ContentFile(b"test") - f = File.objects.create( - contentnode_id=c.pk, - file_on_disk=file_on_disk, - checksum="aaa", - ) - f.file_on_disk.save("aaa.jpg", file_on_disk) - file_url = f.file_on_disk.url - - c2 = ContentNode.objects.create(kind_id=content_kinds.TOPIC, title="test") - f2 = File.objects.create( - contentnode_id=c2.pk, - file_on_disk=file_on_disk, - checksum="aaa", - ) - f2.file_on_disk.save("aaa.jpg", file_on_disk) - - # check that file_url exists before cleaning up - requests.head(file_url).raise_for_status() - clean_up_contentnodes() - - # the file should still be available - response = requests.head(file_url) - assert response.status_code == 200 - - def test_doesnt_delete_nonorphan_files_and_contentnodes(self): - """ - Make sure that clean_up_contentnodes doesn't touch non-orphan files and - contentnodes. 
Bad things will happen if we do. - """ - # this legit tree, since it's not attached to our - # orphan tree, should still exist after cleanup - legit_tree = ContentNode.objects.create( - kind_id=content_kinds.TOPIC, - ) - # this file should still be here too since we attach - # it to our legit tree - f = File.objects.create( - contentnode=legit_tree, - ) - - # this node should be gone - expired_node = _create_expired_contentnode() - - # do our cleanup! - clean_up_contentnodes() - - # assert that out expired node doesn't exist - assert not ContentNode.objects.filter(pk=expired_node.pk).exists() - - # assert that our legit tree still exists - assert ContentNode.objects.filter(pk=legit_tree.pk).exists() - assert File.objects.filter(pk=f.pk).exists() - - def test_doesnt_delete_old_legit_tree(self): - """ - Make sure we don't delete an old content tree, as long as it's not under the - orphan tree. - """ - - # orphan node. This shouldn't exist anymore at the end of our test. - orphan_node = _create_expired_contentnode() - - # our old, but not orphaned tree. This should exist at the end of our test. - legit_node = ContentNode.objects.create( - kind_id=content_kinds.TOPIC, - ) - # mark the legit_node as old - ContentNode.objects.filter(pk=legit_node.pk).update( - created=THREE_MONTHS_AGO, - modified=THREE_MONTHS_AGO, - ) - - clean_up_contentnodes() - - # is our orphan gone? :( - assert not ContentNode.objects.filter(pk=orphan_node.pk).exists() - # is our senior, legit node still around? :) - assert ContentNode.objects.filter(pk=legit_node.pk).exists() - - def test_doesnt_delete_file_referenced_by_orphan_and_nonorphan_nodes(self): - """ - Make sure we don't delete a file, as long as it's referenced - by a non-orphan node. - """ - - # Our orphan, to be taken soon from this world - orphan_node = _create_expired_contentnode() - - # our legit node, standing proud and high with its non-orphaned status - legit_node = ContentNode.objects.create( - kind_id=content_kinds.VIDEO, - ) - - f = File.objects.create( - contentnode=legit_node, - checksum="aaa", - ) - forphan = File.objects.create( - contentnode=orphan_node, - checksum="aaa", - ) - - # The file they both share. This has the same checksum and contents. - # Alas, a file cannot have an orphan and non-orphan reference. This must - # not be deleted. 
- f.file_on_disk.save("aaa.jpg", ContentFile("aaa")) - forphan.file_on_disk.save("aaa.jpg", ContentFile("aaa")) - - # check that our file exists in object storage - assert default_storage.exists("storage/a/a/aaa.jpg") - - clean_up_contentnodes() - - assert default_storage.exists("storage/a/a/aaa.jpg") - - -class CleanUpFeatureFlagsTestCase(BaseTestCase): - def test_clean_up(self): - key = "feature_flag_does_not_exist" - self.user.feature_flags = { - key: True - } - self.user.save() - clean_up_feature_flags() - self.user.refresh_from_db() - self.assertNotIn(key, self.user.feature_flags) diff --git a/contentcuration/contentcuration/tests/test_garbage_collection.py b/contentcuration/contentcuration/tests/test_garbage_collection.py deleted file mode 100644 index e8fc482fab..0000000000 --- a/contentcuration/contentcuration/tests/test_garbage_collection.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import absolute_import - -import json - -import pytest -from django.conf import settings -from django.urls import reverse_lazy -from le_utils.constants import content_kinds - -from .base import BaseAPITestCase -from .testdata import tree -from contentcuration import models as cc -from contentcuration.api import activate_channel -from contentcuration.utils.garbage_collect import clean_up_deleted_chefs -from contentcuration.utils.garbage_collect import get_deleted_chefs_root -from contentcuration.views.internal import api_commit_channel -from contentcuration.views.internal import create_channel - -pytestmark = pytest.mark.django_db - - -class NodeSettingTestCase(BaseAPITestCase): - def setUp(self): - super(NodeSettingTestCase, self).setUp() - # Set up ricecooker trees - self.channel.staging_tree = cc.ContentNode( - kind_id=content_kinds.TOPIC, title="test", node_id="aaa" - ) - self.channel.staging_tree.save() - self.channel.previous_tree = cc.ContentNode( - kind_id=content_kinds.TOPIC, title="test", node_id="bbb" - ) - self.channel.previous_tree.save() - self.channel.chef_tree = cc.ContentNode( - kind_id=content_kinds.TOPIC, title="test", node_id="ccc" - ) - self.channel.chef_tree.save() - self.channel.save() - - self.contentnode = cc.ContentNode.objects.create(kind_id="video") - - def test_garbage_node_created(self): - # Make sure loadconstants created the garbage node - self.assertTrue( - cc.ContentNode.objects.filter(pk=settings.ORPHANAGE_ROOT_ID).exists() - ) - - def test_file_move(self): - node = self.contentnode - - # Move node and check if it's still in the garbage tree - node.parent_id = self.channel.main_tree.pk - node.save() - garbage_node = cc.ContentNode.objects.get(pk=settings.ORPHANAGE_ROOT_ID) - - # Node shouldn't be in garbage tree - self.assertFalse(garbage_node.get_descendants().filter(pk=node.pk).exists()) - self.assertNotEqual(garbage_node.tree_id, node.tree_id) - - def test_old_chef_tree(self): - # make an actual tree for deletion tests - tree(parent=self.channel.chef_tree) - chef_tree = self.channel.chef_tree - self.assertTrue(chef_tree.get_descendant_count() > 0) - garbage_node = get_deleted_chefs_root() - - self.assertNotEqual(chef_tree, self.channel.staging_tree) - # Chef tree shouldn't be in garbage tree until create_channel is called - self.assertFalse( - garbage_node.get_descendants().filter(pk=chef_tree.pk).exists() - ) - create_channel(self.channel.__dict__, self.user) - garbage_node.refresh_from_db() - chef_tree.refresh_from_db() - self.channel.refresh_from_db() - - # We can't use MPTT methods to test the deleted chefs tree because we are not running the sort code - 
# for performance reasons, so just do a parent test instead. - self.assertEquals(chef_tree.parent.pk, garbage_node.pk) - - # New staging tree should not be in garbage tree - self.assertFalse(self.channel.chef_tree.parent) - self.assertNotEqual(garbage_node.tree_id, self.channel.chef_tree.tree_id) - - child_pk = chef_tree.children.first().pk - - clean_up_deleted_chefs() - - self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) - self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) - - def test_old_staging_tree(self): - staging_tree = self.channel.staging_tree - garbage_node = get_deleted_chefs_root() - - tree(parent=staging_tree) - self.assertTrue(staging_tree.get_descendant_count() > 0) - - # Staging tree shouldn't be in garbage tree until api_commit_channel is called - self.assertFalse( - garbage_node.get_descendants().filter(pk=staging_tree.pk).exists() - ) - request = self.create_post_request( - reverse_lazy("api_finish_channel"), - data=json.dumps({"channel_id": self.channel.pk}), - content_type="application/json", - ) - response = api_commit_channel(request) - self.assertEqual(response.status_code, 200) - garbage_node.refresh_from_db() - staging_tree.refresh_from_db() - self.channel.refresh_from_db() - - # We can't use MPTT methods on the deleted chefs tree because we are not running the sort code - # for performance reasons, so just do a parent test instead. - self.assertEqual(staging_tree.parent, garbage_node) - - # New staging tree should not be in garbage tree - self.assertFalse(self.channel.main_tree.parent) - self.assertNotEqual(garbage_node.tree_id, self.channel.main_tree.tree_id) - - child_pk = staging_tree.children.first().pk - - clean_up_deleted_chefs() - - self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) - self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) - - def test_activate_channel(self): - previous_tree = self.channel.previous_tree - tree(parent=previous_tree) - garbage_node = get_deleted_chefs_root() - - # Previous tree shouldn't be in garbage tree until activate_channel is called - self.assertFalse( - garbage_node.get_descendants().filter(pk=previous_tree.pk).exists() - ) - activate_channel(self.channel, self.user) - garbage_node.refresh_from_db() - previous_tree.refresh_from_db() - self.channel.refresh_from_db() - - # We can't use MPTT methods on the deleted chefs tree because we are not running the sort code - # for performance reasons, so just do a parent test instead. - self.assertTrue(previous_tree.parent == garbage_node) - - # New previous tree should not be in garbage tree - self.assertFalse(self.channel.previous_tree.parent) - self.assertNotEqual(garbage_node.tree_id, self.channel.previous_tree.tree_id) - - child_pk = previous_tree.children.first().pk - - clean_up_deleted_chefs() - - self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) - self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) diff --git a/contentcuration/contentcuration/tests/test_utils.py b/contentcuration/contentcuration/tests/test_utils.py index 66b4286973..a07fe025d6 100644 --- a/contentcuration/contentcuration/tests/test_utils.py +++ b/contentcuration/contentcuration/tests/test_utils.py @@ -92,7 +92,6 @@ def test_guess_format_from_extension(self): Make sure that we can guess file types listed in le_utils.file_formats.choices. Note: if this test fails, it's likely because le_utils file formats aren't synced. 
""" - from le_utils.constants import file_formats known_extensions = list(dict(file_formats.choices).keys()) for ext in known_extensions: diff --git a/contentcuration/contentcuration/tests/utils/test_garbage_collect.py b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py new file mode 100644 index 0000000000..1cc14460b9 --- /dev/null +++ b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py @@ -0,0 +1,388 @@ +import json +from datetime import datetime +from datetime import timedelta + +import pytest +import requests +from celery import states +from django.conf import settings +from django.core.files.base import ContentFile +from django.core.files.storage import default_storage +from django.urls import reverse_lazy +from le_utils.constants import content_kinds + +from contentcuration import models as cc +from contentcuration.api import activate_channel +from contentcuration.models import ContentNode +from contentcuration.models import File +from contentcuration.models import Task +from contentcuration.tests.base import BaseAPITestCase +from contentcuration.tests.base import BaseTestCase +from contentcuration.tests.base import StudioTestCase +from contentcuration.tests.testdata import tree +from contentcuration.utils.garbage_collect import clean_up_contentnodes +from contentcuration.utils.garbage_collect import clean_up_deleted_chefs +from contentcuration.utils.garbage_collect import clean_up_feature_flags +from contentcuration.utils.garbage_collect import clean_up_tasks +from contentcuration.utils.garbage_collect import get_deleted_chefs_root +from contentcuration.views.internal import api_commit_channel +from contentcuration.views.internal import create_channel + +pytestmark = pytest.mark.django_db + + +class NodeSettingTestCase(BaseAPITestCase): + def setUp(self): + super(NodeSettingTestCase, self).setUp() + # Set up ricecooker trees + self.channel.staging_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="aaa" + ) + self.channel.staging_tree.save() + self.channel.previous_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="bbb" + ) + self.channel.previous_tree.save() + self.channel.chef_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="ccc" + ) + self.channel.chef_tree.save() + self.channel.save() + + self.contentnode = cc.ContentNode.objects.create(kind_id="video") + + def test_garbage_node_created(self): + # Make sure loadconstants created the garbage node + self.assertTrue( + cc.ContentNode.objects.filter(pk=settings.ORPHANAGE_ROOT_ID).exists() + ) + + def test_file_move(self): + node = self.contentnode + + # Move node and check if it's still in the garbage tree + node.parent_id = self.channel.main_tree.pk + node.save() + garbage_node = cc.ContentNode.objects.get(pk=settings.ORPHANAGE_ROOT_ID) + + # Node shouldn't be in garbage tree + self.assertFalse(garbage_node.get_descendants().filter(pk=node.pk).exists()) + self.assertNotEqual(garbage_node.tree_id, node.tree_id) + + def test_old_chef_tree(self): + # make an actual tree for deletion tests + tree(parent=self.channel.chef_tree) + chef_tree = self.channel.chef_tree + self.assertTrue(chef_tree.get_descendant_count() > 0) + garbage_node = get_deleted_chefs_root() + + self.assertNotEqual(chef_tree, self.channel.staging_tree) + # Chef tree shouldn't be in garbage tree until create_channel is called + self.assertFalse( + garbage_node.get_descendants().filter(pk=chef_tree.pk).exists() + ) + create_channel(self.channel.__dict__, 
self.user) + garbage_node.refresh_from_db() + chef_tree.refresh_from_db() + self.channel.refresh_from_db() + + # We can't use MPTT methods to test the deleted chefs tree because we are not running the sort code + # for performance reasons, so just do a parent test instead. + self.assertEquals(chef_tree.parent.pk, garbage_node.pk) + + # New staging tree should not be in garbage tree + self.assertFalse(self.channel.chef_tree.parent) + self.assertNotEqual(garbage_node.tree_id, self.channel.chef_tree.tree_id) + + child_pk = chef_tree.children.first().pk + + clean_up_deleted_chefs() + + self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) + self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) + + def test_old_staging_tree(self): + staging_tree = self.channel.staging_tree + garbage_node = get_deleted_chefs_root() + + tree(parent=staging_tree) + self.assertTrue(staging_tree.get_descendant_count() > 0) + + # Staging tree shouldn't be in garbage tree until api_commit_channel is called + self.assertFalse( + garbage_node.get_descendants().filter(pk=staging_tree.pk).exists() + ) + request = self.create_post_request( + reverse_lazy("api_finish_channel"), + data=json.dumps({"channel_id": self.channel.pk}), + content_type="application/json", + ) + response = api_commit_channel(request) + self.assertEqual(response.status_code, 200) + garbage_node.refresh_from_db() + staging_tree.refresh_from_db() + self.channel.refresh_from_db() + + # We can't use MPTT methods on the deleted chefs tree because we are not running the sort code + # for performance reasons, so just do a parent test instead. + self.assertEqual(staging_tree.parent, garbage_node) + + # New staging tree should not be in garbage tree + self.assertFalse(self.channel.main_tree.parent) + self.assertNotEqual(garbage_node.tree_id, self.channel.main_tree.tree_id) + + child_pk = staging_tree.children.first().pk + + clean_up_deleted_chefs() + + self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) + self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) + + def test_activate_channel(self): + previous_tree = self.channel.previous_tree + tree(parent=previous_tree) + garbage_node = get_deleted_chefs_root() + + # Previous tree shouldn't be in garbage tree until activate_channel is called + self.assertFalse( + garbage_node.get_descendants().filter(pk=previous_tree.pk).exists() + ) + activate_channel(self.channel, self.user) + garbage_node.refresh_from_db() + previous_tree.refresh_from_db() + self.channel.refresh_from_db() + + # We can't use MPTT methods on the deleted chefs tree because we are not running the sort code + # for performance reasons, so just do a parent test instead. 
+ self.assertTrue(previous_tree.parent == garbage_node) + + # New previous tree should not be in garbage tree + self.assertFalse(self.channel.previous_tree.parent) + self.assertNotEqual(garbage_node.tree_id, self.channel.previous_tree.tree_id) + + child_pk = previous_tree.children.first().pk + + clean_up_deleted_chefs() + + self.assertFalse(cc.ContentNode.objects.filter(parent=garbage_node).exists()) + self.assertFalse(cc.ContentNode.objects.filter(pk=child_pk).exists()) + + +THREE_MONTHS_AGO = datetime.now() - timedelta(days=93) + + +def _create_expired_contentnode(creation_date=THREE_MONTHS_AGO): + c = ContentNode.objects.create( + kind_id=content_kinds.TOPIC, + title="test", + modified=creation_date, + created=creation_date, + parent_id=settings.ORPHANAGE_ROOT_ID, + ) + # Use a queryset.update() to bypass auto_now's forced setting of + # created to now() + ContentNode.objects.filter(pk=c.pk).update( + created=creation_date, + modified=creation_date, + ) + return c + + +class CleanUpContentNodesTestCase(StudioTestCase): + + def test_delete_all_contentnodes_in_orphanage_tree(self): + """ + Make sure that by default, all nodes created with a timestamp of 3 months + ago don't exist anymore. + """ + + # create our contentnodes that will go under our garbage tree + num_contentnodes = 3 + for _ in range(num_contentnodes): + _create_expired_contentnode() + + # sanity check to see if we have X contentnodes under the garbage tree + assert ContentNode.objects.filter(parent_id=settings.ORPHANAGE_ROOT_ID).count() == num_contentnodes + + # now clean up our contentnodes, and check that our descendant count is indeed 0 now + clean_up_contentnodes() + assert ContentNode.objects.filter(parent_id=settings.ORPHANAGE_ROOT_ID).count() == 0 + + def test_deletes_associated_files(self): + c = _create_expired_contentnode() + f = File.objects.create( + contentnode_id=c.pk, + file_on_disk=ContentFile(b"test"), + checksum="aaa", + ) + + f.file_on_disk.save("aaa.jpg", ContentFile("aaa")) + file_url = f.file_on_disk.url + + # check that file_url exists before cleaning up + requests.head(file_url).raise_for_status() + clean_up_contentnodes() + + # there should be no file object in the DB + assert File.objects.count() == 0 + + def test_doesnt_delete_shared_files(self): + """ + Make sure that a file shared between two file objects doesn't + get deleted when one of the file objects gets deleted + """ + c = _create_expired_contentnode() + file_on_disk = ContentFile(b"test") + f = File.objects.create( + contentnode_id=c.pk, + file_on_disk=file_on_disk, + checksum="aaa", + ) + f.file_on_disk.save("aaa.jpg", file_on_disk) + file_url = f.file_on_disk.url + + c2 = ContentNode.objects.create(kind_id=content_kinds.TOPIC, title="test") + f2 = File.objects.create( + contentnode_id=c2.pk, + file_on_disk=file_on_disk, + checksum="aaa", + ) + f2.file_on_disk.save("aaa.jpg", file_on_disk) + + # check that file_url exists before cleaning up + requests.head(file_url).raise_for_status() + clean_up_contentnodes() + + # the file should still be available + response = requests.head(file_url) + assert response.status_code == 200 + + def test_doesnt_delete_nonorphan_files_and_contentnodes(self): + """ + Make sure that clean_up_contentnodes doesn't touch non-orphan files and + contentnodes. Bad things will happen if we do.
+ """ + # this legit tree, since it's not attached to our + # orphan tree, should still exist after cleanup + legit_tree = ContentNode.objects.create( + kind_id=content_kinds.TOPIC, + ) + # this file should still be here too since we attach + # it to our legit tree + f = File.objects.create( + contentnode=legit_tree, + ) + + # this node should be gone + expired_node = _create_expired_contentnode() + + # do our cleanup! + clean_up_contentnodes() + + # assert that out expired node doesn't exist + assert not ContentNode.objects.filter(pk=expired_node.pk).exists() + + # assert that our legit tree still exists + assert ContentNode.objects.filter(pk=legit_tree.pk).exists() + assert File.objects.filter(pk=f.pk).exists() + + def test_doesnt_delete_old_legit_tree(self): + """ + Make sure we don't delete an old content tree, as long as it's not under the + orphan tree. + """ + + # orphan node. This shouldn't exist anymore at the end of our test. + orphan_node = _create_expired_contentnode() + + # our old, but not orphaned tree. This should exist at the end of our test. + legit_node = ContentNode.objects.create( + kind_id=content_kinds.TOPIC, + ) + # mark the legit_node as old + ContentNode.objects.filter(pk=legit_node.pk).update( + created=THREE_MONTHS_AGO, + modified=THREE_MONTHS_AGO, + ) + + clean_up_contentnodes() + + # is our orphan gone? :( + assert not ContentNode.objects.filter(pk=orphan_node.pk).exists() + # is our senior, legit node still around? :) + assert ContentNode.objects.filter(pk=legit_node.pk).exists() + + def test_doesnt_delete_file_referenced_by_orphan_and_nonorphan_nodes(self): + """ + Make sure we don't delete a file, as long as it's referenced + by a non-orphan node. + """ + + # Our orphan, to be taken soon from this world + orphan_node = _create_expired_contentnode() + + # our legit node, standing proud and high with its non-orphaned status + legit_node = ContentNode.objects.create( + kind_id=content_kinds.VIDEO, + ) + + f = File.objects.create( + contentnode=legit_node, + checksum="aaa", + ) + forphan = File.objects.create( + contentnode=orphan_node, + checksum="aaa", + ) + + # The file they both share. This has the same checksum and contents. + # Alas, a file cannot have an orphan and non-orphan reference. This must + # not be deleted. 
+ f.file_on_disk.save("aaa.jpg", ContentFile("aaa")) + forphan.file_on_disk.save("aaa.jpg", ContentFile("aaa")) + + # check that our file exists in object storage + assert default_storage.exists("storage/a/a/aaa.jpg") + + clean_up_contentnodes() + + assert default_storage.exists("storage/a/a/aaa.jpg") + + +class CleanUpFeatureFlagsTestCase(BaseTestCase): + def test_clean_up(self): + key = "feature_flag_does_not_exist" + self.user.feature_flags = { + key: True + } + self.user.save() + clean_up_feature_flags() + self.user.refresh_from_db() + self.assertNotIn(key, self.user.feature_flags) + + +class CleanupTaskTestCase(StudioTestCase): + + def setUp(self): + user = self.admin_user + created = datetime.now() - timedelta(days=8) + self.pruned_task = Task.objects.create(status=states.SUCCESS, task_type="pruned_task", created=created, metadata={}, user_id=user.id) + self.failed_task = Task.objects.create(status=states.FAILURE, task_type="failed_task", created=created, metadata={}, user_id=user.id) + self.recent_task = Task.objects.create(status=states.SUCCESS, task_type="recent_task", created=datetime.now(), metadata={}, user_id=user.id) + clean_up_tasks() + + def test_pruned_task(self): + with self.assertRaises(Task.DoesNotExist): + Task.objects.get(pk=self.pruned_task.id) + + def test_failed_task(self): + try: + Task.objects.get(pk=self.failed_task.id) + except Task.DoesNotExist: + self.fail("Task was removed") + + def test_recent_task(self): + try: + Task.objects.get(pk=self.recent_task.id) + except Task.DoesNotExist: + self.fail("Task was removed") diff --git a/contentcuration/contentcuration/utils/db_tools.py b/contentcuration/contentcuration/utils/db_tools.py index 44c1af9265..b47a730bfc 100644 --- a/contentcuration/contentcuration/utils/db_tools.py +++ b/contentcuration/contentcuration/utils/db_tools.py @@ -26,7 +26,7 @@ LICENSE_DESCRIPTION = "Sample text for content with special permissions" SORT_ORDER = 0 -multi_lang = set(p.id for p in format_presets.PRESETLIST if p.multi_language) +multi_lang = {p.id for p in format_presets.PRESETLIST if p.multi_language} def create_user(email, password, first_name, last_name, admin=False): diff --git a/contentcuration/contentcuration/utils/files.py b/contentcuration/contentcuration/utils/files.py index 588e7aff1e..0c1d658bf2 100644 --- a/contentcuration/contentcuration/utils/files.py +++ b/contentcuration/contentcuration/utils/files.py @@ -106,7 +106,8 @@ def get_thumbnail_encoding(filename, dimension=THUMBNAIL_WIDTH): else: inbuffer = open(filename, 'rb') - assert inbuffer + if not inbuffer: + raise AssertionError with Image.open(inbuffer) as image: image_format = image.format @@ -152,7 +153,8 @@ def write_base64_to_file(encoding, fpath_out): encoding_match = get_base64_encoding(encoding) - assert encoding_match, "Error writing to file: Invalid base64 encoding" + if not encoding_match: + raise AssertionError("Error writing to file: Invalid base64 encoding") with open(fpath_out, "wb") as target_file: target_file.write(base64.decodestring(encoding_match.group(2).encode('utf-8'))) diff --git a/contentcuration/contentcuration/utils/garbage_collect.py b/contentcuration/contentcuration/utils/garbage_collect.py index 3a7b435dc7..91e8f1806d 100755 --- a/contentcuration/contentcuration/utils/garbage_collect.py +++ b/contentcuration/contentcuration/utils/garbage_collect.py @@ -2,16 +2,22 @@ """ Studio garbage collection utilities. Clean up all these old, unused records! 
""" +import datetime +import logging + +from celery import states from django.conf import settings from django.db.models.expressions import CombinedExpression from django.db.models.expressions import F from django.db.models.expressions import Value +from django.utils.timezone import now from le_utils.constants import content_kinds from contentcuration.constants import feature_flags from contentcuration.db.models.functions import JSONObjectKeys from contentcuration.models import ContentNode from contentcuration.models import File +from contentcuration.models import Task from contentcuration.models import User @@ -32,10 +38,15 @@ def clean_up_deleted_chefs(): nodes_to_clean_up = ContentNode.objects.filter(parent=deleted_chefs_node) # don't delete files until we can ensure files are not referenced anywhere. - for node in nodes_to_clean_up: - node.delete() - - assert not ContentNode.objects.filter(parent=deleted_chefs_node).exists() + # disable mptt updates as they are disabled when we insert nodes into this tree + with ContentNode.objects.disable_mptt_updates(): + for i, node in enumerate(nodes_to_clean_up): + try: + node.delete() + except ContentNode.DoesNotExist: + # If it doesn't exist, job done! + pass + logging.info("Deleted {} node(s) from the deleted chef tree".format(i + 1)) def clean_up_contentnodes(delete_older_than=settings.ORPHAN_DATE_CLEAN_UP_THRESHOLD): @@ -48,20 +59,19 @@ def clean_up_contentnodes(delete_older_than=settings.ORPHAN_DATE_CLEAN_UP_THRESH it's deleted. Default is two weeks from datetime.now(). """ - garbage_node = ContentNode.objects.get(pk=settings.ORPHANAGE_ROOT_ID) - nodes_to_clean_up = garbage_node.get_descendants().filter( - modified__lt=delete_older_than, + nodes_to_clean_up = ContentNode.objects.filter( + modified__lt=delete_older_than, parent_id=settings.ORPHANAGE_ROOT_ID ) - tree_id = garbage_node.tree_id # delete all files first clean_up_files(nodes_to_clean_up) # Use _raw_delete for fast bulk deletions - nodes_to_clean_up.delete() - # tell MPTT to rebuild our tree values, so descendant counts - # will be right again. 
- ContentNode._tree_manager.partial_rebuild(tree_id) + try: + count, _ = nodes_to_clean_up.delete() + logging.info("Deleted {} node(s) from the orphanage tree".format(count)) + except ContentNode.DoesNotExist: + pass def clean_up_files(contentnode_ids): @@ -106,3 +116,11 @@ def clean_up_feature_flags(): for remove_flag in (set(existing_flag_keys) - set(current_flag_keys)): User.objects.filter(feature_flags__has_key=remove_flag) \ .update(feature_flags=CombinedExpression(F("feature_flags"), "-", Value(remove_flag))) + + +def clean_up_tasks(): + """ + Removes completed tasks that are older than a week + """ + count, _ = Task.objects.filter(created__lt=now() - datetime.timedelta(days=7), status=states.SUCCESS).delete() + logging.info("Deleted {} successful task(s) from the task queue".format(count)) diff --git a/contentcuration/contentcuration/utils/gcs_storage.py b/contentcuration/contentcuration/utils/gcs_storage.py index 6b7ab3a6b1..bef04a7798 100644 --- a/contentcuration/contentcuration/utils/gcs_storage.py +++ b/contentcuration/contentcuration/utils/gcs_storage.py @@ -20,7 +20,6 @@ class GoogleCloudStorage(Storage): def __init__(self, client=None): - from django.conf import settings self.client = client if client else self._create_default_client() self.bucket = self.client.get_bucket(settings.AWS_S3_BUCKET_NAME) @@ -43,10 +42,11 @@ def open(self, name, mode="rb", blob_object=None): """ # We don't have any logic for returning the file object in write # so just raise an error if we get any mode other than rb - assert mode == "rb", ( - "Sorry, we can't handle any open mode other than rb." - " Please use Storage.save() instead." - ) + if mode != "rb": + raise AssertionError( + "Sorry, we can't handle any open mode other than rb." + " Please use Storage.save() instead." 
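For the counts logged in the cleanup helpers above: Django's QuerySet.delete() returns a pair of (total rows deleted, per-model breakdown), which is what the count, _ = ... unpacking in clean_up_contentnodes and clean_up_tasks relies on. Roughly, assuming the models and settings imported in this module:

from datetime import timedelta

from django.utils.timezone import now

cutoff = now() - timedelta(days=14)
count, per_model = ContentNode.objects.filter(
    parent_id=settings.ORPHANAGE_ROOT_ID, modified__lt=cutoff
).delete()
print(count, per_model)  # e.g. 3 {'contentcuration.ContentNode': 3}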
+ ) if not blob_object: # the old studio storage had a prefix if /contentworkshop_content/ diff --git a/contentcuration/contentcuration/utils/nodes.py b/contentcuration/contentcuration/utils/nodes.py index 2a321bafdf..c538a4e7f4 100644 --- a/contentcuration/contentcuration/utils/nodes.py +++ b/contentcuration/contentcuration/utils/nodes.py @@ -36,9 +36,12 @@ def map_files_to_node(user, node, data): """ if settings.DEBUG: # assert that our parameters match expected values - assert isinstance(user, User) - assert isinstance(node, ContentNode) - assert isinstance(data, list) + if not isinstance(user, User): + raise AssertionError + if not isinstance(node, ContentNode): + raise AssertionError + if not isinstance(data, list): + raise AssertionError # filter out file that are empty valid_data = filter_out_nones(data) @@ -96,9 +99,12 @@ def map_files_to_assessment_item(user, assessment_item, data): """ if settings.DEBUG: # assert that our parameters match expected values - assert isinstance(user, User) - assert isinstance(assessment_item, AssessmentItem) - assert isinstance(data, list) + if not isinstance(user, User): + raise AssertionError + if not isinstance(assessment_item, AssessmentItem): + raise AssertionError + if not isinstance(data, list): + raise AssertionError # filter out file that are empty valid_data = filter_out_nones(data) diff --git a/contentcuration/contentcuration/utils/publish.py b/contentcuration/contentcuration/utils/publish.py index 409070bd22..70232a55ac 100644 --- a/contentcuration/contentcuration/utils/publish.py +++ b/contentcuration/contentcuration/utils/publish.py @@ -22,6 +22,7 @@ from django.db.models import Count from django.db.models import Q from django.db.models import Sum +from django.db.utils import IntegrityError from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import gettext_lazy as _ @@ -125,7 +126,7 @@ def assign_license_to_contentcuration_nodes(channel, license): channel.main_tree.get_family().update(license_id=license.pk) -def map_content_nodes( +def map_content_nodes( # noqa: C901 root_node, default_language, channel_id, @@ -140,6 +141,9 @@ def map_content_nodes( # make sure we process nodes higher up in the tree first, or else when we # make mappings the parent nodes might not be there + if not root_node.complete: + raise ValueError("Attempted to publish a channel with an incomplete root node") + node_queue = collections.deque() node_queue.append(root_node) @@ -498,7 +502,7 @@ def write_assessment_item(assessment_item, zf, channel_id): # noqa C901 answer['answer'], answer_images = process_image_strings(answer['answer'], zf, channel_id) answer.update({'images': answer_images}) - answer_data = list([a for a in answer_data if a['answer'] or a['answer'] == 0]) # Filter out empty answers, but not 0 + answer_data = [a for a in answer_data if a['answer'] or a['answer'] == 0] # Filter out empty answers, but not 0 hint_data = json.loads(assessment_item.hints) for hint in hint_data: hint['hint'] = process_formulas(hint['hint']) @@ -585,6 +589,8 @@ def map_prerequisites(root_node): target_node.has_prerequisite.add(n['prerequisite__node_id']) except kolibrimodels.ContentNode.DoesNotExist as e: logging.error('Unable to find prerequisite {}'.format(str(e))) + except IntegrityError as e: + logging.error('Unable to find source node for prerequisite relationship {}'.format(str(e))) def map_channel_to_kolibri_channel(channel): diff --git a/contentcuration/contentcuration/viewsets/assessmentitem.py 
b/contentcuration/contentcuration/viewsets/assessmentitem.py index 5db084a670..978208b5f1 100644 --- a/contentcuration/contentcuration/viewsets/assessmentitem.py +++ b/contentcuration/contentcuration/viewsets/assessmentitem.py @@ -107,17 +107,15 @@ def set_files(self, all_objects, all_validated_data=None): # noqa C901 if all_validated_data: # If this is an update operation, check the validated data for which items # have had these fields modified. - md_fields_modified = set( - [ - self.id_value_lookup(ai) for ai in all_validated_data + md_fields_modified = { + self.id_value_lookup(ai) for ai in all_validated_data if "question" in ai or "hints" in ai or "answers" in ai - ] - ) + } else: # If this is a create operation, just check if these fields are not null. - md_fields_modified = set( - [self.id_value_lookup(ai) for ai in all_objects if ai.question or ai.hints or ai.answers] - ) + md_fields_modified = { + self.id_value_lookup(ai) for ai in all_objects if ai.question or ai.hints or ai.answers + } all_objects = [ai for ai in all_objects if self.id_value_lookup(ai) in md_fields_modified] @@ -129,8 +127,8 @@ def set_files(self, all_objects, all_validated_data=None): # noqa C901 for aitem in all_objects: current_files = current_files_by_aitem.get(aitem.id, []) filenames = get_filenames_from_assessment(aitem) - set_checksums = set([filename.split(".")[0] for filename in filenames]) - current_checksums = set([f.checksum for f in current_files]) + set_checksums = {filename.split(".")[0] for filename in filenames} + current_checksums = {f.checksum for f in current_files} missing_checksums = set_checksums.difference(current_checksums) diff --git a/contentcuration/contentcuration/viewsets/base.py b/contentcuration/contentcuration/viewsets/base.py index 424869d2dd..7c4b12f88c 100644 --- a/contentcuration/contentcuration/viewsets/base.py +++ b/contentcuration/contentcuration/viewsets/base.py @@ -142,8 +142,7 @@ def update(self, instance, validated_data): for attr, value in validated_data.items(): if attr in info.relations and info.relations[attr].to_many: raise ValueError("Many to many fields must be explicitly handled", attr) - else: - setattr(instance, attr, value) + setattr(instance, attr, value) if hasattr(instance, "on_update") and callable(instance.on_update): instance.on_update() @@ -169,7 +168,7 @@ def create(self, validated_data): raise ValueError( "Many to many fields must be explicitly handled", field_name ) - elif not relation_info.reverse and (field_name in validated_data): + if not relation_info.reverse and (field_name in validated_data): if not isinstance( validated_data[field_name], relation_info.related_model ): @@ -255,9 +254,7 @@ def to_internal_value(self, data): return ret def update(self, queryset, all_validated_data): - concrete_fields = set( - f.name for f in self.child.Meta.model._meta.concrete_fields - ) + concrete_fields = {f.name for f in self.child.Meta.model._meta.concrete_fields} all_validated_data_by_id = {} @@ -464,7 +461,7 @@ def values_from_key(cls, key): # to create key, value pairs for a dict # Order in the key matters, and must match the "update_lookup_field" # property of the serializer. - return [(attr, value) for attr, value in zip(id_attr, key)] + return list(zip(id_attr, key)) return [] @classmethod @@ -484,7 +481,7 @@ def filter_queryset_from_keys(cls, queryset, keys): # improvements welcome! 
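On the filter_queryset_from_keys logic that follows this comment: OR-ing together Q objects built from zipped (field, value) pairs yields a single WHERE clause matching any of the composite keys. A condensed sketch with illustrative field names:

from django.db.models import Q

id_attr = ("contentnode_id", "assessment_id")  # illustrative composite key fields
keys = [("node-1", "q-1"), ("node-2", "q-2")]

query = Q()
for key in keys:
    query |= Q(**dict(zip(id_attr, key)))
# queryset.filter(query) then matches rows whose (contentnode_id, assessment_id)
# equals any listed pair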
query = Q() for key in keys: - query |= Q(**{attr: value for attr, value in zip(id_attr, key)}) + query |= Q(**dict(zip(id_attr, key))) return queryset.filter(query) return queryset.none() @@ -513,12 +510,13 @@ def get_edit_queryset(self): def _get_lookup_filter(self): lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field - assert lookup_url_kwarg in self.kwargs, ( - "Expected view %s to be called with a URL keyword argument " - 'named "%s". Fix your URL conf, or set the `.lookup_field` ' - "attribute on the view correctly." - % (self.__class__.__name__, lookup_url_kwarg) - ) + if lookup_url_kwarg not in self.kwargs: + raise AssertionError( + "Expected view %s to be called with a URL keyword argument " + 'named "%s". Fix your URL conf, or set the `.lookup_field` ' + "attribute on the view correctly." + % (self.__class__.__name__, lookup_url_kwarg) + ) return {self.lookup_field: self.kwargs[lookup_url_kwarg]} @@ -603,7 +601,7 @@ def retrieve(self, request, *args, **kwargs): class CreateModelMixin(object): def _map_create_change(self, change): return dict( - [(k, v) for k, v in change["obj"].items()] + list(change["obj"].items()) + self.values_from_key(change["key"]) ) @@ -678,7 +676,7 @@ def delete_from_changes(self, changes): class UpdateModelMixin(object): def _map_update_change(self, change): return dict( - [(k, v) for k, v in change["mods"].items()] + list(change["mods"].items()) + self.values_from_key(change["key"]) ) diff --git a/contentcuration/contentcuration/viewsets/common.py b/contentcuration/contentcuration/viewsets/common.py index c378bb5734..94209b2df8 100644 --- a/contentcuration/contentcuration/viewsets/common.py +++ b/contentcuration/contentcuration/viewsets/common.py @@ -123,7 +123,7 @@ def unnest_dict(dictionary): class DotPathValueMixin(object): def get_value(self, dictionary): # get just field name - value = dictionary.get(self.field_name, dict()) + value = dictionary.get(self.field_name, {}) if value is None: return empty @@ -139,7 +139,7 @@ def get_value(self, dictionary): ) value.update(html_value) - return value if len(value.keys()) else empty + return value if value.keys() else empty class JSONFieldDictSerializer(DotPathValueMixin, serializers.Serializer): @@ -196,7 +196,7 @@ def to_internal_value(self, data): if self.child_relation.pk_field is not None: pks = [self.child_relation.pk_field.to_internal_value(d) for d in data] else: - pks = [d for d in data] + pks = list(data) valid_pks = ( self.child_relation.get_queryset() .filter(pk__in=pks) diff --git a/contentcuration/contentcuration/viewsets/invitation.py b/contentcuration/contentcuration/viewsets/invitation.py index c46107f7ac..5ee6838506 100644 --- a/contentcuration/contentcuration/viewsets/invitation.py +++ b/contentcuration/contentcuration/viewsets/invitation.py @@ -56,7 +56,7 @@ def update(self, instance, validated_data): def get_fields(self): fields = super().get_fields() - request = self.context.get("request", None) + request = self.context.get("request") # allow invitation state to be modified under the right conditions if request and request.user and self.instance: diff --git a/contentcuration/contentcuration/viewsets/sync/endpoint.py b/contentcuration/contentcuration/viewsets/sync/endpoint.py index 4e26a665e1..503c23e8b9 100644 --- a/contentcuration/contentcuration/viewsets/sync/endpoint.py +++ b/contentcuration/contentcuration/viewsets/sync/endpoint.py @@ -224,12 +224,11 @@ def sync(request): if changes_to_return: return Response({"changes": changes_to_return}) return Response({}) - elif 
len(errors) < len(data) or len(changes_to_return): + if len(errors) < len(data) or changes_to_return: # If there are some errors, but not all, or all errors and some changes return a mixed response return Response( {"changes": changes_to_return, "errors": errors}, status=HTTP_207_MULTI_STATUS, ) - else: - # If the errors are total, and there are no changes reject the response outright! - return Response({"errors": errors}, status=HTTP_400_BAD_REQUEST) + # If the errors are total, and there are no changes reject the response outright! + return Response({"errors": errors}, status=HTTP_400_BAD_REQUEST) diff --git a/contentcuration/contentcuration/viewsets/sync/utils.py b/contentcuration/contentcuration/viewsets/sync/utils.py index 622b264c42..6e7ed7405c 100644 --- a/contentcuration/contentcuration/viewsets/sync/utils.py +++ b/contentcuration/contentcuration/viewsets/sync/utils.py @@ -95,6 +95,5 @@ def log_sync_exception(e): if getattr(settings, "DEBUG", False) or getattr(settings, "TEST_ENV", False): raise - else: - # make sure we leave a record in the logs just in case. - logging.error(e) + # make sure we leave a record in the logs just in case. + logging.error(e) diff --git a/contentcuration/kolibri_content/router.py b/contentcuration/kolibri_content/router.py index 5a9a13f646..b7b2e69b70 100644 --- a/contentcuration/kolibri_content/router.py +++ b/contentcuration/kolibri_content/router.py @@ -81,7 +81,7 @@ def _get_db(self, model, **hints): return None # if the model is already associated with a database, use that database - if hasattr(hints.get("instance", None), "_state"): + if hasattr(hints.get("instance"), "_state"): return hints["instance"]._state.db # determine the currently active content database, and return the alias diff --git a/deploy/probers/postmark_api_probe.py b/deploy/probers/postmark_api_probe.py index ffa545a5c3..1c2d514493 100755 --- a/deploy/probers/postmark_api_probe.py +++ b/deploy/probers/postmark_api_probe.py @@ -27,13 +27,12 @@ def do_probe(self): if passing: continue - else: - raise Exception("Postmark's `%s` service has status %s, but we require one of the following: %s" % ( - service['name'], - service['status'], - allowed_statuses - ) + raise Exception("Postmark's `%s` service has status %s, but we require one of the following: %s" % ( + service['name'], + service['status'], + allowed_statuses ) + ) if __name__ == "__main__": diff --git a/jest_config/broadcastChannelMock.js b/jest_config/globalMocks/broadcastChannelMock.js similarity index 100% rename from jest_config/broadcastChannelMock.js rename to jest_config/globalMocks/broadcastChannelMock.js diff --git a/contentcuration/contentcuration/frontend/shared/__mocks__/client.js b/jest_config/globalMocks/client.js similarity index 93% rename from contentcuration/contentcuration/frontend/shared/__mocks__/client.js rename to jest_config/globalMocks/client.js index b32749d5b5..5bd6f15e47 100644 --- a/contentcuration/contentcuration/frontend/shared/__mocks__/client.js +++ b/jest_config/globalMocks/client.js @@ -41,3 +41,7 @@ client.__setResponse = (method, options) => { }; export default client; + +export function paramsSerializer(params) { + return JSON.stringify(params); +} diff --git a/jest_config/fileMock.js b/jest_config/globalMocks/fileMock.js similarity index 100% rename from jest_config/fileMock.js rename to jest_config/globalMocks/fileMock.js diff --git a/jest_config/globalMocks/resources.js b/jest_config/globalMocks/resources.js new file mode 100644 index 0000000000..9377a4d05a --- /dev/null +++ 
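The content-router tweak above is behavior-preserving because dict.get returns None for a missing "instance" hint and hasattr(None, "_state") is simply False:

hints = {}
instance = hints.get("instance")    # None when no instance hint was passed
print(hasattr(instance, "_state"))  # False, so the router falls through to its other rules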
b/jest_config/globalMocks/resources.js @@ -0,0 +1,12 @@ +import * as resources from '../../contentcuration/contentcuration/frontend/shared/data/resources'; + +Object.values(resources).forEach(resource => { + if (resource.fetchCollection) { + resource.fetchCollection = () => new Promise(() => {}); + } + if (resource.fetchModel) { + resource.fetchModel = () => new Promise(() => {}); + } +}); + +export * from '../../contentcuration/contentcuration/frontend/shared/data/resources'; diff --git a/contentcuration/contentcuration/frontend/shared/__mocks__/urls.js b/jest_config/globalMocks/urls.js similarity index 100% rename from contentcuration/contentcuration/frontend/shared/__mocks__/urls.js rename to jest_config/globalMocks/urls.js diff --git a/jest_config/jest.conf.js b/jest_config/jest.conf.js index 67f018b0e3..7c380e0f0a 100644 --- a/jest_config/jest.conf.js +++ b/jest_config/jest.conf.js @@ -12,10 +12,12 @@ module.exports = { '^static/(.*)': '<rootDir>/contentcuration/contentcuration/static/$1', '\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': path.resolve( __dirname, - './fileMock.js' + './globalMocks/fileMock.js' ), - 'broadcast-channel$': path.resolve(__dirname, './broadcastChannelMock.js'), - '\\.worker.min.js': path.resolve(__dirname, './fileMock.js'), + 'broadcast-channel$': path.resolve(__dirname, './globalMocks/broadcastChannelMock.js'), + '\\.worker.min.js': path.resolve(__dirname, './globalMocks/fileMock.js'), + 'shared/client': path.resolve(__dirname, './globalMocks/client.js'), + 'shared/urls': path.resolve(__dirname, './globalMocks/urls.js'), }, testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'], testURL: 'http://studio.time', diff --git a/jest_config/setup.js b/jest_config/setup.js index 014ffef4d4..c96391f83f 100644 --- a/jest_config/setup.js +++ b/jest_config/setup.js @@ -12,6 +12,7 @@ import 'fake-indexeddb/auto'; import jquery from 'jquery'; import AnalyticsPlugin from 'shared/analytics/plugin'; import { setupSchema } from 'shared/data'; +import * as resources from 'shared/data/resources'; import icons from 'shared/vuetify/icons'; import ActionLink from 'shared/views/ActionLink'; import { i18nSetup } from 'shared/i18n'; @@ -70,7 +71,16 @@ global.window.Urls = new Proxy( }, } ); -jest.mock('shared/urls'); + +Object.values(resources).forEach(resource => { + if (resource.fetchCollection) { + resource.fetchCollection = () => Promise.resolve([]); + } + if (resource.fetchModel) { + resource.fetchModel = () => Promise.resolve({}); + } +}); + jest.setTimeout(10000); // 10 sec Object.defineProperty(window, 'scrollTo', { value: () => {}, writable: true }); diff --git a/k8s/templates/garbage-collect-cronjob.yaml b/k8s/templates/garbage-collect-cronjob.yaml new file mode 100644 index 0000000000..4395732541 --- /dev/null +++ b/k8s/templates/garbage-collect-cronjob.yaml @@ -0,0 +1,78 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ template "studio.fullname" . }}-garbage-collect-job-config + labels: + tier: job + app: {{ template "studio.fullname" . }} + chart: {{ .Chart.Name }} + release: {{ .Release.Name }} +data: + DJANGO_LOG_FILE: /var/log/django.log + DATA_DB_HOST: {{ template "cloudsql-proxy.fullname" . }} + DATA_DB_PORT: "5432" + MPLBACKEND: PS + RUN_MODE: k8s + RELEASE_COMMIT_SHA: {{ .Values.studioApp.releaseCommit | default "" }} + BRANCH_ENVIRONMENT: {{ .Release.Name }} + AWS_BUCKET_NAME: {{ .Values.studioApp.gcs.bucketName }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "studio.fullname" .
}}-garbage-collect-job-secret + labels: + app: {{ template "studio.fullname" . }} + chart: {{ .Chart.Name }} + release: {{ .Release.Name }} +type: Opaque +data: + DATA_DB_USER: {{ index .Values "cloudsql-proxy" "credentials" "username" | b64enc }} + DATA_DB_PASS: {{ index .Values "cloudsql-proxy" "credentials" "password" | b64enc }} + DATA_DB_NAME: {{ index .Values "cloudsql-proxy" "credentials" "dbname" | b64enc }} + SENTRY_DSN_KEY: {{ .Values.sentry.dsnKey | b64enc }} +--- +apiVersion: batch/v1beta1 +kind: CronJob +metadata: + name: {{ template "studio.fullname" . }}-garbage-collect-cronjob + labels: + tier: job + chart: {{ .Chart.Name }} + release: {{ .Release.Name }} +spec: + schedule: "@midnight" + jobTemplate: + spec: + template: + spec: + restartPolicy: OnFailure + containers: + - name: app + image: {{ .Values.studioApp.imageName }} + command: + - python + - contentcuration/manage.py + - garbage_collect + env: + - name: DJANGO_SETTINGS_MODULE + value: contentcuration.production_settings + envFrom: + - configMapRef: + name: {{ template "studio.fullname" . }}-garbage-collect-job-config + - secretRef: + name: {{ template "studio.fullname" . }}-garbage-collect-job-secret + resources: + requests: + cpu: 0.5 + memory: 1Gi + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: full-gcp-access-scope + operator: In + values: + - "true" diff --git a/package.json b/package.json index b4eae3ad47..a7d64edda6 100644 --- a/package.json +++ b/package.json @@ -25,13 +25,12 @@ "unittests": "yarn run build && yarn run pytest && yarn run test-jest", "unittests:reusedb": "npm-run-all --parallel --race services pytest:reusedb", "apptests": "npm-run-all --parallel --race services cypress:test", - "test": "npm-run-all unittests apptests", + "test": "jest --config jest_config/jest.conf.js", "build:dev": "webpack-dev-server --env.dev --config webpack.config.js --watch --progress --host 0.0.0.0", "build:dev:hot": "yarn run build:dev --hot --env.hot", "test-jest:dev": "yarn run test-jest --watch", - "test-jest": "jest --config jest_config/jest.conf.js", + "test-jest": "yarn run test", "test-jest:debug": "node --inspect node_modules/.bin/jest --runInBand --watch", - "test-jest-cov": "jest --config jest_config/jest.conf.js --coverage", "minio:test": "rm -rf ~/.minio_test && minio server ~/.minio_test/ || true", "minio": "MINIO_API_CORS_ALLOW_ORIGIN='http://localhost:8080,http://127.0.0.1:8080' MINIO_ACCESS_KEY=development MINIO_SECRET_KEY=development minio server ~/.minio_data/ || true", "runserver": "cd contentcuration && python manage.py runserver --settings=contentcuration.dev_settings 0.0.0.0:8080", diff --git a/yarn.lock b/yarn.lock index 6bb4fc0852..6c3e36512c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1543,7 +1543,7 @@ core-js-pure "^3.0.0" regenerator-runtime "^0.13.4" -"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.0", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.0", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": version "7.10.2" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.2.tgz#d103f21f2602497d38348a32e008637d506db839" integrity sha512-6sF3uQw2ivImfVIl62RZ7MXhO2tap69WeWK57vAaimT6AZbE4FbqjdEJIN1UqoD6wI6B+1n9UiagafH1sxjOtg== @@ -1557,6 +1557,13 @@ 
diff --git a/package.json b/package.json
index b4eae3ad47..a7d64edda6 100644
--- a/package.json
+++ b/package.json
@@ -25,13 +25,12 @@
     "unittests": "yarn run build && yarn run pytest && yarn run test-jest",
     "unittests:reusedb": "npm-run-all --parallel --race services pytest:reusedb",
     "apptests": "npm-run-all --parallel --race services cypress:test",
-    "test": "npm-run-all unittests apptests",
+    "test": "jest --config jest_config/jest.conf.js",
     "build:dev": "webpack-dev-server --env.dev --config webpack.config.js --watch --progress --host 0.0.0.0",
     "build:dev:hot": "yarn run build:dev --hot --env.hot",
     "test-jest:dev": "yarn run test-jest --watch",
-    "test-jest": "jest --config jest_config/jest.conf.js",
+    "test-jest": "yarn run test",
     "test-jest:debug": "node --inspect node_modules/.bin/jest --runInBand --watch",
-    "test-jest-cov": "jest --config jest_config/jest.conf.js --coverage",
     "minio:test": "rm -rf ~/.minio_test && minio server ~/.minio_test/ || true",
     "minio": "MINIO_API_CORS_ALLOW_ORIGIN='http://localhost:8080,http://127.0.0.1:8080' MINIO_ACCESS_KEY=development MINIO_SECRET_KEY=development minio server ~/.minio_data/ || true",
     "runserver": "cd contentcuration && python manage.py runserver --settings=contentcuration.dev_settings 0.0.0.0:8080",
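Because the "test" script now invokes jest directly, yarn forwards any extra arguments through to jest, so a single spec can be run without the build/pytest wrapper that "unittests" still provides:

    yarn test                    # whole frontend suite
    yarn test CurrentTopicView   # only specs whose path matches the pattern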
"https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + "glob@~ 3.2.1": version "3.2.11" resolved "https://registry.yarnpkg.com/glob/-/glob-3.2.11.tgz#4a973f635b9190f715d10987d5c00fd2815ebe3d" @@ -14358,6 +14378,11 @@ object.values@^1.1.0, object.values@^1.1.1: function-bind "^1.1.1" has "^1.0.3" +oblivious-set@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/oblivious-set/-/oblivious-set-1.0.0.tgz#c8316f2c2fb6ff7b11b6158db3234c49f733c566" + integrity sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw== + obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" @@ -14874,9 +14899,9 @@ path-key@^3.0.0, path-key@^3.1.0: integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" @@ -16668,11 +16693,16 @@ regenerator-runtime@^0.11.0: resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== -regenerator-runtime@^0.13.2, regenerator-runtime@^0.13.3, regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.5: +regenerator-runtime@^0.13.2, regenerator-runtime@^0.13.3, regenerator-runtime@^0.13.5: version "0.13.5" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz#d878a1d094b4306d10b9096484b33ebd55e26697" integrity sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA== +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + regenerator-transform@^0.10.0: version "0.10.1" resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.10.1.tgz#1e4996837231da8b7f3cf4114d71b5691a0680dd"