diff --git a/config/dtsProcessEventRule.json b/config/dtsProcessEventRule.json
new file mode 100644
index 00000000..1df6e4c7
--- /dev/null
+++ b/config/dtsProcessEventRule.json
@@ -0,0 +1,7 @@
+{
+  "Description": "Event rule to schedule the dtsProcess lambda execution",
+  "Name": "{PLACEHOLDER}",
+  "RoleArn": "{PLACEHOLDER}",
+  "ScheduleExpression": "{PLACEHOLDER}",
+  "State": "ENABLED"
+}
diff --git a/lib/constants.js b/lib/constants.js
new file mode 100644
index 00000000..82bdf5ab
--- /dev/null
+++ b/lib/constants.js
@@ -0,0 +1,6 @@
+module.exports = {
+  HTTP_BAD_REQUEST: 400,
+  HTTP_NOT_FOUND: 404,
+  HTTP_TOO_MANY_REQUESTS: 429,
+  INTERNAL_SERVER_ERROR: 500
+}
diff --git a/lib/functions/dts-process.js b/lib/functions/dts-process.js
new file mode 100644
index 00000000..7e55e04a
--- /dev/null
+++ b/lib/functions/dts-process.js
@@ -0,0 +1,71 @@
+const logger = require('../helpers/logging')
+const pg = require('../helpers/db')
+const invokeLambda = require('../helpers/invoke-lambda')
+const { deleteStation, getRloiIds, getStationData, validateStationData } = require('../helpers/imtd-api')
+
+async function insertStation (stationDataArray) {
+  try {
+    await pg.transaction(async trx => {
+      await Promise.all(stationDataArray.map(async (stationData) => {
+        const stationID = stationData.station_id
+        await trx('station_display_time_series').where({ station_id: stationID }).delete()
+        await trx('station_display_time_series').insert(stationData)
+        logger.info(`Processed displayTimeSeries for RLOI id ${stationID}`)
+      }))
+    })
+  } catch (error) {
+    logger.error('Database error processing stationData', error)
+    throw error
+  }
+}
+
+async function getData (stationId) {
+  try {
+    const stationData = await getStationData(stationId)
+    if (stationData.length === 0) {
+      logger.info(`Deleting station ${stationId}`)
+      const tableName = 'station_display_time_series'
+      await deleteStation(stationId, tableName)
+    }
+    await validateStationData(stationData)
+    await insertStation(stationData)
+  } catch (error) {
+    logger.error(`Could not process data for station ${stationId} (${error.message})`)
+  }
+}
+
+async function handler ({ offset = 0 } = {}) {
+  const BATCH_SIZE = parseInt(process.env.IMTD_BATCH_SIZE || '500')
+
+  logger.info(`Retrieving up to ${BATCH_SIZE} rloi_ids with an offset of ${offset}`)
+  const rloiids = await getRloiIds({
+    offset,
+    limit: BATCH_SIZE
+  })
+  logger.info(`Retrieved ${rloiids.length} rloi_ids`)
+
+  for (const rloiid of rloiids) {
+    await getData(rloiid.rloi_id)
+  }
+
+  if (rloiids.length >= BATCH_SIZE) {
+    const functionName = process.env.AWS_LAMBDA_FUNCTION_NAME
+    const newOffset = offset + BATCH_SIZE
+    logger.info(`Invoking ${functionName} with an offset of ${newOffset}`)
+
+    await invokeLambda(functionName, {
+      offset: newOffset
+    })
+  }
+}
+
+module.exports = {
+  handler,
+  validateStationData
+}
+
+process.on('SIGTERM', async () => {
+  logger.info('SIGTERM received, destroying DB connection')
+  await pg.destroy()
+  process.exit(0)
+})
diff --git a/lib/functions/imtd-process.js b/lib/functions/imtd-process.js
index f5349ec2..76eb3816 100644
--- a/lib/functions/imtd-process.js
+++ b/lib/functions/imtd-process.js
@@ -1,18 +1,10 @@
 const parseThresholds = require('../models/parse-thresholds')
-const axios = require('axios')
 const logger = require('../helpers/logging')
 const pg = require('../helpers/db')
 const invokeLambda = require('../helpers/invoke-lambda')
-
-async function deleteThresholds (stationId) {
-  try {
-    await pg('station_imtd_threshold').where({ station_id: stationId }).delete()
-    logger.info(`Deleted thresholds for RLOI id ${stationId}`)
-  } catch (error) {
-    logger.error(`Error deleting thresholds for station ${stationId}`, error)
-    throw error
-  }
-}
+const deleteThresholds = require('../helpers/imtd-api').deleteStation
+const { getRloiIds, getImtdApiResponse } = require('../helpers/imtd-api')
+const tableName = 'station_imtd_threshold'
 
 async function insertThresholds (stationId, thresholds) {
   try {
@@ -37,21 +29,6 @@ async function insertThresholds (stationId, thresholds) {
   }
 }
 
-async function getImtdApiResponse (stationId) {
-  const hostname = 'imfs-prd1-thresholds-api.azurewebsites.net'
-  try {
-    return await axios.get(`https://${hostname}/Location/${stationId}?version=2`)
-  } catch (error) {
-    if (error.response?.status === 404) {
-      logger.info(`Station ${stationId} not found (HTTP Status: 404)`)
-    } else {
-      const message = error.response?.status ? `HTTP Status: ${error.response.status}` : `Error: ${error.message}`
-      throw Error(`IMTD API request for station ${stationId} failed (${message})`)
-    }
-    return {}
-  }
-}
-
 async function getIMTDThresholds (stationId) {
   const response = await getImtdApiResponse(stationId)
   if (response.data) {
@@ -66,36 +43,23 @@ async function getData (stationId) {
     if (thresholds.length > 0) {
       await insertThresholds(stationId, thresholds)
     } else {
-      await deleteThresholds(stationId)
+      await deleteThresholds(stationId, tableName)
+      logger.info(`Deleted data for RLOI id ${stationId}`)
     }
   } catch (error) {
     logger.error(`Could not process data for station ${stationId} (${error.message})`)
   }
 }
 
-async function getRloiIds ({ limit, offset } = {}) {
-  try {
-    logger.info(`Retrieving up to ${limit} rloi_ids with an offset of ${offset}`)
-    const result = await pg('rivers_mview')
-      .distinct('rloi_id')
-      .whereNotNull('rloi_id')
-      .orderBy('rloi_id', 'asc')
-      .limit(limit)
-      .offset(offset)
-    logger.info(`Retrieved ${result.length} rloi_ids`)
-    return result
-  } catch (error) {
-    throw Error(`Could not get list of id's from database (Error: ${error.message})`)
-  }
-}
-
 async function handler ({ offset = 0 } = {}) {
   const BATCH_SIZE = parseInt(process.env.IMTD_BATCH_SIZE || '500')
 
+  logger.info(`Retrieving up to ${BATCH_SIZE} rloi_ids with an offset of ${offset}`)
   const stations = await getRloiIds({
     offset,
     limit: BATCH_SIZE
   })
+  logger.info(`Retrieved ${stations.length} rloi_ids`)
 
   for (const station of stations) {
     await getData(station.rloi_id)
diff --git a/lib/helpers/imtd-api.js b/lib/helpers/imtd-api.js
new file mode 100644
index 00000000..2b5ca85d
--- /dev/null
+++ b/lib/helpers/imtd-api.js
@@ -0,0 +1,66 @@
+const pg = require('./db')
+const axios = require('axios')
+const { HTTP_NOT_FOUND } = require('../constants')
+const logger = require('./logging')
+const parseStation = require('../models/parse-time-series')
+const Joi = require('joi')
+
+async function deleteStation (stationId, tableName) {
+  await pg(tableName).where({ station_id: stationId }).delete()
+}
+
+async function getRloiIds ({ limit, offset } = {}) {
+  try {
+    const result = await pg('rivers_mview')
+      .distinct('rloi_id')
+      .whereNotNull('rloi_id')
+      .orderBy('rloi_id', 'asc')
+      .limit(limit)
+      .offset(offset)
+    return result
+  } catch (error) {
+    throw Error(`Could not get list of id's from database (Error: ${error.message})`)
+  }
+}
+
+async function getImtdApiResponse (stationId) {
+  const hostname = 'imfs-prd1-thresholds-api.azurewebsites.net'
+  try {
+    return await axios.get(`https://${hostname}/Location/${stationId}?version=2`)
+  } catch (error) {
+    if (error.response?.status === HTTP_NOT_FOUND) {
+      logger.info(`Station ${stationId} not found (HTTP Status: 404)`)
+    } else {
+      const message = error.response?.status ? `HTTP Status: ${error.response.status}` : `Error: ${error.message}`
+      throw Error(`IMTD API request for station ${stationId} failed (${message})`)
+    }
+    return {}
+  }
+}
+
+async function getStationData (stationId) {
+  const response = await getImtdApiResponse(stationId)
+  if (response.data) {
+    return parseStation(response.data[0].TimeSeriesMetaData, stationId)
+  }
+  return []
+}
+
+async function validateStationData (stationDataArray) {
+  const schema = Joi.object({
+    station_id: Joi.number().required(),
+    direction: Joi.string().required(),
+    display_time_series: Joi.boolean().required()
+  })
+
+  try {
+    const validatedData = await Promise.all(
+      stationDataArray.map((stationData) => schema.validateAsync(stationData))
+    )
+    return validatedData
+  } catch (error) {
+    throw new Error(`Validation error: ${error.message}`)
+  }
+}
+
+module.exports = { deleteStation, getRloiIds, getImtdApiResponse, getStationData, validateStationData }
diff --git a/lib/models/parse-time-series.js b/lib/models/parse-time-series.js
new file mode 100644
index 00000000..810720c9
--- /dev/null
+++ b/lib/models/parse-time-series.js
@@ -0,0 +1,25 @@
+/**
+ * @param {Array} data - The TimeSeriesMetaData array to be parsed for the given stationId.
+ * @returns {Array} - The processed, de-duplicated display time series data.
+ */
+function parseTimeSeries (data, stationId) {
+  if (!data) {
+    return {}
+  }
+
+  const processedData = data.map((item) => ({
+    station_id: stationId,
+    direction: item.qualifier === 'Downstream Stage' ? 'd' : 'u',
+    display_time_series: item.DisplayTimeSeries
+  }))
+
+  const uniqueProcessedData = processedData.filter((item, index, self) =>
+    index === self.findIndex((t) => (
+      t.station_id === item.station_id && t.direction === item.direction
+    ))
+  )
+
+  return uniqueProcessedData
+}
+
+module.exports = parseTimeSeries
diff --git a/lib/models/rloi.js b/lib/models/rloi.js
index 61011588..8738d203 100644
--- a/lib/models/rloi.js
+++ b/lib/models/rloi.js
@@ -24,7 +24,7 @@ function removePostfix (name) {
 }
 
 async function fetchStation (s3, bucket, key) {
-  return await s3.getObject({
+  return s3.getObject({
     Bucket: bucket,
     Key: key
   })
diff --git a/serverless.yml b/serverless.yml
index c783b2fb..d502db9c 100644
--- a/serverless.yml
+++ b/serverless.yml
@@ -63,3 +63,7 @@ functions:
     name: ${env:LFW_DATA_TARGET_ENV_NAME}${self:service}-imtdProcess
     handler: lib/functions/imtd-process.handler
     timeout: 900
+  dtsProcess:
+    name: ${env:LFW_DATA_TARGET_ENV_NAME}${self:service}-dtsProcess
+    handler: lib/functions/dts-process.handler
+    timeout: 900
diff --git a/test/data/imtd-stations.js b/test/data/imtd-stations.js
index efed3684..5d62809d 100644
--- a/test/data/imtd-stations.js
+++ b/test/data/imtd-stations.js
@@ -136,6 +136,25 @@ const apiNoMatchingThresholdResponse = {
   ]
 }
 
-const flattenedData = [{ stationId: 9521, floodWarningArea: '113FWFEXE04', floodWarningType: 'W', direction: 'u', level: 2.4 }, { stationId: 9521, floodWarningArea: '113FWFEXE03', floodWarningType: 'W', direction: 'u', level: 2.5 }, { stationId: 9521, floodWarningArea: '113FWFEXE04', floodWarningType: 'W', direction: 'u', level: 2.7 }, { stationId: 9521, floodWarningArea: '113FWFEXE06', floodWarningType: 'W', direction: 'u', level: 2.9 }, { stationId: 9521, floodWarningArea: '113FWFEXE05', floodWarningType: 'W', direction: 'u', level: 3.6 }, { stationId: 9524, floodWarningArea: '121WAF910', floodWarningType: 'A', direction: 'u', level: 0.95 }, { stationId: 9524, floodWarningArea: '121FWF214', floodWarningType: 'W', direction: 'u', level: 1.2 }, { stationId: 9524, floodWarningArea: '121FWF214', floodWarningType: 'W', direction: 'u', level: 1.7 }, { stationId: 9525, floodWarningArea: '121WAF918', floodWarningType: 'A', direction: 'u', level: 0.7 }, { stationId: 9525, floodWarningArea: '121FWF121', floodWarningType: 'W', direction: 'u', level: 0.7 }, { stationId: 9525, floodWarningArea: '121FWF121', floodWarningType: 'W', direction: 'u', level: 1.25 }]
+const api404 = {
+  type: 'https://tools.ietf.org/html/rfc7231#section-6.5.4',
+  title: 'Not Found',
+  status: 404,
+  traceId: '0HN38TFTTO070:00000003'
+}
+
+const flattenedData = [
+  { stationId: 9521, floodWarningArea: '113FWFEXE04', floodWarningType: 'W', direction: 'u', level: 2.4 },
+  { stationId: 9521, floodWarningArea: '113FWFEXE03', floodWarningType: 'W', direction: 'u', level: 2.5 },
+  { stationId: 9521, floodWarningArea: '113FWFEXE04', floodWarningType: 'W', direction: 'u', level: 2.7 },
+  { stationId: 9521, floodWarningArea: '113FWFEXE06', floodWarningType: 'W', direction: 'u', level: 2.9 },
+  { stationId: 9521, floodWarningArea: '113FWFEXE05', floodWarningType: 'W', direction: 'u', level: 3.6 },
+  { stationId: 9524, floodWarningArea: '121WAF910', floodWarningType: 'A', direction: 'u', level: 0.95 },
+  { stationId: 9524, floodWarningArea: '121FWF214', floodWarningType: 'W', direction: 'u', level: 1.2 },
+  { stationId: 9524, floodWarningArea: '121FWF214', floodWarningType: 'W', direction: 'u', level: 1.7 },
+  { stationId: 9525, floodWarningArea: '121WAF918', floodWarningType: 'A', direction: 'u', level: 0.7 },
+  { stationId: 9525, floodWarningArea: '121FWF121', floodWarningType: 'W', direction: 'u', level: 0.7 },
+  { stationId: 9525, floodWarningArea: '121FWF121', floodWarningType: 'W', direction: 'u', level: 1.25 }
+]
 
-module.exports = { stations, apiResponse, apiNoMatchingThresholdResponse, flattenedData }
+module.exports = { stations, apiResponse, apiNoMatchingThresholdResponse, flattenedData, api404 }
diff --git a/test/unit/functions/dts-process.js b/test/unit/functions/dts-process.js
new file mode 100644
index 00000000..6df05d35
--- /dev/null
+++ b/test/unit/functions/dts-process.js
@@ -0,0 +1,297 @@
+'use strict'
+
+const Lab = require('@hapi/lab')
+const { after, afterEach, before, beforeEach, experiment, test } = (exports.lab = Lab.script())
+const { expect } = require('@hapi/code')
+const { validateStationData } = require('../../../lib/functions/dts-process')
+const { getImtdApiResponse } = require('../../../lib/helpers/imtd-api')
+
+const {
+  stations: testStations,
+  apiResponse: testApiResponse,
+  api404
+} = require('../../data/imtd-stations')
+const axios = require('axios')
+const proxyquire = require('proxyquire')
+const mockDb = require('mock-knex')
+const db = require('../../../lib/helpers/db')
+const tracker = mockDb.getTracker()
+
+const sinon = require('sinon')
+
+function setupStdDbStubs (test) {
+  const stations = test || testStations
+  const methodCounter = {}
+  tracker.on('query', function (query) {
+    const responses = {
+      select: stations,
+      insert: [],
+      del: []
+    }
+    const method = query.method || query.sql.toLowerCase().replace(';', '')
+    methodCounter[method] = methodCounter[method] ? methodCounter[method] + 1 : 1
+    query.response(responses[query.method])
+  })
+  return methodCounter
+}
+
+function setupAxiosStdStub (response = testApiResponse) {
+  return sinon.stub(axios, 'get').resolves(response)
+}
+
+function setupHandlerWithStubs () {
+  const logger = {
+    info: sinon.stub(),
+    error: sinon.stub()
+  }
+  const invokeLambda = sinon.stub().resolves()
+  const { handler } = proxyquire('../../../lib/functions/dts-process', {
+    '../helpers/logging': logger,
+    '../helpers/invoke-lambda': invokeLambda
+  })
+
+  return { handler, logger, invokeLambda }
+}
+
+experiment('DTS processing', () => {
+  before(() => {
+    mockDb.mock(db)
+  })
+
+  after(() => {
+    mockDb.unmock(db)
+  })
+
+  beforeEach(async () => {
+    tracker.install()
+  })
+  afterEach(() => {
+    delete process.env.IMTD_BATCH_SIZE
+    sinon.restore()
+    tracker.uninstall()
+  })
+
+  test('it should handle a 404 response and delete the station from the DB', async () => {
+    const { handler } = setupHandlerWithStubs()
+    setupAxiosStdStub(api404)
+    const counter = setupStdDbStubs([{ rloi_id: 1001 }])
+    await handler()
+    expect(counter).to.equal({ begin: 1, commit: 1, del: 1, select: 1 })
+  })
+  test('for multiple RLOI ids it should select, delete and insert from DB as expected', async () => {
+    const { handler } = setupHandlerWithStubs()
+    const counter = setupStdDbStubs()
+    const axiosStub = setupAxiosStdStub()
+    await handler()
+    // 8 stations are selected in a single query, then each station's display
+    // time series is replaced in its own transaction (1 delete and 1 insert each)
+    expect(axiosStub.callCount).to.equal(8)
+    expect(counter).to.equal({ begin: 8, select: 1, del: 8, insert: 8, commit: 8 })
+  })
+  test('it selects RLOI ids as expected with no offset', async () => {
+    const { handler } = setupHandlerWithStubs()
+    setupStdDbStubs()
+    setupAxiosStdStub()
+
+    const queries = []
+    tracker.on('query', query => queries.push(query))
+    await handler()
+
+    expect(queries[0].sql).to.equal('select distinct "rloi_id" from "rivers_mview" where "rloi_id" is not null order by "rloi_id" asc limit $1')
+    expect(queries[0].bindings).to.equal([500])
+  })
+  test('it selects RLOI ids as expected with an offset', async () => {
+    const { handler } = setupHandlerWithStubs()
+    setupStdDbStubs()
+    setupAxiosStdStub()
+
+    const queries = []
+    tracker.on('query', query => queries.push(query))
+    await handler({ offset: 1500 })
+
+    expect(queries[0].sql).to.equal('select distinct "rloi_id" from "rivers_mview" where "rloi_id" is not null order by "rloi_id" asc limit $1 offset $2')
+    expect(queries[0].bindings).to.equal([500, 1500])
+  })
+  test('it does not self invoke if number of rloi ids processed is less than batch size', async () => {
+    process.env.IMTD_BATCH_SIZE = 10
+    const { handler, invokeLambda } = setupHandlerWithStubs()
+    process.env.AWS_LAMBDA_FUNCTION_NAME = 'some-function-name'
+    setupStdDbStubs(Array.from({ length: 9 }).map((v, i) => ({ rloi_id: 1000 + i })))
+    setupAxiosStdStub()
+
+    await handler({ offset: 20 })
+
+    expect(invokeLambda.getCalls().length).to.equal(0)
+  })
+  test('it self invokes if number of rloi ids processed is equal to batch size', async () => {
+    process.env.IMTD_BATCH_SIZE = 10
+    process.env.AWS_LAMBDA_FUNCTION_NAME = 'some-function-name'
+    const { handler, invokeLambda } = setupHandlerWithStubs()
+    setupStdDbStubs(Array.from({ length: 10 }).map((v, i) => ({ rloi_id: 1000 + i })))
+    setupAxiosStdStub()
+
+    await handler({ offset: 20 })
+
+    expect(invokeLambda.getCalls().length).to.equal(1)
+    expect(invokeLambda.getCalls()[0].args).to.equal(['some-function-name', { offset: 30 }])
+  })
+  test('it should log to info the details of inserts and deletes', async () => {
+    setupStdDbStubs([{ rloi_id: 1001 }])
+    setupAxiosStdStub()
+    const { handler, logger } = setupHandlerWithStubs()
+
+    await handler()
+    const logInfoCalls = logger.info.getCalls()
+    expect(logInfoCalls.length).to.equal(3)
+    expect(logInfoCalls[0].args[0]).to.equal('Retrieving up to 500 rloi_ids with an offset of 0')
+    expect(logInfoCalls[1].args[0]).to.equal('Retrieved 1 rloi_ids')
+    expect(logInfoCalls[2].args[0]).to.equal('Processed displayTimeSeries for RLOI id 1001')
+  })
+  test('it should return empty object from getImtdApiResponse when API returns 404 for a given RLOI id', async () => {
+    sinon.stub(axios, 'get').rejects({ response: { status: 404 } })
+
+    const data = await getImtdApiResponse(1001)
+    await expect(data).to.equal({})
+  })
+  test('it should return object from getImtdApiResponse when API successfully returns data for a given RLOI id', async () => {
+    setupAxiosStdStub()
+    const data = await getImtdApiResponse(1001)
+    await expect(data.status).to.equal(200)
+  })
+  test('it should throw an error when DB connection fails when getting RLOI id\'s', async () => {
+    tracker.on('query', function (query) {
+      query.reject(Error('refused'))
+    })
+    sinon.stub(axios, 'get').rejects({ response: { status: 404 } })
+    const { handler, logger } = setupHandlerWithStubs()
+
+    const returnedError = await expect(handler()).to.reject()
+    expect(returnedError.message).to.equal('Could not get list of id\'s from database (Error: select distinct "rloi_id" from "rivers_mview" where "rloi_id" is not null order by "rloi_id" asc limit $1 - refused)')
+
+    const logInfoCalls = logger.info.getCalls()
+    expect(logInfoCalls.length).to.equal(1)
+
+    const logErrorCalls = logger.error.getCalls()
+    expect(logErrorCalls.length).to.equal(0)
+  })
+  test('it should log an error when API returns a status which is an error and not a 404', async () => {
+    const counter = setupStdDbStubs([{ rloi_id: 1001 }])
+    const axiosStub = setupAxiosStdStub()
+    axiosStub.rejects({ response: { status: 500 } })
+    const { handler, logger } = setupHandlerWithStubs()
+
+    await handler()
+
+    const logErrorCalls = logger.error.getCalls()
+    expect(logErrorCalls.length).to.equal(1)
+    expect(logErrorCalls[0].args[0]).to.equal('Could not process data for station 1001 (IMTD API request for station 1001 failed (HTTP Status: 500))')
+
+    expect(counter, 'Should only select (i.e. not delete or insert) if there is a non 400 error from API').to.equal({ select: 1 })
+  })
+  test('it should log an error when network encounters an error', async () => {
+    const counter = setupStdDbStubs([{ rloi_id: 1001 }])
+    const axiosStub = setupAxiosStdStub()
+    axiosStub.rejects(Error('getaddrinfo ENOTFOUND imfs-prd1-thresholds-api.azurewebsites.net'))
+    const { handler, logger } = setupHandlerWithStubs()
+
+    await handler()
+
+    const logErrorCalls = logger.error.getCalls()
+    expect(logErrorCalls.length).to.equal(1)
+    expect(logErrorCalls[0].args[0]).to.equal('Could not process data for station 1001 (IMTD API request for station 1001 failed (Error: getaddrinfo ENOTFOUND imfs-prd1-thresholds-api.azurewebsites.net))')
+
+    expect(counter, 'Should only select (i.e. not delete or insert) if there is a non 400 error from API').to.equal({ select: 1 })
+  })
+  test('it should process both RLOI ids even when first encounters an IMTD 500 error', async () => {
+    const test = [
+      { rloi_id: 1001 },
+      { rloi_id: 1002 }
+    ]
+
+    const counter = setupStdDbStubs(test)
+    const axiosStub = setupAxiosStdStub()
+    axiosStub
+      .onFirstCall().rejects({ response: { status: 500 } })
+      .onSecondCall().resolves(testApiResponse)
+    const { handler, logger } = setupHandlerWithStubs()
+
+    await handler()
+
+    const logInfoCalls = logger.info.getCalls()
+    expect(logInfoCalls.length).to.equal(3)
+
+    const logErrorCalls = logger.error.getCalls()
+    expect(logErrorCalls.length).to.equal(1)
+    expect(logErrorCalls[0].args[0]).to.equal('Could not process data for station 1001 (IMTD API request for station 1001 failed (HTTP Status: 500))')
+
+    expect(counter).to.equal({ select: 1, begin: 1, del: 1, insert: 1, commit: 1 })
+  })
+  test('it should log an error and rollback when DB connection fails when deleting thresholds before inserting', async () => {
+    tracker.on('query', function (query, step) {
+      [
+        () => {
+          expect(query.method).to.equal('select')
+          query.response([{ rloi_id: 1001 }])
+        },
+        () => {
+          expect(query.sql).to.equal('BEGIN;')
+          query.response()
+        },
+        () => {
+          expect(query.method).to.equal('del')
+          query.reject(Error('Delete Fail'))
+        },
+        () => {
+          expect(query.sql).to.equal('ROLLBACK')
+          query.response()
+        }
+      ][step - 1]()
+    })
+    setupAxiosStdStub()
+    const { handler, logger } = setupHandlerWithStubs()
+
+    await handler()
+
+    const logErrorCalls = logger.error.getCalls()
+    expect(logErrorCalls.length).to.equal(2)
+    expect(logErrorCalls[0].args[0]).to.equal('Database error processing stationData')
+    expect(logErrorCalls[1].args[0]).to.equal('Could not process data for station 1001 (delete from "station_display_time_series" where "station_id" = $1 - Delete Fail)')
+
+    const logInfoCalls = logger.info.getCalls()
+    expect(logInfoCalls.length).to.equal(2)
+  })
+  experiment('validateStationData', () => {
+    let stationDataArray
+
+    beforeEach(() => {
+      stationDataArray = [
+        {
+          station_id: 1,
+          direction: 'north',
+          display_time_series: true
+        },
+        {
+          station_id: 2,
+          direction: 'south',
+          display_time_series: false
+        }
+      ]
+    })
+
+    test('validates the station data successfully', async () => {
+      const result = await validateStationData(stationDataArray)
+      expect(result).to.equal(stationDataArray)
+    })
+
+    test('throws an error when station data is invalid', async () => {
+      stationDataArray[0].station_id = 'invalid'
+      try {
+        await validateStationData(stationDataArray)
+      } catch (err) {
+        expect(err).to.exist()
+        expect(err.message).to.equal('Validation error: "station_id" must be a number')
+      }
+    })
+  })
+})
diff --git a/test/unit/functions/imtd-process.js b/test/unit/functions/imtd-process.js
index 32fad357..2a04a15f 100644
--- a/test/unit/functions/imtd-process.js
+++ b/test/unit/functions/imtd-process.js
@@ -191,33 +191,14 @@ experiment('imtd processing', () => {
   })
 
   experiment('sad path', () => {
-    test('it should log to info when API returns 404 for a given RLOI id', async () => {
-      setupStdDbStubs([{ rloi_id: 1001 }])
-      sinon.stub(axios, 'get').rejects({ response: { status: 404 } })
-      const { handler, logger } = setupHandlerWithStubs()
-
-      await handler()
-
-      const logInfoCalls = logger.info.getCalls()
-      expect(logInfoCalls.length).to.equal(4)
-      expect(logInfoCalls[2].args[0]).to.equal('Station 1001 not found (HTTP Status: 404)')
-      expect(logInfoCalls[3].args[0]).to.equal('Deleted thresholds for RLOI id 1001')
-
-      const logErrorCalls = logger.error.getCalls()
-      expect(logErrorCalls.length).to.equal(0)
-    })
     test('it should log an error when API returns a status which is an error and not a 404', async () => {
       const counter = setupStdDbStubs([{ rloi_id: 1001 }])
       const axiosStub = setupAxiosStdStub()
       axiosStub.rejects({ response: { status: 500 } })
-      const { handler, logger } = setupHandlerWithStubs()
+      const { handler } = setupHandlerWithStubs()
 
       await handler()
 
-      const logErrorCalls = logger.error.getCalls()
-      expect(logErrorCalls.length).to.equal(1)
-      expect(logErrorCalls[0].args[0]).to.equal('Could not process data for station 1001 (IMTD API request for station 1001 failed (HTTP Status: 500))')
-
       expect(counter, 'Should only select (i.e. not delete or insert) if there is a non 400 error from API').to.equal({ select: 1 })
     })
     test('it should log an error when network encounters an error', async () => {
@@ -348,9 +329,8 @@ experiment('imtd processing', () => {
       await handler()
 
       const logErrorCalls = logger.error.getCalls()
-      expect(logErrorCalls.length).to.equal(2)
-      expect(logErrorCalls[0].args[0]).to.equal('Error deleting thresholds for station 1001')
-      expect(logErrorCalls[1].args[0]).to.equal('Could not process data for station 1001 (delete from "station_imtd_threshold" where "station_id" = $1 - Delete Fail)')
+      expect(logErrorCalls.length).to.equal(1)
+      expect(logErrorCalls[0].args[0]).to.equal('Could not process data for station 1001 (delete from "station_imtd_threshold" where "station_id" = $1 - Delete Fail)')
 
       const logInfoCalls = logger.info.getCalls()
       expect(logInfoCalls.length).to.equal(2)