Merge branch 'develop' into clinical_id_service_integration
* develop:
  fix to resolve incorrect merging of therapies into treatments (#1187)
  Chore / Move Clinical Service Types (#1185)
  ❌ Sort Invalid Clinical Records first (#1184)
  RC 1.87.1
  Remove unused argument (#1179)
  🏷️ 1141 Add Exception Manifest to Donor tsv (#1178)
  Fixed Resolver (#1177)
  Add Exceptions to Program TSV (#1175)
UmmulkiramR committed Jun 11, 2024
2 parents f893255 + eb1babb commit 5bcf37b
Showing 16 changed files with 5,491 additions and 5,284 deletions.
12 changes: 6 additions & 6 deletions package-lock.json

Some generated files are not rendered by default.

10 changes: 5 additions & 5 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "argo-clinical",
"version": "1.87.0",
"version": "1.87.1",
"description": "Clinical submission system and repo.",
"scripts": {
"start": "npm run serve",
@@ -97,13 +97,13 @@
"typescript": "^5.0.0"
},
"dependencies": {
"@apollo/subgraph": "2.5.2",
"apollo-server-core": "^3.12.0",
"@apollo/server": "4.0.0",
"@overturebio-stack/lectern-client": "1.4.0",
"@types/mongoose-paginate-v2": "^1.3.11",
"@apollo/subgraph": "2.5.2",
"@icgc-argo/ego-token-utils": "^8.2.0",
"@overturebio-stack/lectern-client": "^1.5.0",
"@types/mongoose-paginate-v2": "^1.3.11",
"adm-zip": "^0.4.16",
"apollo-server-core": "^3.12.0",
"async": "^3.0.1",
"bcrypt-nodejs": "^0.0.3",
"bluebird": "^3.5.5",
72 changes: 60 additions & 12 deletions src/clinical/api/clinical-api.ts
@@ -19,7 +19,7 @@

import { Request, Response } from 'express';
import * as service from '../clinical-service';
import { ClinicalDataQuery } from '../clinical-service';
import { ClinicalDataQuery } from '../types';
import { getExceptionManifestRecords } from '../../submission/exceptions/exceptions';
import { ExceptionManifestRecord } from '../../exception/exception-manifest/types';
import {
@@ -30,7 +30,7 @@ import {
} from '../../decorators';
import { ControllerUtils, DonorUtils, TsvUtils } from '../../utils';
import AdmZip from 'adm-zip';
import { ClinicalEntityData, Donor } from '../clinical-entities';
import { ClinicalEntityData, ClinicalInfo, Donor } from '../clinical-entities';
import { omit } from 'lodash';
import { DeepReadonly } from 'deep-freeze';
import {
@@ -59,22 +59,19 @@ export const parseDonorIdList = (donorIds: string) =>

export const createClinicalZipFile = (
data: ClinicalEntityData[],
exceptionManifest?: {
programShortName: string;
exceptions: ExceptionManifestRecord[];
},
exceptions?: ExceptionManifestRecord[],
) => {
const zip = new AdmZip();
data.forEach((entityData) => {
const tsvData = TsvUtils.convertJsonRecordsToTsv(entityData.records, entityData.entityFields);
zip.addFile(`${entityData.entityName}.tsv`, Buffer.alloc(tsvData.length, tsvData));
});
if (exceptionManifest && exceptionManifest.exceptions.length) {
const headers = exceptionManifest.exceptions
if (exceptions?.length) {
const headers = exceptions
.map((exception) => Object.keys(exception))
.flat()
.filter((key, index, keyArray) => keyArray.indexOf(key) === index);
const tsvData = TsvUtils.convertJsonRecordsToTsv(exceptionManifest.exceptions, headers);
const tsvData = TsvUtils.convertJsonRecordsToTsv(exceptions, headers);
zip.addFile(`exceptions_manifest.tsv`, Buffer.alloc(tsvData.length, tsvData));
}
return zip;
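
For orientation, here is a minimal sketch (not from the repository) of the archive layout this helper produces once exception records are present; the entity names, column names, and TSV payloads are placeholders.

```typescript
// Illustrative only: mirrors the zip assembly above with placeholder data.
import AdmZip from 'adm-zip';

// Placeholder stand-ins for the ClinicalEntityData TSVs and ExceptionManifestRecord rows.
const entityTsvs = { 'donor.tsv': 'col_a\tcol_b\n', 'specimen.tsv': 'col_a\tcol_b\n' };
const exceptions = [{ exception_type: 'example', donor_id: 1 }];

const zip = new AdmZip();
Object.entries(entityTsvs).forEach(([name, tsv]) => zip.addFile(name, Buffer.from(tsv)));

// The new branch: an exceptions manifest entry is appended only when records exist.
if (exceptions.length) {
	zip.addFile('exceptions_manifest.tsv', Buffer.from('exception_type\tdonor_id\n'));
}

console.log(zip.getEntries().map((entry) => entry.entryName));
// => [ 'donor.tsv', 'specimen.tsv', 'exceptions_manifest.tsv' ]
```
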
@@ -94,14 +91,24 @@ class ClinicalController {
}

const data = await service.getClinicalData(programId);
const donors = await service.getDonors(programId);

const donorIds = donors.map((donor: DeepReadonly<Donor>) => donor.donorId);
const submitterDonorIds = donors.map((donor: DeepReadonly<Donor>) => donor.submitterId);

const exceptions =
(await getExceptionManifestRecords(programId, {
donorIds,
submitterDonorIds,
})) || [];

const todaysDate = currentDateFormatted();
res
.status(200)
.contentType('application/zip')
.attachment(`${programId}_Clinical_Data_${todaysDate}.zip`);

const zip = createClinicalZipFile(data);
const zip = createClinicalZipFile(data, exceptions);

res.send(zip.toBuffer());
}
@@ -160,7 +167,7 @@ class ClinicalController {
.attachment(fileName)
.setHeader('content-disposition', fileName);

const zip = createClinicalZipFile(entityData, { programShortName, exceptions });
const zip = createClinicalZipFile(entityData, exceptions);

res.send(zip.toBuffer());
}
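
The hunk above captures the signature change: the second argument is now the ExceptionManifestRecord array itself rather than a wrapper object carrying the program name. A hedged before/after sketch of the call shape; the relative import paths are assumptions, and the ambient declarations stand in for values the controller already has in scope.

```typescript
// Assumed import paths based on the file locations shown in this diff.
import { createClinicalZipFile } from './clinical/api/clinical-api';
import { ClinicalEntityData } from './clinical/clinical-entities';
import { ExceptionManifestRecord } from './exception/exception-manifest/types';

declare const entityData: ClinicalEntityData[];
declare const exceptions: ExceptionManifestRecord[];

// Before this commit: createClinicalZipFile(entityData, { programShortName, exceptions });
// After: the helper no longer needs the program name, so the records are passed directly.
const zip = createClinicalZipFile(entityData, exceptions);
```
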
@@ -186,15 +193,56 @@

const donorEntityData = await service.getDonorEntityData(donorIds);

const allExceptions: ExceptionManifestRecord[][] = [];

const date = currentDateFormatted();
const fileName = `filename=Donor_Clinical_Data_${date}.zip`;

if (donorEntityData.length) {
const entityRecords = donorEntityData[0].records;

// File Table can request multiple programs
const donorPrograms = entityRecords
.map((record) => record.program_id)
.filter(
// Remove duplicate program names
(programId, index, array): programId is string =>
typeof programId === 'string' && array.indexOf(programId) === index,
);

for (const programShortName of donorPrograms) {
// Get Donor Ids + Exceptions for each program
const programDonors = entityRecords
.filter(
(record): record is ClinicalInfo & { donor_id: number; program_id: string } =>
typeof record.donor_id === 'number' && record.program_id === programShortName,
)
.map((record) => record.donor_id)
.filter(
(donorId, index, array) =>
// Remove duplicate donorIds
array.indexOf(donorId) === index,
);

const programExceptions =
(await getExceptionManifestRecords(programShortName, {
donorIds: programDonors,
submitterDonorIds: [],
})) || [];

allExceptions.push(programExceptions);
}
}

const exceptions = allExceptions.flat();

res
.status(200)
.contentType('application/zip')
.attachment(fileName)
.setHeader('content-disposition', fileName);

const zip = createClinicalZipFile(donorEntityData);
const zip = createClinicalZipFile(donorEntityData, exceptions);

res.send(zip.toBuffer());
}
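
The donor-download hunk above relies twice on the same duplicate-removal idiom (once for program IDs, once for donor IDs), and the exceptions-manifest headers earlier in the file use it as well. A standalone sketch of the pattern follows; `uniq` is a hypothetical helper, not something the codebase defines — the diff inlines the filter each time.

```typescript
// Hypothetical helper illustrating the indexOf-based dedupe used in the diff above.
const uniq = <T>(values: readonly T[]): T[] =>
	values.filter((value, index, array) => array.indexOf(value) === index);

// Example: collapsing repeated program IDs pulled off donor records (values are made up).
const programIds = uniq(['PROG-EX-A', 'PROG-EX-A', 'PROG-EX-B']);
console.log(programIds); // => [ 'PROG-EX-A', 'PROG-EX-B' ]
```
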
66 changes: 32 additions & 34 deletions src/clinical/clinical-service.ts
@@ -27,7 +27,6 @@ import {
ClinicalEntityErrorRecord,
ClinicalEntitySchemaNames,
ClinicalErrorsResponseRecord,
EntityAlias,
aliasEntityNames,
allEntityNames,
} from '../common-model/entities';
@@ -48,44 +47,20 @@ import {
import { migrationRepo } from '../submission/migration/migration-repo';
import { prepareForSchemaReProcessing } from '../submission/submission-service';
import { Errors, notEmpty } from '../utils';
import { ClinicalEntityData, ClinicalInfo, Donor, Sample } from './clinical-entities';
import { ClinicalEntityData, Donor, Sample } from './clinical-entities';
import { DONOR_DOCUMENT_FIELDS, donorDao } from './donor-repo';
import { runTaskInWorkerThread } from './service-worker-thread/runner';
import { WorkerTasks } from './service-worker-thread/tasks';
import { CompletionState } from './api/types';
import {
ClinicalDataQuery,
ClinicalDataSortType,
ClinicalDataSortTypes,
ClinicalDonorEntityQuery,
PaginationQuery,
} from './types';

const L = loggerFor(__filename);

// Base type for Clinical Data Queries
export type ClinicalDonorEntityQuery = {
donorIds: number[];
submitterDonorIds: string[];
entityTypes: EntityAlias[];
};

export type PaginationQuery = {
page: number;
pageSize?: number;
sort: string;
};

type ClinicalDataPaginatedQuery = ClinicalDonorEntityQuery & PaginationQuery;

export type ClinicalDataQuery = ClinicalDataPaginatedQuery & {
completionState?: {};
};

// GQL Query Arguments
// Submitted Data Table, SearchBar, Sidebar, etc.
export type ClinicalDataApiFilters = ClinicalDataPaginatedQuery & {
completionState?: CompletionState;
};

export type ClinicalDataVariables = {
programShortName: string;
filters: ClinicalDataApiFilters;
};
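
For readers following the "Move Clinical Service Types" change (#1185): the block below is an assumed reconstruction of the new src/clinical/types.ts, pieced together from the definitions removed here and the names now imported from './types'. The actual file may differ, and the ClinicalDataSortTypes shape in particular is a guess based on the three values used in getPaginatedClinicalData further down.

```typescript
// Assumed sketch of src/clinical/types.ts — not copied from the repository.
import { EntityAlias } from '../common-model/entities';

// Base type for Clinical Data Queries (moved unchanged from clinical-service.ts).
export type ClinicalDonorEntityQuery = {
	donorIds: number[];
	submitterDonorIds: string[];
	entityTypes: EntityAlias[];
};

export type PaginationQuery = {
	page: number;
	pageSize?: number;
	sort: string;
};

type ClinicalDataPaginatedQuery = ClinicalDonorEntityQuery & PaginationQuery;

export type ClinicalDataQuery = ClinicalDataPaginatedQuery & {
	completionState?: {};
};

// Guessed shape for the sort-type constants referenced in getPaginatedClinicalData.
export const ClinicalDataSortTypes = {
	defaultDonor: 'defaultDonor',
	invalidEntity: 'invalidEntity',
	columnSort: 'columnSort',
} as const;

export type ClinicalDataSortType = typeof ClinicalDataSortTypes[keyof typeof ClinicalDataSortTypes];
```
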

export async function updateDonorSchemaMetadata(
donor: DeepReadonly<Donor>,
migrationId: string,
@@ -231,8 +206,31 @@ export const getPaginatedClinicalData = async (programId: string, query: Clinica
// Get all donors + records for given entity
const { donors, totalDonors } = await donorDao.findByPaginatedProgramId(programId, query);

const donorIds = donors.map((donor) => donor.donorId);

const isDefaultDonorSort = query.sort.includes('completionStats.coreCompletionPercentage');
const isInvalidSort = query.sort.includes('schemaMetadata.isValid');

const clinicalErrors = isInvalidSort
? (await getClinicalErrors(programId, donorIds)).clinicalErrors
: [];

const sortType: ClinicalDataSortType = isDefaultDonorSort
? ClinicalDataSortTypes.defaultDonor
: isInvalidSort
? ClinicalDataSortTypes.invalidEntity
: ClinicalDataSortTypes.columnSort;

const taskToRun = WorkerTasks.ExtractEntityDataFromDonors;
const taskArgs = [donors as Donor[], totalDonors, allSchemasWithFields, query.entityTypes, query];
const taskArgs = [
donors as Donor[],
totalDonors,
allSchemasWithFields,
query.entityTypes,
query,
sortType,
clinicalErrors,
];

// Return paginated data
const data = await runTaskInWorkerThread<{ clinicalEntities: ClinicalEntityData[] }>(
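
As a usage illustration of the new sort routing: only the sort-string matching comes from the diff above; the program name, entity alias, and pagination values below are invented.

```typescript
// Hypothetical call: a sort on schemaMetadata.isValid now triggers the clinical-error
// lookup and selects ClinicalDataSortTypes.invalidEntity for the worker task.
const invalidRecordsFirst = await getPaginatedClinicalData('EXAMPLE-PROGRAM', {
	donorIds: [],
	submitterDonorIds: [],
	entityTypes: ['donor'], // example alias only
	page: 0,
	pageSize: 20,
	sort: 'schemaMetadata.isValid',
});
```
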
2 changes: 1 addition & 1 deletion src/clinical/donor-repo.ts
@@ -18,7 +18,7 @@
*/

import { Donor } from './clinical-entities';
import { ClinicalDataQuery, ClinicalDonorEntityQuery } from './clinical-service';
import { ClinicalDataQuery, ClinicalDonorEntityQuery } from './types';
import { getRequiredDonorFieldsForEntityTypes } from '../common-model/functions';
import mongoose, { PaginateModel } from 'mongoose';
import mongoosePaginate from 'mongoose-paginate-v2';