From 64ddb4996192a404e6e0f84285d22adb46851580 Mon Sep 17 00:00:00 2001 From: Monika Tercjak <57092960+sheenaze@users.noreply.github.com> Date: Thu, 23 Mar 2023 19:12:02 +0100 Subject: [PATCH] Frm4sm release 2 (#640) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Updated submodule testdata for SMOSL2 * Fixed tests and integrated for SMOSL2 * Update for SMOSL2 * Added test for SMOS L2 column combine * set up updated to include a new parameterized filter * Updated preprocess. version in environment * parameter updated * Fixed tests * Updated pytesmo version * Upload data to frm4sm release (#523) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports 
updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * User upload data (#489) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, 
uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * User upload data (#490) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now 
first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * User upload data (#491) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file 
form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * User upload data (#492) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file 
added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * User upload data (#493) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add 
scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data 
are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * User upload data (#494) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button 
for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * 
my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * User upload data (#495) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling 
applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data 
updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * User upload data (#496) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added 
* tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated 
with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * User upload 
data (#497) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition 
updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the 
file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check 
if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * User upload data (#499) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end 
sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form 
modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * User upload data (#500) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * 
file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list 
updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date 
field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * fields names updated * User upload data (#501) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my 
dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now 
first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the 
possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * fields names updated * problem with pretty_name vs short_name of variable fixed * User upload data (#502) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view 
for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work… * Upload data to frm4sm release (#525) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls 
imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * User upload data (#489) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, 
uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * User upload data (#490) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now 
first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * User upload data (#491) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file 
form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * User upload data (#492) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file 
added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * User upload data (#493) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add 
scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data 
are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * User upload data (#494) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button 
for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * 
my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * User upload data (#495) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling 
applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data 
updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * User upload data (#496) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added 
* tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated 
with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * User upload 
data (#497) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition 
updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the 
file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check 
if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * User upload data (#499) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end 
sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? 
added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form 
modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * User upload data (#500) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * 
file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list 
updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date 
field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * fields names updated * User upload data (#501) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my 
dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work around added to add scroll-to-section option * user data uploading form added * user data uploading form updated * draft of user data file added * uploading file form updated, so now first the file has to be chosen and then metadata form shows up * file-upload-form component not needed * draft back-end function for uploading file added * a blank line added * some updates in the uploading file serializer done * uploading data view created, uploading view and serializers moved to the new file, urls imports updated * getting list of user data files added to the back end * user-data-file.dto.ts added to map the file model from the back end * user-dataset-list component added to present list of files uploaded by the user * user-dataset service added and respective code moved there from the validationrun service file * user-file-upload updated to use appropriate service * my-datasets site update to include list of user datasets * deleting user data added on both front and back end sides * my datasets table updated, some columns for metadata added * condition updated so it doesn't send two requests to the back-end * condition updated, so it doesn't show the table when there are no entries * dataset model updated to include the user that the data belongs to; the appropriate api view updated * user_dataset_file model added and upload_user_data_view updated * getting datasets into front end updated with the possibility of showing user data * urls updated, because id is now a string not int * id instead of pk taken * tests updated * refreshing page added when a file is uploaded * bug fixed, now it returns united data * uploading data updated, now 
first the metadata is sent, appropriate dataset, its version and variable are created and then file is uploaded * uploading file and metadata functions updated * removing file when the instance is removed added * my-datasets list updated to present information from db * caching updated to remember if also user datasets should be taken or not * user data are not taken for the reference datasets * position removed as not needed * ? added to the async pipe * a spinner added to inform user that the data is being uploaded * user data file id changed to string * a button added to run some tests on the uploaded file; instead of passing formData, only the file is passed, what fixes the problem with opening uploaded netCDF * some code commented as it's only for testing purposes * migration added * dataset_view.py updated with the default userData parameter; dataset fixture updated with the user entry * user data path added and set as the default for data storage * docker compose for test instance updated * upload file paths updated to the prod one and appropriate migration added * appropriate migration added * direction changed, first the file and then the metadata is saved * getting raw file path added as property * a very first draft of the user data reader added * model updated to store dimension names; appropriate migration added * some code refinement done * environment updated * function name updated * dataset storage path updated to not include the file name; get_raw_file_path property updated to strip also '/' * env updated to include a test version of qa4sm-preprocessing package * setdatasetpaths updated to not to change user dataset storage directories * file path updated * celery bound with data folder * some additional form fields added and handled * some code refinement done * variable and dimension part commented, as we are going to retrieve information from the file * UserDataFileDto updated according to the model changes * user dataset list updated with the 
possibility of editing variable and dimension names (only front-end) * model updated and appropriate migration added * retrieving dimensions and variable names from the file added * cancel button added and styled * upload date field added along with appropriate migration * user-data-row component added to develop each row separately * my-datsets component updated to use user-data-row instead of user-dataset-list * uploading form modified to consist of fewer fields * updateMetadata method added * updateMetadata url added * function for updating metadata updated to include also dimensions * toast added, some code refinement done * variable long name included on the list * Some code refinement done; styling and date format improved * preprocessing package updated * tooltips added * some changes in retrieving coordinate names from the file applied to use the function from the preprocessing package * migrations updated * model updated, appropriate migration added * list of front-end urls updated * api views for uploading file updated to be optimal and consistent * user data file updated according to changes in the model * user dataset row updated according to changes in the back-end * serializer updated; printing removed; * Information about user dataset managing options added; edit and tooltip icons added; tooltip text edited * editing dataset and version names added * scroll to top added, typo fixed * a property added to the user-data model to check if it has been used in a validation; removed button disabled for such a case; * some styling updated * retrieving variable name updated * user dataset file model added to the admin panel * fields names updated * problem with pretty_name vs short_name of variable fixed * User upload data (#502) * user-file-upload component added * user-file-upload component place on the user-profile page * userFileUpload added to the validationrun service * userFileUpload added to the app.modules * api url for uploading added, a draft view 
for serving for it added * tsconfig updated * user file component updated * userFileUpload updated and some styling improved * my-datasets page added * my-dataset page added to the navigation menu * user-file-upload component moved to my datasets page * some styling applied * my dataset page updated * user-file-upload component updated * class 'center' moved to the global css file * directory update done * prime icons updated * upload file changed to select file button and new button for uploading added * file-upload-form component added * help page updated - added a section for used data standard * work… * SMOSL2/3 filtering updates (#527) * Applied patch on SMOSL3 filtering * Applied changes to SMOS L2 and L3 filtering * Smapl2 integration (#531) * Update datasets.json resolution added to SMOS_L3 dataset * Added SMAP L2 * Fixed tests Co-authored-by: Monika Tercjak <57092960+sheenaze@users.noreply.github.com> * Adapted timestamp in SMAP L2 and SMAP L3 v6 * Updated env, SMOSL2 filters * improved error handling in QA4SM * adapted tests (#536) * adapted tests * bump pytesmo to 0.14.3 for metric fixes * Temporal reference (#538) * validation run model updated to include new reference fields; proper admin site updated; proper migrations added * duplicated id field removed * admin page for validationRun updated with new models * code styling improved * is_only_reference changed to is_spatial_reference to better describe the field; appropriate migration added; fixture updated * appropriate files updated to replace is_only_reference with is_spatial_reference * appropriate files updated to replace reference_configuration with spatial_reference_configuration (backend) * appropriate files updated to replace reference_configuration with spatial_reference_configuration (api) * appropriate files updated to replace reference_configuration with spatial_reference_configuration (frontend) * new model fields added to the validationrun model in the frontend * mail about finished
validation updated to include information about both spatial and temporal reference datasets. * function name updated to be precise * function name updated to be precise * api for validation configuration updated to include new names and new fields * validation run interface updated along with the function for reloading validation settings * scaling_reference_removed as it is duplication of an existing field; migration updated * code updated to remove scaling_reference * reference section removed in the frontend * new component for defining validation reference added * changes applied to the reference model, dividing reference into two parts, temporal and spatial * tcol condition updated * front-end changes applied -> reference dataset selector removed; added two selectors for spatial and temporal reference; appropriate model changes introduced; styling updated * model updated and appropriate migration added * dtos updated according updates in the backend * is_scaling_reference field added to the DatasetConfiguration model; proper migration updated; properties added to the ValidationRun model, preparing for removal respective fields. * some front end updates done to include layout change * scaling methods imported to globals to pass it further to frontend via api * api for passing scaling methods added * function get_scaling_methods moved to the validation_config_view as the output refers to the validation configuration * view updated * reference type removed as not used * scaling component updated - hard coded methods replaced with ones taken from the backend, component placed next to dataset, all datasets enabled as reference; respective components updated * condition added not to show scale to when no scaling method is chosen * added a function for verification if the chosen reference dataset is still on the list of possibilities.
* small code refinement done on the scaling component * small code refinement done on the scaling component * validate-reference component updated to take care of its functionality * validate component updated - validate-reference component functionality removed * updating reference datasets added when one dataset is removed * code refinement done; bug fixes applied * information about dataset being a reference updated to include scaling ref * blocking ISMN to only one choice added * not used styling removed * styling of datasets marked as reference updated * added setting ISMN as the spatial reference when changing dataset to it * not needed printing removed * scaling method set onInit * scaling property added * starting validation code uncommented * scaling_reference_config field added * not needed properties removed * some code refinement done * reloading data updated according to temporal reference changes * scaling method model * function for returning string with reference information moved to the proper service, since it's used in more than one place * reference column removed, information on reference added to each dataset * dto updated * name of the function updated to better describe what the function returns, fields added to serializer * some front-end updates done to include reference information * 'Reference dataset' replaced with 'Spatial reference dataset' * 'reference' replaced with 'spatial reference' * dataset info page updated, to not distinguish between regular and reference only datasets * fixture updated to include proper information about ISMN dataset * validation summary improved * condition improved, so there is no dataset reference set when the method is none * condition added to display information on the scaling reference only when method is not none * conditions removed * some code refinement done * scaling updated to include remove scaling datasets if none method is chosen * some improvements regarding scaling reference done * 
verification of the chosen value retrieved to properly set the reference on the dataset removal * verification of the chosen Scale model retrieved to properly set the reference on the dataset removal * not needed code removed; spatial reference dataset forced to be the last one * validation summary updated to properly show scaling information * comparison_label property updated with the proper field name * update done to adjust the component to the current 'reference' approach * publishing form updated * improvements on reloading settings done * temporal reference added to validation; names updated to be consistent with the new approach * printing removed * temporal reference set * field name updated * temporal reference field added; reference changed to spatial_reference * testdata updated to the current version * proper name changes introduce to better describe parameters; missing values added * commented code removed * ignoring exception added * is_spatial_reference and is_temporal_reference fileds added to be compared when two validations are being compared * temporal reference set * get back to C3S version V202012 * default scaling reference removed * default scaling reference removed * default scaling method set to none; proper migration added * verifying configuration in terms of reference settings added * changes in tests added to set properly reference in both validationRun and datasetConfiguration models * scaling methods set to mean/std to retrieve previous settings * setting grid size for irregular grids moved to be outside scaling_ref condition * verification of reference configuration updated to be more precise in raising errors * test for the function verifying reference configuration added * check validation configuration function name changed not to be private; verification moved to the proper api view * function for checking validation configuration consistency moved to test_validation as a part of checking results procedure; tests for the 
function removed * verifying configuration consistency moved to the serializer validate method; tests updated to check proper handling of inproper configuration * self parameter added * Fix gldas versio order (#540) * commented code removed, not needed printing removed * setting the last element for variable added * irregular grids set in degrees instead of km (#541) * Added ISMN data set for Ver. exercise (#543) * Update versions.json pretty name of the verification data shorten. * Add new ISMN dataset and filter * Add frm filter * New ismn data * Add manual CI trigger * Updated uploading form (#548) * Update upload form (#544) * taking user data fixed * error messages updated * preprocessing package updated * accepting only netCDFs and zips added * some updates done to include zip files * dimension names removed from the frontend * dimension names related functions removed * dimension names removed from the user data model; uploading data updated to fully use preprocessing package, also for retrieval variable names * printing and not needed comments removed * tests and reader updated to remove dimension names * preprocessing package updated * attr removed as the entry is a dict and not a dataset anymore * test updated with the correct error message * readers for user data updated * env updated * Update upload form (#545) * taking user data fixed * error messages updated * preprocessing package updated * accepting only netCDFs and zips added * some updates done to include zip files * dimension names removed from the frontend * dimension names related functions removed * dimension names removed from the user data model; uploading data updated to fully use preprocessing package, also for retrieval variable names * printing and not needed comments removed * tests and reader updated to remove dimension names * preprocessing package updated * attr removed as the entry is a dict and not a dataset anymore * test updated with the correct error message * readers for user 
data updated * env updated * proper migration added * Update upload form (#546) * taking user data fixed * error messages updated * preprocessing package updated * accepting only netCDFs and zips added * some updates done to include zip files * dimension names removed from the frontend * dimension names related functions removed * dimension names removed from the user data model; uploading data updated to fully use preprocessing package, also for retrieval variable names * printing and not needed comments removed * tests and reader updated to remove dimension names * preprocessing package updated * attr removed as the entry is a dict and not a dataset anymore * test updated with the correct error message * readers for user data updated * env updated * proper migration added * full message passed to the front end for debugging * Update upload form (#547) * taking user data fixed * error messages updated * preprocessing package updated * accepting only netCDFs and zips added * some updates done to include zip files * dimension names removed from the frontend * dimension names related functions removed * dimension names removed from the user data model; uploading data updated to fully use preprocessing package, also for retrieval variable names * printing and not needed comments removed * tests and reader updated to remove dimension names * preprocessing package updated * attr removed as the entry is a dict and not a dataset anymore * test updated with the correct error message * readers for user data updated * env updated * proper migration added * full message passed to the front end for debugging * dealing with id added * information about data belonging to the users added * admin panel for dataset, version and variable developed * condition added in case the db is empty (which is the case when tests are running) * Update qa4sm_env.yml temporarily switch to the master branch * Update tests * Fix db conflicts * Update testdata * Update env * Update tests for 
metadata plots and frm class * Remove parameterized package * Update tests * Remove unnecessary import * Update networks fixtures (#550) * Filter ismn metadata in bounding box * Set reader version correctly * Updated uploading form (#553) * taking user data fixed * error messages updated * preprocessing package updated * accepting only netCDFs and zips added * some updates done to include zip files * dimension names removed from the frontend * dimension names related functions removed * dimension names removed from the user data model; uploading data updated to fully use preprocessing package, also for retrieval variable names * printing and not needed comments removed * tests and reader updated to remove dimension names * preprocessing package updated * attr removed as the entry is a dict and not a dataset anymore * test updated with the correct error message * readers for user data updated * env updated * proper migration added * full message passed to the front end for debugging * dealing with id added * information about data belonging to the users added * admin panel for dataset, version and variable developed * condition added in case the db is empty (which is the case when tests are running) * deleting associated dataset, version and variable moved to the model, so it is possible to remove everything at once from the admin panel; reference names changed in the model * proper testdata set * tests updated to pass with the newest version of the reader * tests updated to pass with the newest version of the reader * tests updated to pass with the newest version of the reader * set qa4sm-reader to the master branch * Block publishing for user data (#554) * blocking publishing for validation containing user dataset added * property checking if validation contains user dataset added * Status plots (#556) * status plots added * status plots added * test updated to include 'status' case; not needed printing removed * graphs tests updated to include status metric * 
information updated * number of variables expressed with a proper formula for the 'status' metric * small frontend improvements done: (#557) - newest ISMN version set by default; - information on the my-dataset page has been updated * Adapted SMOSL2 filtering to ignore NaNs * Upload data guidelines (#562) * upload data help page created (#558) * User data guidelines (#559) * upload data help page created * proper component added * Frm4sm release2 data upload file requirements (#560) * Add user data requirements text * Update user data page * Upload data guidelines (#561) * uploading data procedure described on the help page * some styling improved * proper screenshots added --------- Co-authored-by: Wolfgang Preimesberger * workaround for SMOS * removed ignore_nans keyword * fixed tests * added filtering, to make sure that only those user datasets are shown which have a file assigned. (#564) * save validation also if all points failed * copying and reloading settings disabled if validation contains someone else's data (#566) * List of validation for user data (#568) * list of validation run with user data added to the user data panel * List of validations added to the user data row * user data row label updated * method for verifying content of the uploaded zip file added (#569) * ISMN removed as default temporal reference, but not removed from the list, just marked as not recommended (#571) * removed SMOS L2 reader adapter * Updated testdata * removed 0.0 as valid lower thres. 
for ISMN * anomalies dates fixed to be passed to backend (#576) * Add tooltip to reference (#577) * tooltips for spatial and temporal reference dropdowns added * problem with too long dropdown list solved * Change visibility of validate page (#578) * validate page available for all users * login component moved to another directory, so it's a separate component; page for loggin in added * login form added to the login page * scroll to top moved to login page * link to set up for added * navigateAfter input parameter added, to know if after logging in the user should be redirected or should stay at the same page * logging in form shows up if a non-logged in user tries to start a validation * information about not being logged in added; redirection to login and signup forms added; styling updated * corrected temporal window * [QA4SM-659] Add csv example to user upload guide (#574) * Add csv example to user upload guide * Update testdata manually * [QA4SM-275] Add unit to fixtures, model and netcdf file (#575) * Add unit to fixtures, model and netcdf file * Update test data manually * setLimitationsOnGeographicalRange updated to handle a case when there is an updated on coordinates, but min values are bigger than max or the other way round (#580) * Add units to frontend (#581) * variable units displayed on the variable dropdown list * units added to the validation summary * default unit changed to n.a.; units added to the validation row * [units] added to the dropdown label * acquiring units from user dataset added * displaying units in the user dataset row added * code updated to get units from user dataset file * migration added to include a change of default unit entry * Small improvements (#582) * information for non-logged in user updated * email address added to the main page * Added descending smap data set * Added versions to dataset fixtures * Fix test_validation_tcol, after reader bugfix (status-plots) (#583) * Fix test_validation_tcol, after reader bugfix 
(status-plots) * Fix test_generate_graphs_ismn_metadata * updates done to properly include status plots (#585) * maxdist for datasets (#586) * maxdist for datasets * fix qa4sm-reader at old commit * Intergrate updates to reader package (#589) * Use fixed references in environment * Update tests for new plot settings * Add a field to force generating metadata plots of a validation run * Plot generation based on resp field in validation run object * Update tests and supress some warnings --------- Co-authored-by: Monika Tercjak <57092960+sheenaze@users.noreply.github.com> * Fix uploading form (#590) * handling a situation when soil moisture name can not be acquired added * tooltip updated * starting validation disabled if there is a dataset with no variable assigned chosen * File size limitations (#591) * models updated; proper migration added * migration updated to be consistent with recent updates * model field name updated to better reflect what is stored there; migration updated; property added to know how much space left for the particular user * admin panel updated; api user_view updated to take all the fields and properties into account; * proper updates done to pass space usage information between frontend and backend * message on the storage limit added * verifying file size added * function for getting file size in proper units moved to the userDatasetService, as it will be used in more than one place. * function name changed to better express its purpose; blocking upload of a too big file added. 
* progressbar added to show how much space have been used * my-datasets page layout updated * value in bytes removed * file size limitation introduced in the backend * testdata updated to include test data for upload * testing zipped netcdf and csv added * testing file size limit added * layout updated * File size limitations (#592) * models updated; proper migration added * migration updated to be consistent with recent updates * model field name updated to better reflect what is stored there; migration updated; property added to know how much space left for the particular user * admin panel updated; api user_view updated to take all the fields and properties into account; * proper updates done to pass space usage information between frontend and backend * message on the storage limit added * verifying file size added * function for getting file size in proper units moved to the userDatasetService, as it will be used in more than one place. * function name changed to better express its purpose; blocking upload of a too big file added. 
* progressbar added to show how much space have been used * my-datasets page layout updated * value in bytes removed * file size limitation introduced in the backend * testdata updated to include test data for upload * testing zipped netcdf and csv added * testing file size limit added * layout updated * env updated * Help page update (#593) * models updated; proper migration added * migration updated to be consistent with recent updates * model field name updated to better reflect what is stored there; migration updated; property added to know how much space left for the particular user * admin panel updated; api user_view updated to take all the fields and properties into account; * proper updates done to pass space usage information between frontend and backend * message on the storage limit added * verifying file size added * function for getting file size in proper units moved to the userDatasetService, as it will be used in more than one place. * function name changed to better express its purpose; blocking upload of a too big file added. 
* progressbar added to show how much space have been used * my-datasets page layout updated * value in bytes removed * file size limitation introduced in the backend * testdata updated to include test data for upload * testing zipped netcdf and csv added * testing file size limit added * layout updated * env updated * help page updated * File size limitations (#594) * models updated; proper migration added * migration updated to be consistent with recent updates * model field name updated to better reflect what is stored there; migration updated; property added to know how much space left for the particular user * admin panel updated; api user_view updated to take all the fields and properties into account; * proper updates done to pass space usage information between frontend and backend * message on the storage limit added * verifying file size added * function for getting file size in proper units moved to the userDatasetService, as it will be used in more than one place. * function name changed to better express its purpose; blocking upload of a too big file added. 
* progressbar added to show how much space have been used * my-datasets page layout updated * value in bytes removed * file size limitation introduced in the backend * testdata updated to include test data for upload * testing zipped netcdf and csv added * testing file size limit added * layout updated * env updated * help page updated * a bug fixed which caused that admin users couldn't upload data * printing removed * Update qa4sm_env.yml env updated * Update qa4sm_env.yml proper commit introduced * there is no distinction between status plot for tc and non tc validation (#595) * Fix styling issues (#596) * not needed printing removed * validation comparison updated to get variables * default metric and default boxplot set * env updated (#597) * Last improvements (#598) * Beleow => Above, since there was some rearangmenet done * setting proper id removed to check if it causes issues * not needed code removed * Last improvements (#599) * Beleow => Above, since there was some rearangmenet done * setting proper id removed to check if it causes issues * not needed code removed * id removed * id removed * Last improvements (#600) * Beleow => Above, since there was some rearangmenet done * setting proper id removed to check if it causes issues * not needed code removed * id removed * id removed * return version retrieved * Last improvements (#601) * Beleow => Above, since there was some rearangmenet done * setting proper id removed to check if it causes issues * not needed code removed * id removed * id removed * return version retrieved * id workaround retrieved as this is not the part that causes saving dataset twice * not needed printing removed * printing added * handling duplicated response added (#602) * Fixing double request (#603) * handling duplicated response added * some printing added * preprocessing and saving metadata form separated (#604) * Separate file preprocessing (#605) * preprocessing and saving metadata form separated * get back to the previous 
approach * cleaning redundant datasets added * problems fixed (#606) * Fix upload (#607) * problems fixed * get back to the first version * Duplicating datasets workoaround (#608) * problems fixed * get back to the first version * printing added to see if it gets proper datasets * Duplicating datasets workaround (#609) * problems fixed * get back to the first version * printing added to see if it gets proper datasets * sending data retrieved in original form * Duplicating datasets workaround (#610) * problems fixed * get back to the first version * printing added to see if it gets proper datasets * sending data retrieved in original form * cleaning redundant datasets added * printing removed * cleaning redundant datasets added (#611) * printing added (#612) * order of deleting changed (#618) * Upload file and metadata separately (#619) * order of deleting changed * workaround to avoid duplicating datasets applied * Frm4sm release 2 deployment (#620) * environment updated * docker compose, release notes and version updated * release note updated * release note updated * Final upload for release (#638) * printing added (#621) * Upload workaround (#622) * printing added * not needed printing removed; information about session printed * Upload workaround (#623) * printing added * not needed printing removed; information about session printed * field metadata_submitted and proper migration added, to be able to check if the file preprocessing started * setting metdata_submitted to True added to the proper view * response code changed to 202 along with a message to be more precise (#624) * Upload fix (#625) * response code changed to 202 along with a message to be more precise * handling metadata response updated * Upload fix (#626) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * Upload fix (#627) * 
response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * get back to code 202 * Upload fix (#628) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * get back to code 202 * some printing added * Upload workaround (#629) * printing added * not needed printing removed; information about session printed * field metadata_submitted and proper migration added, to be able to check if the file preprocessing started * setting metdata_submitted to True added to the proper view * Upload fix (#630) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * get back to code 202 * some printing added * some printing added * Upload fix (#631) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * get back to code 202 * some printing added * some printing added * different way of handling 202 added * Upload fix (#632) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another status code verified * condition removed as not needed * get back to code 202 * some printing added * some printing added * different way of handling 202 added * waiting for the proper response added * Upload fix (#633) * response code changed to 202 along with a message to be more precise * handling metadata response updated * dealing with duplicated request updated * another 
status code verified * condition removed as not needed * get back to code 202 * some printing added * some printing added * different way of handling 202 added * waiting for the proper response added * not needed printing removed; checking for duplicated db entries commented * frontend retrieve in the original form * decorator added to skip the view * when the second request comes, the metadata is not updated (#634) * Try to handle response (#635) * when the second request comes, the metadata is not updated * workaround applied * Try to handle response (#636) * when the second request comes, the metadata is not updated * workaround applied * printing added * first request verification removed * Try to handle response (#637) * when the second request comes, the metadata is not updated * workaround applied * printing added * first request verification removed * printing added * printing removed --------- Co-authored-by: Pietro Stradiotti Co-authored-by: pstradio <77664155+pstradio@users.noreply.github.com> Co-authored-by: Samuel Scherrer Co-authored-by: Wolfgang Preimesberger Co-authored-by: Samuel Scherrer Co-authored-by: daberer <47424802+daberer@users.noreply.github.com> --- .github/workflows/test.yml | 1 + UI/package-lock.json | 22 +- UI/package.json | 2 +- UI/src/app/app-routing.module.ts | 10 +- UI/src/app/app.module.ts | 21 +- .../anom-climatology.component.ts | 26 +- .../validation-selector.component.html | 5 +- .../validation-selector.component.ts | 48 +- .../core/services/auth/auth.service.ts | 5 +- .../modules/core/services/auth/user.dto.ts | 3 + .../services/dataset/dataset-variable.dto.ts | 3 +- .../core/services/dataset/dataset.dto.ts | 5 +- .../core/services/dataset/dataset.resolver.ts | 2 +- .../core/services/dataset/dataset.service.ts | 17 +- .../validation-run/validationrun.dto.ts | 7 +- .../validation-run/validationrun.service.ts | 14 +- .../components/dataset/dataset.component.html | 32 +- .../dataset/dataset.component.spec.ts | 11 +- 
.../components/dataset/dataset.component.ts | 67 +- UI/src/app/modules/dataset/dataset.module.ts | 6 +- .../components/metrics/metrics.component.ts | 4 +- .../navigation-bar.component.ts | 4 +- .../components/scaling/scaling-methods.dto.ts | 7 + .../components/scaling/scaling-model.ts | 8 +- .../components/scaling/scaling.component.html | 79 +- .../components/scaling/scaling.component.ts | 143 ++-- .../user-data-row.component.html | 158 ++++ .../user-data-row.component.scss | 32 + .../user-data-row.component.spec.ts | 25 + .../user-data-row/user-data-row.component.ts | 135 ++++ .../user-file-upload.component.html | 128 +++ .../user-file-upload.component.scss | 83 ++ .../user-file-upload.component.spec.ts | 25 + .../user-file-upload.component.ts | 148 ++++ .../services/allowed-name.directive.ts | 26 + .../services/user-data-file.dto.ts | 16 + .../services/user-datasets.service.spec.ts | 16 + .../services/user-datasets.service.ts | 87 ++ .../user-datasets/user-datasets.module.ts | 37 + .../modules/user/login/login.component.html | 35 + .../user}/login/login.component.scss | 0 .../user}/login/login.component.spec.ts | 4 +- .../user}/login/login.component.ts | 15 +- .../validation-reference/reference-model.ts | 9 + .../validation-reference.component.html | 35 + .../validation-reference.component.scss | 0 .../validation-reference.component.spec.ts | 25 + .../validation-reference.component.ts | 65 ++ .../validation-reference.module.ts | 19 + .../components/buttons/buttons.component.html | 12 +- .../buttons/buttons.component.spec.ts | 2 +- .../result-files/result-files.component.html | 5 +- .../result-files.component.spec.ts | 2 +- .../result-files/result-files.component.ts | 13 +- .../sorting-form/sorting-form.component.ts | 4 +- .../summary-statistics.component.html | 4 +- .../summary-statistics.component.spec.ts | 2 +- .../summary-statistics.component.ts | 2 +- .../validation-summary.component.html | 49 +- .../validation-summary.component.spec.ts | 2 +- 
.../validation-summary.component.ts | 17 +- .../validationrun-row.component.html | 31 +- .../validationrun-row.component.spec.ts | 2 +- .../validationrun-row.component.ts | 11 +- .../services/dataset-configuration.dto.ts | 5 +- .../dataset-info/dataset-info.component.html | 28 +- .../dataset-info.component.spec.ts | 16 +- UI/src/app/pages/help/help.component.html | 105 ++- UI/src/app/pages/help/help.component.ts | 36 +- UI/src/app/pages/home/home.component.html | 1 + UI/src/app/pages/home/home.component.ts | 1 - .../login-page/login-page.component.html | 4 + .../login-page/login-page.component.scss | 0 .../login-page/login-page.component.spec.ts | 25 + .../pages/login-page/login-page.component.ts | 15 + UI/src/app/pages/login/login.component.html | 36 - .../my-datasets/my-datasets.component.html | 82 ++ .../my-datasets/my-datasets.component.scss | 9 + .../my-datasets/my-datasets.component.spec.ts | 25 + .../my-datasets/my-datasets.component.ts | 46 ++ .../user-data-guidelines.component.html | 405 ++++++++++ .../user-data-guidelines.component.scss | 35 + .../user-data-guidelines.component.spec.ts | 25 + .../user-data-guidelines.component.ts | 16 + .../user-profile/user-profile.component.html | 1 + .../pages/validate/dataset-config-model.ts | 19 +- .../service/validation-run-config-dto.ts | 7 +- .../service/validation-run-config.service.ts | 31 +- .../pages/validate/validate.component.html | 161 ++-- .../pages/validate/validate.component.scss | 8 + .../app/pages/validate/validate.component.ts | 343 +++++--- UI/src/app/pages/validate/validation-model.ts | 5 +- UI/src/primeng-style.scss | 54 +- UI/src/styles.scss | 6 + UI/tsconfig.json | 3 +- api/frontend_urls.py | 3 +- api/tests/test_comparison.py | 2 +- api/tests/test_data_filter_view.py | 6 +- api/tests/test_dataset_variable_view.py | 6 +- api/tests/test_dataset_version_view.py | 5 +- api/tests/test_dataset_view.py | 2 +- api/tests/test_helper.py | 38 +- api/tests/test_upload_user_data_view.py | 554 +++++++++++++ 
api/tests/test_validation_config_view.py | 149 ++-- api/urls.py | 31 +- api/variable_and_field_names.py | 13 + api/views/auxiliary_functions.py | 17 +- api/views/dataset_configuration_view.py | 9 +- api/views/dataset_variable_view.py | 1 + api/views/dataset_view.py | 17 +- api/views/serving_file_view.py | 29 +- api/views/upload_user_data_view.py | 382 +++++++++ api/views/user_view.py | 7 +- api/views/validation_config_view.py | 95 ++- api/views/validation_run_view.py | 12 +- docker/compose/prod/docker-compose.yml | 14 +- docker/compose/test-2/docker-compose.yml | 6 + docker/compose/test/docker-compose.yml | 6 + docs/developers_guide.md | 2 +- environment/create_conda_env.sh | 10 +- environment/qa4sm_env.yml | 10 +- init_config.sh | 7 +- release-notes.md | 36 +- settings_example_conf.py | 1 + testdata | 2 +- valentina/version.py | 3 +- validator/admin/__init__.py | 7 +- validator/admin/custom_user_admin.py | 2 +- validator/admin/dataset_custom.py | 14 + validator/admin/dataset_variable_custom.py | 11 + validator/admin/dataset_version_custom.py | 11 + validator/admin/general.py | 12 +- validator/admin/user_data_file.py | 7 + validator/admin/validation_run.py | 13 +- validator/fixtures/datasets.json | 162 +++- validator/fixtures/filters.json | 144 ++++ validator/fixtures/networks.json | 201 ++--- validator/fixtures/variables.json | 104 ++- validator/fixtures/versions.json | 72 ++ validator/forms/data_configuration.py | 2 +- validator/forms/publishing.py | 2 +- validator/forms/results_sorting.py | 4 +- validator/mailer.py | 185 +++-- .../management/commands/setdatasetpaths.py | 4 +- .../migrations/0046_auto_20220825_1208.py | 34 + .../0047_alter_userdatasetfile_file.py | 20 + .../migrations/0048_auto_20220830_1213.py | 35 + .../migrations/0049_auto_20220928_1559.py | 52 ++ .../0050_alter_datavariable_pretty_name.py | 18 + .../migrations/0051_auto_20221115_1203.py | 25 + .../migrations/0052_auto_20221115_1418.py | 35 + ..._reference_dataset_is_spatial_reference.py | 18 
+ .../migrations/0054_auto_20221122_1150.py | 44 + ...0055_alter_validationrun_scaling_method.py | 18 + .../0056_alter_datasetversion_pretty_name.py | 18 + .../migrations/0056_auto_20230118_1116.py | 25 + .../migrations/0057_merge_20230120_1548.py | 14 + .../migrations/0058_auto_20230126_1216.py | 29 + .../migrations/0059_datavariable_unit.py | 18 + .../0060_alter_datavariable_unit.py | 18 + .../0061_validationrun_plots_save_metadata.py | 18 + .../migrations/0062_auto_20230223_1525.py | 32 + ...0063_userdatasetfile_metadata_submitted.py | 18 + validator/models/__init__.py | 1 + validator/models/custom_user.py | 26 + validator/models/dataset.py | 24 +- validator/models/dataset_configuration.py | 4 +- validator/models/user_dataset_file.py | 73 ++ validator/models/validation_run.py | 53 +- validator/models/variable.py | 3 +- validator/models/version.py | 2 +- validator/static/images/help/chosen_file.png | Bin 0 -> 8121 bytes validator/static/images/help/data_row.png | Bin 0 -> 31622 bytes .../images/help/data_set_selections.png | Bin 51810 -> 49457 bytes .../static/images/help/intercomparison.png | Bin 63009 -> 71724 bytes .../static/images/help/metadata_window.png | Bin 0 -> 45307 bytes .../static/images/help/my_validations.png | Bin 122168 -> 154492 bytes .../help/reference_data_set_selections.png | Bin 34823 -> 26383 bytes .../static/images/help/results_graphs.png | Bin 149654 -> 96977 bytes .../static/images/help/results_overview.png | Bin 115059 -> 173061 bytes validator/static/images/help/scaling.png | Bin 14534 -> 31598 bytes validator/static/images/help/select_file.png | Bin 0 -> 29079 bytes .../static/images/help/upload_file_window.png | Bin 0 -> 20422 bytes .../static/images/help/uploading_spinner.png | Bin 0 -> 109843 bytes .../images/help/user_data_on_the_list.png | Bin 0 -> 52008 bytes validator/tests/auxiliary_functions.py | 136 +++- validator/tests/test_doi.py | 6 +- validator/tests/test_filtering.py | 81 +- validator/tests/test_hacks.py | 22 +- 
validator/tests/test_mailer.py | 5 +- validator/tests/test_models.py | 14 +- validator/tests/test_validation.py | 759 +++++++++++++----- validator/tests/test_validity.py | 17 +- validator/validation/batches.py | 33 +- validator/validation/filters.py | 111 ++- validator/validation/globals.py | 33 +- validator/validation/graphics.py | 14 +- validator/validation/readers.py | 73 +- validator/validation/validation.py | 126 +-- 199 files changed, 6445 insertions(+), 1478 deletions(-) create mode 100644 UI/src/app/modules/scaling/components/scaling/scaling-methods.dto.ts create mode 100644 UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.html create mode 100644 UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.scss create mode 100644 UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.spec.ts create mode 100644 UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.ts create mode 100644 UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.html create mode 100644 UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.scss create mode 100644 UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.spec.ts create mode 100644 UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.ts create mode 100644 UI/src/app/modules/user-datasets/services/allowed-name.directive.ts create mode 100644 UI/src/app/modules/user-datasets/services/user-data-file.dto.ts create mode 100644 UI/src/app/modules/user-datasets/services/user-datasets.service.spec.ts create mode 100644 UI/src/app/modules/user-datasets/services/user-datasets.service.ts create mode 100644 UI/src/app/modules/user-datasets/user-datasets.module.ts create mode 100644 UI/src/app/modules/user/login/login.component.html rename UI/src/app/{pages => 
modules/user}/login/login.component.scss (100%) rename UI/src/app/{pages => modules/user}/login/login.component.spec.ts (87%) rename UI/src/app/{pages => modules/user}/login/login.component.ts (69%) create mode 100644 UI/src/app/modules/validation-reference/components/validation-reference/reference-model.ts create mode 100644 UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.html create mode 100644 UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.scss create mode 100644 UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.spec.ts create mode 100644 UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.ts create mode 100644 UI/src/app/modules/validation-reference/validation-reference.module.ts create mode 100644 UI/src/app/pages/login-page/login-page.component.html create mode 100644 UI/src/app/pages/login-page/login-page.component.scss create mode 100644 UI/src/app/pages/login-page/login-page.component.spec.ts create mode 100644 UI/src/app/pages/login-page/login-page.component.ts delete mode 100644 UI/src/app/pages/login/login.component.html create mode 100644 UI/src/app/pages/my-datasets/my-datasets.component.html create mode 100644 UI/src/app/pages/my-datasets/my-datasets.component.scss create mode 100644 UI/src/app/pages/my-datasets/my-datasets.component.spec.ts create mode 100644 UI/src/app/pages/my-datasets/my-datasets.component.ts create mode 100644 UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.html create mode 100644 UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.scss create mode 100644 UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.spec.ts create mode 100644 UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.ts create mode 100644 api/tests/test_upload_user_data_view.py 
create mode 100644 api/variable_and_field_names.py create mode 100644 api/views/upload_user_data_view.py create mode 100644 validator/admin/dataset_custom.py create mode 100644 validator/admin/dataset_variable_custom.py create mode 100644 validator/admin/dataset_version_custom.py create mode 100644 validator/admin/user_data_file.py create mode 100644 validator/migrations/0046_auto_20220825_1208.py create mode 100644 validator/migrations/0047_alter_userdatasetfile_file.py create mode 100644 validator/migrations/0048_auto_20220830_1213.py create mode 100644 validator/migrations/0049_auto_20220928_1559.py create mode 100644 validator/migrations/0050_alter_datavariable_pretty_name.py create mode 100644 validator/migrations/0051_auto_20221115_1203.py create mode 100644 validator/migrations/0052_auto_20221115_1418.py create mode 100644 validator/migrations/0053_rename_is_only_reference_dataset_is_spatial_reference.py create mode 100644 validator/migrations/0054_auto_20221122_1150.py create mode 100644 validator/migrations/0055_alter_validationrun_scaling_method.py create mode 100644 validator/migrations/0056_alter_datasetversion_pretty_name.py create mode 100644 validator/migrations/0056_auto_20230118_1116.py create mode 100644 validator/migrations/0057_merge_20230120_1548.py create mode 100644 validator/migrations/0058_auto_20230126_1216.py create mode 100644 validator/migrations/0059_datavariable_unit.py create mode 100644 validator/migrations/0060_alter_datavariable_unit.py create mode 100644 validator/migrations/0061_validationrun_plots_save_metadata.py create mode 100644 validator/migrations/0062_auto_20230223_1525.py create mode 100644 validator/migrations/0063_userdatasetfile_metadata_submitted.py create mode 100644 validator/models/user_dataset_file.py create mode 100644 validator/static/images/help/chosen_file.png create mode 100644 validator/static/images/help/data_row.png create mode 100644 validator/static/images/help/metadata_window.png create mode 100644 
validator/static/images/help/select_file.png create mode 100644 validator/static/images/help/upload_file_window.png create mode 100644 validator/static/images/help/uploading_spinner.png create mode 100644 validator/static/images/help/user_data_on_the_list.png diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 127857341..b48c6bbaf 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,6 +9,7 @@ name: tests on: push: pull_request: + workflow_dispatch: schedule: # only upstream, won't trigger on forks! - cron: '0 0 * * *' # daily diff --git a/UI/package-lock.json b/UI/package-lock.json index e7fe1d459..6edfeaf76 100644 --- a/UI/package-lock.json +++ b/UI/package-lock.json @@ -4392,7 +4392,7 @@ "code-point-at": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "integrity": "sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==", "dev": true }, "codelyzer": { @@ -4689,7 +4689,7 @@ "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", "dev": true }, "constants-browserify": { @@ -5489,7 +5489,7 @@ "delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", "dev": true }, "depd": { @@ -6984,7 +6984,7 @@ "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", + "integrity": 
"sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", "dev": true }, "has-value": { @@ -9601,7 +9601,7 @@ "number-is-nan": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "integrity": "sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==", "dev": true }, "oauth-sign": { @@ -10643,14 +10643,14 @@ "integrity": "sha512-t6AG3iRI1rh04uI+9nC4JSlbKfA8PnpRDFtjyqi3rDSLRgdfOuhIiunJbjVQ4alnnCfuQAAbVyde1AKN9QxT/w==" }, "primeicons": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/primeicons/-/primeicons-4.1.0.tgz", - "integrity": "sha512-uEv2pSPk1zQCfaB2VgnUfnUxxlGryYi+5rbdxmZBBt5v9S/pscIQYS5YDLxsQZ7D9jn5c76+Tx5wX/2J1nK6sA==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/primeicons/-/primeicons-5.0.0.tgz", + "integrity": "sha512-heygWF0X5HFI1otlZE62pp6ye7sZ8om78J9au2BRkg8O7Y8AHTZ9qKMRzchZUHLe8zUAvdi6hZzzm9XxgwIExw==" }, "primeng": { - "version": "11.2.3", - "resolved": "https://registry.npmjs.org/primeng/-/primeng-11.2.3.tgz", - "integrity": "sha512-8elRAGal8a+qXJ4egRKXU+bUvIyfCxsiCerXgOPbwbo/TU/DBK7WBXGGGi6KJOamFqClAqj/FO3WLAdofKQSRQ==", + "version": "11.4.5", + "resolved": "https://registry.npmjs.org/primeng/-/primeng-11.4.5.tgz", + "integrity": "sha512-7f5LDHrvFsJA4670Ftmib5ndDxTqcaQiM88XXJrjWYNGjXsXT3Yc5g9fgPvDrg2D38/jjpcSYeW9kalNcvlbrQ==", "requires": { "tslib": "^2.0.0" } diff --git a/UI/package.json b/UI/package.json index 12463c3d8..c74fda24e 100644 --- a/UI/package.json +++ b/UI/package.json @@ -47,7 +47,7 @@ "ngx-pagination": "^5.0.0", "ol": "^6.5.0", "primeflex": "^2.0.0", - "primeicons": "^4.1.0", + "primeicons": "^5.0.0", "primeng": "^11.2.3", "proj4": "^2.7.2", "rxjs": "~6.6.0", diff --git a/UI/src/app/app-routing.module.ts b/UI/src/app/app-routing.module.ts index 631966fdf..346c4fda5 100644 --- a/UI/src/app/app-routing.module.ts +++ 
b/UI/src/app/app-routing.module.ts @@ -5,7 +5,6 @@ import {ValidateComponent} from './pages/validate/validate.component'; import {ErrorComponent} from './pages/error/error.component'; import {AuthGuard} from './auth.guard'; import {ValidationResultComponent} from './pages/validation-result/validation-result.component'; -import {LoginComponent} from './pages/login/login.component'; import {UserProfileComponent} from './pages/user-profile/user-profile.component'; import {PublishedValidationsComponent} from './pages/published-validations/published-validations.component'; import {ValidationsComponent} from './pages/validations/validations.component'; @@ -24,13 +23,16 @@ import {SetPasswordComponent} from './pages/set-password/set-password.component' import { PasswordResetValidateTokenComponent } from './pages/password-reset-validate-token/password-reset-validate-token.component'; +import {MyDatasetsComponent} from './pages/my-datasets/my-datasets.component'; +import {UserDataGuidelinesComponent} from './pages/user-data-guidelines/user-data-guidelines.component'; +import {LoginPageComponent} from './pages/login-page/login-page.component'; const routes: Routes = [ {path: '', redirectTo: '/home', pathMatch: 'full'}, {path: 'home', component: HomeComponent}, - {path: 'login', component: LoginComponent}, - {path: 'validate', component: ValidateComponent, canActivate: [AuthGuard], resolve: {datasets: DatasetResolver}}, + {path: 'login', component: LoginPageComponent}, + {path: 'validate', component: ValidateComponent, resolve: {datasets: DatasetResolver}}, {path: 'validation-result/:validationId', component: ValidationResultComponent}, {path: 'my-validations', component: ValidationsComponent, canActivate: [AuthGuard]}, {path: 'user-profile', component: UserProfileComponent, canActivate: [AuthGuard]}, @@ -47,6 +49,8 @@ const routes: Routes = [ {path: 'password-reset-done', component: PasswordResetDoneComponent}, {path: 'password-reset/:token', component: 
PasswordResetValidateTokenComponent}, {path: 'set-password', component: SetPasswordComponent}, + {path: 'my-datasets', component: MyDatasetsComponent, canActivate: [AuthGuard]}, + {path: 'user-data-guidelines', component: UserDataGuidelinesComponent}, {path: '**', component: ErrorComponent} ]; diff --git a/UI/src/app/app.module.ts b/UI/src/app/app.module.ts index 8b4052fa0..afdba2263 100644 --- a/UI/src/app/app.module.ts +++ b/UI/src/app/app.module.ts @@ -14,7 +14,7 @@ import {ErrorComponent} from './pages/error/error.component'; import {ValidationsComponent} from './pages/validations/validations.component'; import {ValidationResultComponent} from './pages/validation-result/validation-result.component'; -import {LoginComponent} from './pages/login/login.component'; +import {LoginComponent} from './modules/user/login/login.component'; import {HTTP_INTERCEPTORS, HttpClientModule} from '@angular/common/http'; import {HttpTokenInterceptor} from './modules/core/interceptors/http-token.interceptor'; import {LoggerModule, NgxLoggerLevel} from 'ngx-logger'; @@ -52,7 +52,7 @@ import {ComparisonModule} from './modules/comparison/comparison.module'; import { TemporalMatchingComponent } from './modules/temporal-matching/components/temporal-matching/temporal-matching.component'; -import {TemporalMatchingModule} from "./modules/temporal-matching/temporal-matching.module"; +import {TemporalMatchingModule} from './modules/temporal-matching/temporal-matching.module'; import {HelpComponent} from './pages/help/help.component'; import {IvyGalleryModule} from 'angular-gallery'; import {NgxPageScrollModule} from 'ngx-page-scroll'; @@ -68,7 +68,13 @@ import { PasswordResetValidateTokenComponent } from './pages/password-reset-validate-token/password-reset-validate-token.component'; import {InputNumberModule} from 'primeng/inputnumber'; - +import {MyDatasetsComponent} from './pages/my-datasets/my-datasets.component'; +import {UserDatasetsModule} from 
'./modules/user-datasets/user-datasets.module'; +import {ValidationReferenceModule} from './modules/validation-reference/validation-reference.module'; +import {UserDataGuidelinesComponent} from './pages/user-data-guidelines/user-data-guidelines.component'; +import {DialogModule} from 'primeng/dialog'; +import {LoginPageComponent} from './pages/login-page/login-page.component'; +import {ProgressBarModule} from 'primeng/progressbar'; @NgModule({ declarations: [ @@ -94,6 +100,9 @@ import {InputNumberModule} from 'primeng/inputnumber'; SetPasswordComponent, PasswordResetValidateTokenComponent, TemporalMatchingComponent, + MyDatasetsComponent, + UserDataGuidelinesComponent, + LoginPageComponent, ], imports: [ LoggerModule.forRoot({level: NgxLoggerLevel.DEBUG, serverLogLevel: NgxLoggerLevel.ERROR}), @@ -132,7 +141,11 @@ import {InputNumberModule} from 'primeng/inputnumber'; NgxPageScrollModule, CoreModule, UserModule, - InputNumberModule + InputNumberModule, + UserDatasetsModule, + ValidationReferenceModule, + DialogModule, + ProgressBarModule, ], providers: [ { diff --git a/UI/src/app/modules/anomalies/components/anom-climatology/anom-climatology.component.ts b/UI/src/app/modules/anomalies/components/anom-climatology/anom-climatology.component.ts index b59cda874..5696688d6 100644 --- a/UI/src/app/modules/anomalies/components/anom-climatology/anom-climatology.component.ts +++ b/UI/src/app/modules/anomalies/components/anom-climatology/anom-climatology.component.ts @@ -1,5 +1,6 @@ import {Component, Input, OnInit} from '@angular/core'; import {AnomaliesModel} from '../anomalies/anomalies-model'; +import {BehaviorSubject} from 'rxjs'; @Component({ selector: 'qa-anom-climatology', @@ -9,6 +10,7 @@ import {AnomaliesModel} from '../anomalies/anomalies-model'; export class AnomClimatologyComponent implements OnInit { public minYear = 1971; public maxYear = 2100; + public yearFrom$ = new BehaviorSubject(this.minYear); @Input() anomaliesModel: AnomaliesModel; @@ -24,18 +26,26 @@ 
export class AnomClimatologyComponent implements OnInit { getYearFrom(): number { let year: number; - this.anomaliesModel.anomaliesFrom$.subscribe(date => { - date ? year = date.getFullYear() : year = this.minYear; - }); + if (this.anomaliesModel.anomaliesFrom$.getValue()){ + year = this.anomaliesModel.anomaliesFrom$.getValue().getFullYear(); + } else { + year = this.minYear; + this.anomaliesModel.anomaliesFrom$.next(this.setDate(year)); + } + return year; } getYearTo(): number { - let year: number; - this.anomaliesModel.anomaliesTo$.subscribe(date => { - date ? year = date.getFullYear() : year = (new Date()).getFullYear(); - }); - return year; + let year: Date; + + if (this.anomaliesModel.anomaliesTo$.getValue()){ + year = this.anomaliesModel.anomaliesTo$.getValue(); + } else { + year = (new Date()); + this.anomaliesModel.anomaliesTo$.next(year); + } + return year.getFullYear(); } } diff --git a/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.html b/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.html index 6d2aca845..6b533930b 100644 --- a/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.html +++ b/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.html @@ -4,18 +4,17 @@ - + diff --git a/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.ts b/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.ts index 97f3443bf..4d6c876d3 100644 --- a/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.ts +++ b/UI/src/app/modules/comparison/components/validation-selector/validation-selector.component.ts @@ -10,6 +10,8 @@ import {ValidationrunDto} from '../../../core/services/validation-run/validation import {ExtentModel} from '../spatial-extent/extent-model'; import {ComparisonService} from 
'../../services/comparison.service'; import {ToastService} from '../../../core/services/toast/toast.service'; +import {BehaviorSubject} from 'rxjs'; +import {DatasetVariableService} from '../../../core/services/dataset/dataset-variable.service'; const N_MAX_VALIDATIONS = 2; // A maximum of two validation results can be compared, at the moment - this shouldn't be hardcoded @@ -39,7 +41,8 @@ export class ValidationSelectorComponent implements OnInit { private versionService: DatasetVersionService, private validationrunService: ValidationrunService, private comparisonService: ComparisonService, - private toastService: ToastService) { + private toastService: ToastService, + private datasetVariableService: DatasetVariableService) { } ngOnInit(): void { @@ -51,30 +54,45 @@ export class ValidationSelectorComponent implements OnInit { } - onDatasetChange(): void{ + onDatasetChange(): void { this.getValidations4comparison(); } private selectDataset(selected: DatasetConfigModel[]): void { const model = new DatasetConfigModel( - new DatasetComponentSelectionModel(null, null, null), - null, null, null, null); + new DatasetComponentSelectionModel( + null, + null, + null), + null, + null, + null, + null, + null, + new BehaviorSubject(false), + new BehaviorSubject(false), + new BehaviorSubject(false)); + selected.push(model); // get all datasets - this.datasetService.getAllDatasets().subscribe(datasets => { + this.datasetService.getAllDatasets(true).subscribe(datasets => { model.datasetModel.selectedDataset = datasets.find(dataset => dataset.short_name === 'ISMN'); this.selectValidationLabel = 'Wait for validations to be loaded'; // then get all versions for the first dataset in the result list this.versionService.getVersionsByDataset(model.datasetModel.selectedDataset.id).subscribe(versions => { model.datasetModel.selectedVersion = versions.find(version => version.pretty_name === '20210131 global'); - this.getValidations4comparison(String( 
model.datasetModel.selectedDataset.short_name), + this.getValidations4comparison(String(model.datasetModel.selectedDataset.short_name), String(model.datasetModel.selectedVersion.short_name)); + // get all variables + this.datasetVariableService.getVariablesByDataset(model.datasetModel.selectedDataset.id).subscribe(variables => { + model.datasetModel.selectedVariable = variables[0]; + }); }); }); } - getValidations4comparison(refDataset?, refVersion?): void{ - if (!refDataset && !refVersion){ + getValidations4comparison(refDataset?, refVersion?): void { + if (!refDataset && !refVersion) { refDataset = String(this.selectedDatasetModel[0].datasetModel.selectedDataset.short_name); refVersion = String(this.selectedDatasetModel[0].datasetModel.selectedVersion.short_name); } @@ -85,11 +103,11 @@ export class ValidationSelectorComponent implements OnInit { .set('max_datasets', String(this.checkbox2NonReferenceNumber())); this.selectValidationLabel = 'Wait for validations to be loaded'; this.validationrunService.getValidationsForComparison(parameters).subscribe(response => { - if (response){ + if (response) { this.validations4Comparison = response; this.selectedValidation = response[0]; this.selectValidationLabel = 'Select a validation'; - } else{ + } else { this.validations4Comparison = []; this.selectValidationLabel = 'There are no validations available'; } @@ -102,10 +120,10 @@ export class ValidationSelectorComponent implements OnInit { this.getValidations4comparison(); } - checkbox2NonReferenceNumber(): number{ + checkbox2NonReferenceNumber(): number { // convert the checkbox boolean selection to number of non-references this.comparisonModel.selectedValidations = []; // empty the selection in case the button is clicked - if (this.multipleNonReference){ + if (this.multipleNonReference) { return 2; } return 1; @@ -113,7 +131,7 @@ export class ValidationSelectorComponent implements OnInit { addValidationButtonDisabled(): boolean { // if the checkbox has been toggled - 
this shouldn't be hardcoded - if (this.multipleNonReference){ + if (this.multipleNonReference) { return this.comparisonModel.selectedValidations.length >= 1; } @@ -154,7 +172,7 @@ export class ValidationSelectorComponent implements OnInit { if (this.multipleNonReference) { return true; } else { - return !this.checkOverlapping(); + return !this.checkOverlapping(); } } @@ -163,7 +181,7 @@ export class ValidationSelectorComponent implements OnInit { this.comparisonModel.getIntersection = isNotChecked; } - startComparison(): void{ + startComparison(): void { // should start the comparison if (this.comparisonModel.selectedValidations.length === 0 || (this.comparisonModel.selectedValidations.length === 1 && !this.comparisonModel.multipleNonReference)) { diff --git a/UI/src/app/modules/core/services/auth/auth.service.ts b/UI/src/app/modules/core/services/auth/auth.service.ts index b3f3d7878..906570298 100644 --- a/UI/src/app/modules/core/services/auth/auth.service.ts +++ b/UI/src/app/modules/core/services/auth/auth.service.ts @@ -32,7 +32,10 @@ export class AuthService { last_name: '', organisation: '', country: '', - orcid: '' + orcid: '', + space_limit: '', + space_limit_value: null, + space_left: null }; public authenticated: BehaviorSubject = new BehaviorSubject(false); public currentUser: UserDto = this.emptyUser; diff --git a/UI/src/app/modules/core/services/auth/user.dto.ts b/UI/src/app/modules/core/services/auth/user.dto.ts index 31c1ad134..f8247f557 100644 --- a/UI/src/app/modules/core/services/auth/user.dto.ts +++ b/UI/src/app/modules/core/services/auth/user.dto.ts @@ -8,4 +8,7 @@ export interface UserDto{ organisation: string; country: string; orcid: string; + space_limit: string; + space_limit_value: number; + space_left: number; } diff --git a/UI/src/app/modules/core/services/dataset/dataset-variable.dto.ts b/UI/src/app/modules/core/services/dataset/dataset-variable.dto.ts index b41512a84..e91c97caf 100644 --- 
a/UI/src/app/modules/core/services/dataset/dataset-variable.dto.ts +++ b/UI/src/app/modules/core/services/dataset/dataset-variable.dto.ts @@ -4,6 +4,7 @@ constructor(public id: number, public pretty_name: string, public help_text: string, public min_value: number, - public max_value: number) { + public max_value: number, + public unit: string) { } } diff --git a/UI/src/app/modules/core/services/dataset/dataset.dto.ts b/UI/src/app/modules/core/services/dataset/dataset.dto.ts index 089f9c900..4d3015a65 100644 --- a/UI/src/app/modules/core/services/dataset/dataset.dto.ts +++ b/UI/src/app/modules/core/services/dataset/dataset.dto.ts @@ -7,10 +7,11 @@ export class DatasetDto { public detailed_description: string, public source_reference: string, public citation: string, - public is_only_reference: boolean, + public is_spatial_reference: boolean, public versions: number[], public variables: number[], public filters: number[], - public not_as_reference: boolean) { + public not_as_reference: boolean, + public user: string) { } } diff --git a/UI/src/app/modules/core/services/dataset/dataset.resolver.ts b/UI/src/app/modules/core/services/dataset/dataset.resolver.ts index 5dc9a9bbb..44200fef2 100644 --- a/UI/src/app/modules/core/services/dataset/dataset.resolver.ts +++ b/UI/src/app/modules/core/services/dataset/dataset.resolver.ts @@ -13,6 +13,6 @@ export class DatasetResolver implements Resolve { resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable { const name = route.queryParamMap.get('cucc'); - return this.datasetService.getAllDatasets(); + return this.datasetService.getAllDatasets(true); } } diff --git a/UI/src/app/modules/core/services/dataset/dataset.service.ts b/UI/src/app/modules/core/services/dataset/dataset.service.ts index 5c2034af2..62ceff4f7 100644 --- a/UI/src/app/modules/core/services/dataset/dataset.service.ts +++ b/UI/src/app/modules/core/services/dataset/dataset.service.ts @@ -1,5 +1,5 @@ import {Injectable} from '@angular/core'; 
-import {HttpClient} from '@angular/common/http'; +import {HttpClient, HttpParams} from '@angular/common/http'; import {DatasetDto} from './dataset.dto'; import {Observable} from 'rxjs'; import {environment} from '../../../../../environments/environment'; @@ -8,26 +8,31 @@ import {DataCache} from '../../tools/DataCache'; const datasetUrl: string = environment.API_URL + 'api/dataset'; const CACHE_KEY_ALL_DATASETS = -1; +const CACHE_USER_DATA_INFO = -2; @Injectable({ providedIn: 'root' }) export class DatasetService { - //cache for dataset arrays + // cache for dataset arrays arrayRequestCache = new DataCache>(5); - //cache for single dataset dtos + // cache for single dataset dtos singleRequestCache = new DataCache>(5); + // cache for information if user data should be also fetched + userDataInfoCache = new DataCache(5); constructor(private httpClient: HttpClient) { } - getAllDatasets(): Observable { - if (this.arrayRequestCache.isCached(CACHE_KEY_ALL_DATASETS)) { + getAllDatasets(userData = false): Observable { + if (this.arrayRequestCache.isCached(CACHE_KEY_ALL_DATASETS) && this.userDataInfoCache.get(CACHE_USER_DATA_INFO) === userData) { return this.arrayRequestCache.get(CACHE_KEY_ALL_DATASETS); } else { - let datasets$ = this.httpClient.get(datasetUrl).pipe(shareReplay()); + const params = new HttpParams().set('userData', String(userData)); + const datasets$ = this.httpClient.get(datasetUrl, {params}).pipe(shareReplay()); this.arrayRequestCache.push(CACHE_KEY_ALL_DATASETS, datasets$); + this.userDataInfoCache.push(CACHE_USER_DATA_INFO, userData); return datasets$; } } diff --git a/UI/src/app/modules/core/services/validation-run/validationrun.dto.ts b/UI/src/app/modules/core/services/validation-run/validationrun.dto.ts index d95fa6017..d701bc438 100644 --- a/UI/src/app/modules/core/services/validation-run/validationrun.dto.ts +++ b/UI/src/app/modules/core/services/validation-run/validationrun.dto.ts @@ -9,7 +9,9 @@ export class ValidationrunDto{ public 
error_points: number, public progress: number, public dataset_configurations: number[], - public reference_configuration: number, + public spatial_reference_configuration: number, + public temporal_reference_configuration: number, + public scaling_reference_configuration: number, public scaling_ref: number, public scaling_method: string, public interval_from: Date, @@ -37,7 +39,8 @@ export class ValidationrunDto{ public is_a_copy: boolean, public bootstrap_tcol_cis: boolean, public temporal_matching: number, - public comparison_label?: string) { + public comparison_label?: string, + public contains_user_data?: boolean) { } } diff --git a/UI/src/app/modules/core/services/validation-run/validationrun.service.ts b/UI/src/app/modules/core/services/validation-run/validationrun.service.ts index 7275b7346..b398fd338 100644 --- a/UI/src/app/modules/core/services/validation-run/validationrun.service.ts +++ b/UI/src/app/modules/core/services/validation-run/validationrun.service.ts @@ -22,6 +22,7 @@ const publishingFormURL: string = urlPrefix + '/publishing-form'; const copyValidationUrl: string = urlPrefix + '/copy-validation'; const copiedValidationRecordUrl: string = urlPrefix + '/copied-validation-record'; + const csrfToken = '{{csrf_token}}'; const changeNameUrl = urlPrefix + '/change-validation-name/00000000-0000-0000-0000-000000000000'; const archiveResultUrl = urlPrefix + '/archive-result/00000000-0000-0000-0000-000000000000'; @@ -133,30 +134,31 @@ export class ValidationrunService { return this.httpClient.get(metricsAndPlotsNamesUrl, {params}); } - getPublishingFormData(params: any): Observable{ + getPublishingFormData(params: any): Observable { return this.httpClient.get(publishingFormURL, {params}); } - refreshComponent(validationIdOrPage: string): void{ + refreshComponent(validationIdOrPage: string): void { // here we can give or validation id or the word 'page' if entire page should be reloaded (e.g. 
when a validation is removed) this.refresh.next(validationIdOrPage); } - copyValidation(params: any): Observable{ + copyValidation(params: any): Observable { return this.httpClient.get(copyValidationUrl, {params}); } - getCopiedRunRecord(validationId: string): Observable{ + getCopiedRunRecord(validationId: string): Observable { const urlWithParam = copiedValidationRecordUrl + '/' + validationId; return this.httpClient.get(urlWithParam); } - checkPublishingInProgress(): Observable{ + checkPublishingInProgress(): Observable { return this.publishingInProgress.asObservable(); } - changePublishingStatus(inProgress: boolean): void{ + changePublishingStatus(inProgress: boolean): void { this.publishingInProgress.next(inProgress); } + } diff --git a/UI/src/app/modules/dataset/components/dataset/dataset.component.html b/UI/src/app/modules/dataset/components/dataset/dataset.component.html index 2a61e6592..a295f97cb 100644 --- a/UI/src/app/modules/dataset/components/dataset/dataset.component.html +++ b/UI/src/app/modules/dataset/components/dataset/dataset.component.html @@ -3,7 +3,17 @@ + optionLabel="pretty_name" [style]="{'width': '100%'}"> + + + {{item.pretty_name}} {{item.user ? " (user data)" : ''}} + + + + {{item.pretty_name}} {{item.user ? " (user data)" : ''}} + + + @@ -14,7 +24,8 @@
+ [(ngModel)]="selectionModel.selectedVersion" optionLabel="pretty_name" + (onChange)="onVersionChange()"> @@ -24,10 +35,21 @@
- - + [(ngModel)]="selectionModel.selectedVariable"> + + + {{item.short_name}} [{{item.unit ? item.unit: 'n.a.'}}] + + + + {{item.short_name}} [{{item.unit ? item.unit : 'n.a.'}}] + + + + +
diff --git a/UI/src/app/modules/dataset/components/dataset/dataset.component.spec.ts b/UI/src/app/modules/dataset/components/dataset/dataset.component.spec.ts index d59d3b181..351244c8c 100644 --- a/UI/src/app/modules/dataset/components/dataset/dataset.component.spec.ts +++ b/UI/src/app/modules/dataset/components/dataset/dataset.component.spec.ts @@ -32,11 +32,12 @@ describe('DatasetComponent', () => { detailed_description: '', source_reference: '', citation: '', - is_only_reference: true, + is_spatial_reference: true, versions: [3, 4], variables: [2], filters: [1], - not_as_reference: false + not_as_reference: false, + user: null }, { id: 1, @@ -47,11 +48,12 @@ describe('DatasetComponent', () => { detailed_description: '', source_reference: '', citation: '', - is_only_reference: false, + is_spatial_reference: false, versions: [1, 2], variables: [1], filters: [1, 2], - not_as_reference: false + not_as_reference: false, + user: null }, ]; @@ -119,7 +121,6 @@ describe('DatasetComponent', () => { fixture = TestBed.createComponent(DatasetComponent); component = fixture.componentInstance; component.removable = false; - component.reference = false; component.selectionModel = { selectedDataset: testDatasets[0], selectedVersion: testVersions[0], diff --git a/UI/src/app/modules/dataset/components/dataset/dataset.component.ts b/UI/src/app/modules/dataset/components/dataset/dataset.component.ts index 69c9396d1..61fd499fc 100644 --- a/UI/src/app/modules/dataset/components/dataset/dataset.component.ts +++ b/UI/src/app/modules/dataset/components/dataset/dataset.component.ts @@ -9,6 +9,7 @@ import {DatasetComponentSelectionModel} from './dataset-component-selection-mode import {DatasetVariableDto} from '../../../core/services/dataset/dataset-variable.dto'; import {DatasetVariableService} from '../../../core/services/dataset/dataset-variable.service'; import {map} from 'rxjs/operators'; +import {ValidationRunConfigService} from 
'../../../../pages/validate/service/validation-run-config.service'; @Component({ @@ -19,48 +20,38 @@ import {map} from 'rxjs/operators'; export class DatasetComponent implements OnInit { datasets$: Observable; + allDatasets$: Observable; selectableDatasetVersions$: Observable; selectableDatasetVariables$: Observable; @Input() selectionModel: DatasetComponentSelectionModel; @Input() removable = false; - @Input() reference = false; @Output() changeDataset = new EventEmitter(); constructor(private datasetService: DatasetService, private datasetVersionService: DatasetVersionService, - private datasetVariableService: DatasetVariableService) { + private datasetVariableService: DatasetVariableService, + private validationConfigService: ValidationRunConfigService) { } - ngOnInit(): void { - // Create dataset observable - if (this.reference) { - this.datasets$ = this.datasetService.getAllDatasets().pipe(map(datasets => { - let referenceDatasets: DatasetDto[] = []; - datasets.forEach(dataset => { - if (dataset.not_as_reference === false){ - referenceDatasets.push(dataset); - } - }); - referenceDatasets = this.sortById(referenceDatasets); - return referenceDatasets; - })); - } else { - // filter out datasets than can be used only as reference - this.datasets$ = this.datasetService.getAllDatasets().pipe(map(datasets => { - let nonOnlyReferenceDatasets: DatasetDto[] = []; - datasets.forEach(dataset => { - if (dataset.is_only_reference === false) { - nonOnlyReferenceDatasets.push(dataset); - } - }); - nonOnlyReferenceDatasets = this.sortById(nonOnlyReferenceDatasets); - return nonOnlyReferenceDatasets; - })); - } + ngOnInit(): void { + this.allDatasets$ = this.datasetService.getAllDatasets(true); + + this.validationConfigService.listOfSelectedConfigs.subscribe(configs => { + if (configs.filter(config => config.datasetModel.selectedDataset?.short_name === 'ISMN').length !== 0 + && this.selectionModel.selectedDataset?.short_name !== 'ISMN'){ + this.datasets$ = 
this.allDatasets$.pipe(map(datasets => { + return this.sortById(datasets.filter(dataset => dataset.pretty_name !== 'ISMN')); + })); + } else { + this.datasets$ = this.allDatasets$.pipe(map(datasets => { + return this.sortById(datasets); + })); + } + }); this.selectableDatasetVersions$ = this.sortObservableById( this.datasetVersionService.getVersionsByDataset(this.selectionModel.selectedDataset.id)); @@ -69,7 +60,7 @@ export class DatasetComponent implements OnInit { this.datasetVariableService.getVariablesByDataset(this.selectionModel.selectedDataset.id)); } - private updateSelectableVersionsAndVariableAndEmmit(): void{ + private updateSelectableVersionsAndVariableAndEmmit(): void { if (this.selectionModel.selectedDataset === undefined || this.selectionModel.selectedDataset.versions.length === 0) { return; } @@ -80,7 +71,7 @@ export class DatasetComponent implements OnInit { this.selectableDatasetVersions$.subscribe( versions => { - this.selectionModel.selectedVersion = versions[0]; + this.selectionModel.selectedVersion = versions[0]; }, () => { }, @@ -90,28 +81,30 @@ export class DatasetComponent implements OnInit { this.selectableDatasetVariables$.subscribe( variables => { - this.selectionModel.selectedVariable = variables[0]; - }, + this.selectionModel.selectedVariable = variables[variables.length - 1]; + }, () => {}, - () => {this.changeDataset.emit(this.selectionModel); + () => { + this.changeDataset.emit(this.selectionModel); }); }); } - onDatasetChange(): void{ + onDatasetChange(): void { this.updateSelectableVersionsAndVariableAndEmmit(); } - onVersionChange(): void{ + + onVersionChange(): void { this.changeDataset.emit(this.selectionModel); } - sortById(listOfElements): any{ + sortById(listOfElements): any { return listOfElements.sort((a, b) => { return a.id < b.id ? 
1 : -1; }); } - sortObservableById(observableOfListOfElements: Observable): Observable{ + sortObservableById(observableOfListOfElements: Observable): Observable { return observableOfListOfElements.pipe(map((data) => { data.sort((a, b) => { return a.id < b.id ? 1 : -1; diff --git a/UI/src/app/modules/dataset/dataset.module.ts b/UI/src/app/modules/dataset/dataset.module.ts index 5c8d5ba14..ad176dfb7 100644 --- a/UI/src/app/modules/dataset/dataset.module.ts +++ b/UI/src/app/modules/dataset/dataset.module.ts @@ -7,9 +7,9 @@ import {ButtonModule} from 'primeng/button'; @NgModule({ declarations: [DatasetComponent], - exports: [ - DatasetComponent - ], + exports: [ + DatasetComponent, + ], imports: [ CommonModule, FormsModule, diff --git a/UI/src/app/modules/metrics/components/metrics/metrics.component.ts b/UI/src/app/modules/metrics/components/metrics/metrics.component.ts index eaa98a143..eb02d7b29 100644 --- a/UI/src/app/modules/metrics/components/metrics/metrics.component.ts +++ b/UI/src/app/modules/metrics/components/metrics/metrics.component.ts @@ -40,9 +40,9 @@ export class MetricsComponent implements OnInit { checkIfDisabled(metricName: string): boolean { let condition = true; - const conditionTcol = this.validationModel.datasetConfigurations.length < 2; + const conditionTcol = this.validationModel.datasetConfigurations.length < 3; const conditionBtcol = conditionTcol || - !(this.validationModel.datasetConfigurations.length > 1 && this. tripleCollocationMetrics.value$.getValue()); + !(this.validationModel.datasetConfigurations.length > 2 && this. 
tripleCollocationMetrics.value$.getValue()); if (conditionTcol) { this.tripleCollocationMetrics.value$.next(false); diff --git a/UI/src/app/modules/navigation-bar/components/navigation-bar/navigation-bar.component.ts b/UI/src/app/modules/navigation-bar/components/navigation-bar/navigation-bar.component.ts index 7504e32ed..bf38688a8 100644 --- a/UI/src/app/modules/navigation-bar/components/navigation-bar/navigation-bar.component.ts +++ b/UI/src/app/modules/navigation-bar/components/navigation-bar/navigation-bar.component.ts @@ -45,11 +45,13 @@ export class NavigationBarComponent implements OnInit { command: () => this.setPreviousUrl('my-validations') }, {label: 'Published validations', icon: 'pi pi-fw pi-globe', routerLink: ['published-validations']}, - {label: 'Compare validations', icon: 'pi pi-fw pi-th-large', routerLink: ['comparison']}, + {label: 'Compare validations', icon: 'pi pi-fw pi-th-large', routerLink: ['comparison'], command: () => this.setPreviousUrl('comparison')}, + {label: 'My datasets', icon: 'pi pi-fw pi-upload', routerLink: ['my-datasets'], command: () => this.setPreviousUrl('my-datasets')}, { label: 'Info', icon: 'pi pi-fw pi-info-circle', items: [ {label: 'About', icon: 'pi pi-fw pi-info', routerLink: ['about']}, {label: 'Help', icon: 'pi pi-fw pi-question', routerLink: ['help']}, + {label: 'Upload Data Help', icon: 'pi pi-fw pi-server', routerLink: ['user-data-guidelines']}, { label: 'User Manual', icon: 'pi pi-fw pi-book', diff --git a/UI/src/app/modules/scaling/components/scaling/scaling-methods.dto.ts b/UI/src/app/modules/scaling/components/scaling/scaling-methods.dto.ts new file mode 100644 index 000000000..c58a82a76 --- /dev/null +++ b/UI/src/app/modules/scaling/components/scaling/scaling-methods.dto.ts @@ -0,0 +1,7 @@ +export class ScalingMethodDto { + + constructor(public method: string, + public description: string, + ) { + } +} diff --git a/UI/src/app/modules/scaling/components/scaling/scaling-model.ts 
b/UI/src/app/modules/scaling/components/scaling/scaling-model.ts index 30621ef8d..4bf527cff 100644 --- a/UI/src/app/modules/scaling/components/scaling/scaling-model.ts +++ b/UI/src/app/modules/scaling/components/scaling/scaling-model.ts @@ -1,10 +1,6 @@ -import {ScalingToModel} from './scaling-to-model'; -import {BehaviorSubject} from 'rxjs'; - export class ScalingModel { - constructor(public id: string, - public description: string, - public scaleTo$: BehaviorSubject) { + constructor(public methodName: string, + public methodDescription: string) { } } diff --git a/UI/src/app/modules/scaling/components/scaling/scaling.component.html b/UI/src/app/modules/scaling/components/scaling/scaling.component.html index 1ac4ab49c..5d36a8f92 100644 --- a/UI/src/app/modules/scaling/components/scaling/scaling.component.html +++ b/UI/src/app/modules/scaling/components/scaling/scaling.component.html @@ -1,4 +1,4 @@ - + -
-
-
-
-
-
Method
-
-
-
-
- - -
+
+
+
+ +
+
+
+ +
-
-
-
-
-
Scale to
-
-
-
-
- - -
+
+
+ +
+
+
+ + +
+ + {{item.datasetModel.selectedDataset?.pretty_name}} ({{item.datasetModel.selectedVersion?.pretty_name}}) + +
+
+ +
+ {{item.datasetModel.selectedDataset?.pretty_name}} + ({{item.datasetModel.selectedVersion?.pretty_name}}) +
+ +
+
+
diff --git a/UI/src/app/modules/scaling/components/scaling/scaling.component.ts b/UI/src/app/modules/scaling/components/scaling/scaling.component.ts index 130f0dea5..aaaa06af5 100644 --- a/UI/src/app/modules/scaling/components/scaling/scaling.component.ts +++ b/UI/src/app/modules/scaling/components/scaling/scaling.component.ts @@ -1,39 +1,9 @@ -import {Component, Input, OnInit} from '@angular/core'; -import {ScalingModel} from './scaling-model'; +import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core'; import {ValidationModel} from '../../../../pages/validate/validation-model'; -import {ScalingToModel} from './scaling-to-model'; import {BehaviorSubject} from 'rxjs'; - -export const SCALING_METHOD_NO_SCALING = 'none'; -export const SCALING_METHOD_NO_SCALING_DESC = 'No scaling'; - -export const SCALING_METHOD_MIN_MAX = 'min_max'; -export const SCALING_METHOD_MIN_MAX_DESC = 'Min/Max'; - -export const SCALING_METHOD_LIN_REG = 'linreg'; -export const SCALING_METHOD_LIN_REG_DESC = 'Linear regression'; - -export const SCALING_METHOD_MEAN_STD = 'mean_std'; -export const SCALING_METHOD_MEAN_STD_DESC = 'Mean/standard deviation'; - -export const SCALING_METHOD_CDF_MATCH = 'cdf_beta_match'; -export const SCALING_METHOD_CDF_MATCH_DESC = 'Cumulative Distributions Functions (CDF) matching'; - -export const SCALING_REFERENCE_REF = 'ref'; -export const SCALING_REFERENCE_REF_DESC = 'Reference'; -export const SCALING_REFERENCE_DATA = 'data'; -export const SCALING_REFERENCE_DATA_DESC = 'Data'; - -export const SCALING_REFERENCE_DEFAULT = new ScalingToModel(SCALING_REFERENCE_REF, SCALING_REFERENCE_REF_DESC); -export const SCALING_METHOD_DEFAULT = new ScalingModel(SCALING_METHOD_MEAN_STD, SCALING_METHOD_MEAN_STD_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT)); - -export let SCALING_CHOICES = {}; -SCALING_CHOICES[SCALING_METHOD_NO_SCALING] = SCALING_METHOD_NO_SCALING_DESC; -SCALING_CHOICES[SCALING_METHOD_MIN_MAX] = SCALING_METHOD_MIN_MAX_DESC; 
-SCALING_CHOICES[SCALING_METHOD_LIN_REG] = SCALING_METHOD_LIN_REG_DESC; -SCALING_CHOICES[SCALING_METHOD_MEAN_STD] = SCALING_METHOD_MEAN_STD_DESC; -SCALING_CHOICES[SCALING_METHOD_CDF_MATCH] = SCALING_METHOD_CDF_MATCH_DESC; +import {ValidationRunConfigService} from '../../../../pages/validate/service/validation-run-config.service'; +import {ScalingMethodDto} from './scaling-methods.dto'; +import {DatasetConfigModel} from '../../../../pages/validate/dataset-config-model'; @Component({ @@ -43,66 +13,79 @@ SCALING_CHOICES[SCALING_METHOD_CDF_MATCH] = SCALING_METHOD_CDF_MATCH_DESC; }) export class ScalingComponent implements OnInit { - readonly noScalingId = SCALING_METHOD_NO_SCALING; - scalingModels: ScalingModel[] = - [ new ScalingModel(SCALING_METHOD_NO_SCALING, SCALING_METHOD_NO_SCALING_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT)), - new ScalingModel(SCALING_METHOD_MIN_MAX, SCALING_METHOD_MIN_MAX_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT)), - new ScalingModel(SCALING_METHOD_LIN_REG, SCALING_METHOD_LIN_REG_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT)), - new ScalingModel(SCALING_METHOD_MEAN_STD, SCALING_METHOD_MEAN_STD_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT)), - new ScalingModel(SCALING_METHOD_CDF_MATCH, SCALING_METHOD_CDF_MATCH_DESC, - new BehaviorSubject(SCALING_REFERENCE_DEFAULT))]; - selectedScalingModel: BehaviorSubject; - - scaleToModels: ScalingToModel[] = []; - selectedScaleToModel$: BehaviorSubject; - @Input() validationModel: ValidationModel; + @Output() hoverOverDataset = new EventEmitter(); + + selectedScaleToModel$: BehaviorSubject = new BehaviorSubject(null); + selectedScaleToModel: DatasetConfigModel; + + + scalingMethods: ScalingMethodDto[]; + selectedScalingMethod$: BehaviorSubject = new BehaviorSubject(null); + + constructor(private validationConfigService: ValidationRunConfigService) { + } + ngOnInit(): void { + // prepare method choices + 
this.validationConfigService.getScalingMethods().subscribe(methods => { + this.scalingMethods = methods; + this.selectedScalingMethod$.next(methods.find(method => method.method === 'none')); + this.setScalingMethod(); + }); + } - constructor() { + setScalingMethod(): void{ + this.validationModel.scalingMethod.methodName = this.selectedScalingMethod$.getValue().method; + this.validationModel.scalingMethod.methodDescription = this.selectedScalingMethod$.getValue().description; } + updateScalingMethod(): void{ + this.setScalingMethod(); - isScalingSelectorDisabled(): boolean { - if (this.validationModel.datasetConfigurations.length > 1) { - this.selectedScaleToModel$.next(SCALING_REFERENCE_DEFAULT); - return true; + if (this.selectedScalingMethod$.getValue().method === 'none'){ + this.selectedScaleToModel$.next(null); + this.selectedScaleToModel ? this.updateScaleTo(true) : this.updateScaleTo(); } else { - return false; + if (!this.selectedScaleToModel){ + this.selectedScaleToModel$.next(this.validationModel.datasetConfigurations[0]); + this.updateScaleTo(); + } } } - public setSelection(scalingMethodName: string, reference: string): void { - this.scalingModels.forEach(scalingModel => { - if (scalingModel.id === scalingMethodName) { - this.scaleToModels.forEach(scaleToModel => { - if (scaleToModel.id === reference) { - scalingModel.scaleTo$.next(scaleToModel); - } - }); - this.selectedScalingModel.next(scalingModel); - } - }); - this.updateScalingModel(); + updateScaleTo(clearSelected = false): void{ + if (clearSelected){ + this.selectedScaleToModel.scalingReference$.next(false); + } + + this.selectedScaleToModel = this.selectedScaleToModel$.getValue(); + if (this.selectedScaleToModel){ + this.selectedScaleToModel.scalingReference$.next(true); + } } - ngOnInit(): void { - this.selectedScalingModel = new BehaviorSubject(this.scalingModels.find(model => model.id === 'mean_std')); - this.prepareScalingReferenceModels(); - this.updateScalingModel(); + 
onHoverOverDataset(item, highlight): void{ + this.hoverOverDataset.emit({hoveredDataset: item, highlight}); } - updateScalingModel(): void{ - this.validationModel.scalingModel.id = this.selectedScalingModel.getValue().id; - this.validationModel.scalingModel.scaleTo$.next(this.selectedScaleToModel$.getValue()); + verifyScaleToModel(): BehaviorSubject{ + this.selectedScaleToModel = this.validationModel.datasetConfigurations + .find(datasetConfig => datasetConfig.scalingReference$.getValue()); + this.selectedScaleToModel$.next(this.selectedScaleToModel); + return this.selectedScaleToModel$; } - private prepareScalingReferenceModels(): void{ - this.scaleToModels.push(SCALING_REFERENCE_DEFAULT); - this.scaleToModels.push(new ScalingToModel(SCALING_REFERENCE_DATA, SCALING_REFERENCE_DATA_DESC)); - this.selectedScaleToModel$ = new BehaviorSubject(this.scaleToModels[0]); + public setSelection(scalingMethodName: string, reference: DatasetConfigModel): void { + + this.validationConfigService.getScalingMethods().subscribe(methods => { + methods.forEach(scalingMethod => { + if (scalingMethod.method === scalingMethodName) { + this.selectedScalingMethod$.next(scalingMethod); + } + }); + this.updateScalingMethod(); + }); + this.selectedScaleToModel$.next(reference); + this.updateScaleTo(); } } diff --git a/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.html b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.html new file mode 100644 index 000000000..fc03309e1 --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.html @@ -0,0 +1,158 @@ + + + +
Size: {{ getTheFileSize() }}
+
+ +
Upload date: {{userDataset.upload_date| date: dateFormat :timeZone}} UTC +
+
+
+ +
+
Dataset:
+ + +
Name: {{(datasetName$| async)}} + +
+ +
+ + + + +
+ + +
Version: {{(versionName$ | async)}} + +
+ +
+ + + + +
+ + + + Variable: {{(variableName.shortName$|async)}} ({{variableName.prettyName$|async}}) [{{variableName.unit$|async}}] + + +
+ + + + +
+
+ +
+
Validation list:
+ +
+ {{ind + 1}}) {{validation.val_name}} +
+
+
+ No validation has been run with this data +
+
+ +
+
+ +
+
+
+ +
+ diff --git a/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.scss b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.scss new file mode 100644 index 000000000..3ba0863d9 --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.scss @@ -0,0 +1,32 @@ +@import "src/styles"; +.att_val_container{ + background-color: white; + color: black; + padding: 0.3em 0.3em 0.3em 0.3em; + border-bottom: #dddddd solid 1px; + margin: 0; +} + +.small-edit-icon{ + font-size: 0.8rem; + line-height: 1rem; +} + +.edit_dataset { + border: 1px solid #dddddd; + border-radius: 4px; + position: relative; + padding: 3px; + height: 2rem; + font-size: 1rem; +} + +#no-validation-box{ + color: lightgray; + height: 70%; + display: flex; + align-items:center; +} +.warning { + color: red; +} diff --git a/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.spec.ts b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.spec.ts new file mode 100644 index 000000000..f47fb89cd --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {UserDataRowComponent} from './user-data-row.component'; + +describe('UserDataRowComponent', () => { + let component: UserDataRowComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ UserDataRowComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(UserDataRowComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.ts 
b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.ts new file mode 100644 index 000000000..9623ba7eb --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-data-row/user-data-row.component.ts @@ -0,0 +1,135 @@ +import {Component, Input, OnInit} from '@angular/core'; +import {UserDataFileDto} from '../../services/user-data-file.dto'; +import {BehaviorSubject, Observable} from 'rxjs'; +import {UserDatasetsService} from '../../services/user-datasets.service'; +import {DatasetService} from '../../../core/services/dataset/dataset.service'; +import {DatasetVersionService} from '../../../core/services/dataset/dataset-version.service'; +import {DatasetVariableService} from '../../../core/services/dataset/dataset-variable.service'; +import {ToastService} from '../../../core/services/toast/toast.service'; +import {DatasetDto} from '../../../core/services/dataset/dataset.dto'; +import {DatasetVersionDto} from '../../../core/services/dataset/dataset-version.dto'; +import {AuthService} from '../../../core/services/auth/auth.service'; + +@Component({ + selector: 'qa-user-data-row', + templateUrl: './user-data-row.component.html', + styleUrls: ['./user-data-row.component.scss'] +}) +export class UserDataRowComponent implements OnInit { + + @Input() userDataset: UserDataFileDto; + datasetName$: BehaviorSubject = new BehaviorSubject(''); + versionName$: BehaviorSubject = new BehaviorSubject(''); + variableName: { shortName$: BehaviorSubject, prettyName$: BehaviorSubject, unit$: BehaviorSubject } = + { + shortName$: new BehaviorSubject(''), + prettyName$: new BehaviorSubject(''), + unit$: new BehaviorSubject('') + }; + variableUnit: string; + + datasetFieldName = 'dataset_name'; + versionFieldName = 'version_name'; + variableFieldName = 'variable_name'; + latFieldName = 'lat_name'; + lonFieldName = 'lon_name'; + timeFiledName = 'time_name'; + + editDataset = {opened: false}; + editVersion = {opened: false}; + editVariable = {opened: 
false}; + editLatName = {opened: false}; + editLonName = {opened: false}; + editTimeName = {opened: false}; + + dateFormat = 'medium'; + timeZone = 'UTC'; + + // variables$: Observable[] = []; + + constructor(private userDatasetService: UserDatasetsService, + private datasetService: DatasetService, + private datasetVersionService: DatasetVersionService, + private datasetVariableService: DatasetVariableService, + private toastService: ToastService, + public authService: AuthService) { + } + + ngOnInit(): void { + this.datasetService.getDatasetById(this.userDataset.dataset).subscribe(datasetData => { + this.datasetName$.next(datasetData.pretty_name); + }); + this.datasetVersionService.getVersionById(this.userDataset.version).subscribe(versionData => { + this.versionName$.next(versionData.pretty_name); + }); + this.datasetVariableService.getVariableById(this.userDataset.variable).subscribe(variableData => { + this.variableName.shortName$.next(variableData.short_name); + this.variableName.prettyName$.next(variableData.pretty_name); + this.variableName.unit$.next(variableData.unit); + // this.variableUnit = variableData.unit; + }); + } + + removeDataset(dataFileId: string): void { + if (!confirm('Do you really want to delete the dataset?')) { + return; + } + this.userDatasetService.deleteUserData(dataFileId).subscribe(() => { + this.userDatasetService.refresh.next(true); + this.authService.init(); + }); + } + + getDataset(datasetId): Observable { + return this.datasetService.getDatasetById(datasetId); + } + + getDatasetVersion(versionId): Observable { + return this.datasetVersionService.getVersionById(versionId); + } + + updateMetadata(fieldName, fieldValue, userDataId): void { + this.userDatasetService.updateMetadata(fieldName, fieldValue, userDataId).subscribe(() => { + this.toggle(fieldName, false); + if (fieldName === this.datasetFieldName) { + this.datasetName$.next(fieldValue); + } + if (fieldName === this.versionFieldName) { + this.versionName$.next(fieldValue); 
+ } + if (fieldName === this.variableFieldName) { + this.variableName.prettyName$.next( + this.userDataset.all_variables.find(choice => choice.name === fieldValue).long_name); + this.variableName.unit$.next(this.userDataset.all_variables.find(choice => choice.name === fieldValue).units); + this.variableName.shortName$.next(fieldValue); + } + }, + () => { + this.toastService.showError('Metadata could not be updated'); + }, + () => { + this.toastService.showSuccess('Metadata has been updated'); + }); + } + + toggle(fieldName, open): void { + let editableField; + switch (fieldName) { + case this.datasetFieldName: + editableField = this.editDataset; + break; + case this.versionFieldName: + editableField = this.editVersion; + break; + case this.variableFieldName: + editableField = this.editVariable; + break; + } + editableField.opened = open; + } + + getTheFileSize(): string { + return this.userDatasetService.getTheSizeInProperUnits(this.userDataset.file_size); + } + +} diff --git a/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.html b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.html new file mode 100644 index 000000000..59cd4caac --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.html @@ -0,0 +1,128 @@ +
+
+
+
+ +
+
+ + {{file ? file.name : "No file chosen yet."}} +
+
+
+ +
+
+
+
+ +
+
+
+
+ +
+ + +
+ You can upload your own data using this form. Please follow this file + standard +
+
+ +
+
+ {{file ? file.name : (isFileTooBig ? 'This file is too big, please choose another one. You still have ' + getTheFileSize() + ' available*' + : "No file chosen yet.")}} +
+
+ +
+
+ +
+
+ +
+
+
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + +
+ +
+
+
+
+ + + +
Do not close this tab. We are uploading your data...
+
Progress {{uploadProgress|async}} %
+
Your file has been uploaded. We still need a few seconds to preprocess it..
+
+ diff --git a/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.scss b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.scss new file mode 100644 index 000000000..c8d752dab --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.scss @@ -0,0 +1,83 @@ +.file-input { + display: none; +} +.center{ + display: flex; + justify-content: center; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", "Liberation Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; +} +button{ + width: 15rem; +} +.label-button{ + font-size: 1.2rem; + line-height: 1rem; + height: 2.25rem; + padding: 1rem; + display: flex; + align-items: center; + text-align: center; +} + +.center h1{ + text-align: left; + justify-content: center; +} +.form-field{ + width: 16rem; + height: 2.5rem; + margin-right: 0.5rem; +} +//.form-small-field{ +// width: 10rem; +// height: 2.5rem; +// margin-right: 0.5rem; +//} +.p-float-label{ + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", "Liberation Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; + font-size: 16px; +} +button{ + width: 15rem; +} +div.alert{ + margin-top: 1rem; +} +.alert{ + background-color: #FF6C5C26; + color: #FF6C5C; + width: 15rem; + border: #FF6C5C30 solid 1px; + border-radius: 3px; + padding: 0.5rem; +} +.verticalAlign{ + vertical-align: 85%; +} + +@keyframes changeColor { + from {color: #9CF2F6} + to {color: #00AAE3;} +} + +//.loading-spinner{ +// font-size: 10rem; +// margin: 1rem; +// animation-name: spinAndChangeColor; +// animation-duration: 10s; +// animation-iteration-count: infinite; +//} + +.uploading-message{ + margin-top: 2px; + font-size: 2rem; + animation-name: changeColor; + animation-duration: 3s; + 
animation-iteration-count: infinite; +} + +.warning{ + color: red; + font-weight: bold; + width: 80%; +} diff --git a/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.spec.ts b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.spec.ts new file mode 100644 index 000000000..12b1b4d2b --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {UserFileUploadComponent} from './user-file-upload.component'; + +describe('UserFileUploadComponent', () => { + let component: UserFileUploadComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ UserFileUploadComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(UserFileUploadComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.ts b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.ts new file mode 100644 index 000000000..567502f2d --- /dev/null +++ b/UI/src/app/modules/user-datasets/components/user-file-upload/user-file-upload.component.ts @@ -0,0 +1,148 @@ +import {Component, OnInit} from '@angular/core'; +import {FormBuilder, Validators} from '@angular/forms'; +import {UserDatasetsService} from '../../services/user-datasets.service'; +import {ToastService} from '../../../core/services/toast/toast.service'; +import {BehaviorSubject, Subscription} from 'rxjs'; +import {finalize} from 'rxjs/operators'; +import {HttpEventType} from '@angular/common/http'; +import {allowedNameValidator} from '../../services/allowed-name.directive'; 
+import * as uuid from 'uuid'; +// @ts-ignore +import JSZip from 'jszip'; +import {AuthService} from '../../../core/services/auth/auth.service'; + +@Component({ + selector: 'qa-user-file-upload', + templateUrl: './user-file-upload.component.html', + styleUrls: ['./user-file-upload.component.scss'] +}) +export class UserFileUploadComponent implements OnInit { + // variables to store file information + file: File; + fileName = ''; + name = ''; + // variable to open the form + dialogVisible = false; + spinnerVisible = false; + isFileTooBig = false; + + uploadProgress: BehaviorSubject = new BehaviorSubject(0); + uploadSub: Subscription; + allowedExtensions = ['.zip', '.nc', '.nc4']; + + // dataset file form + metadataForm = this.formBuilder.group({ + dataset_name: [null, [Validators.required, Validators.maxLength(30), allowedNameValidator()]], + dataset_pretty_name: [null, [Validators.maxLength(30), allowedNameValidator(true)]], + version_name: [null, [Validators.required, Validators.maxLength(30), allowedNameValidator()]], + version_pretty_name: [null, [Validators.maxLength(30), allowedNameValidator(true)]], + }); + + constructor(private userDatasetService: UserDatasetsService, + private formBuilder: FormBuilder, + private toastService: ToastService, + public authService: AuthService) { + } + + ngOnInit(): void { + } + + private verifyZipContent(): void { + const zip = new JSZip(); + zip.loadAsync(this.file).then(contents => { + const files = Object.keys(contents.files).filter(key => + !['nc', 'nc4', 'csv', 'yml'].includes(key.split('.').reverse()[0])); + if (files.length !== 0){ + this.toastService.showErrorWithHeader('File can not be uploaded', + 'The zip file you are trying to upload contains files with no acceptable extensions (i.e. 
netCDF or csv + yml'); + this.file = null; + } + }); + } + + onFileSelected(event): void { + this.file = event.target.files[0]; + this.isFileTooBig = false; + + if (this.authService.currentUser.space_left && this.file.size > this.authService.currentUser.space_left){ + this.isFileTooBig = true; + this.file = null; + return null; + } + + + const fileExtension = this.file.name.split('.').reverse()[0]; + + if (!this.allowedExtensions.includes('.' + fileExtension)) { + this.file = null; + return null; + } + + if (fileExtension === 'zip') { + this.verifyZipContent(); + } + + this.fileName = `${uuid.v4()}.${fileExtension}`; + this.dialogVisible = true; + // I need to clean the selected file, otherwise there will be problem with choosing the same file next time + event.target.value = null; + } + + sendForm(): void { + if (this.file) { + this.name = 'uploadedFile'; + this.spinnerVisible = true; + const upload$ = this.userDatasetService.userFileUpload(this.name, this.file, this.fileName) + .pipe(finalize(() => this.reset)); + + this.uploadSub = upload$.subscribe(event => { + if (event.type === HttpEventType.UploadProgress) { + this.uploadProgress.next(Math.round(100 * (event.loaded / event.total))); + } else if (event.type === HttpEventType.Response) { + this.userDatasetService.sendMetadata(this.metadataForm.value, event.body.id).subscribe(() => { + this.userDatasetService.refresh.next(true); + this.authService.init(); + this.resetFile(); + }, + (message) => { + this.spinnerVisible = false; + this.toastService.showErrorWithHeader('Metadata not saved.', + `${message.error.error}.\n Provided metadata could not be saved. Please try again or contact our team.`); + }, + () => { + this.spinnerVisible = false; + this.metadataForm.reset(''); + }); + } else { + } + }, + (message) => { + this.spinnerVisible = false; + this.toastService.showErrorWithHeader('File not saved', + `${message.error.error}.\n File could not be uploaded. 
Please try again or contact our team.`); + } + ); + } + } + + + onSaveData(): void { + this.dialogVisible = false; + } + + resetFile(): void { + this.file = null; + this.fileName = null; + } + + reset(): void { + this.uploadProgress = null; + this.uploadSub = null; + } + + getTheFileSize(): string { + return this.userDatasetService.getTheSizeInProperUnits(this.authService.currentUser.space_left); + } + + +} diff --git a/UI/src/app/modules/user-datasets/services/allowed-name.directive.ts b/UI/src/app/modules/user-datasets/services/allowed-name.directive.ts new file mode 100644 index 000000000..57f995a2c --- /dev/null +++ b/UI/src/app/modules/user-datasets/services/allowed-name.directive.ts @@ -0,0 +1,26 @@ +import {Directive} from '@angular/core'; +import {AbstractControl, NG_VALIDATORS, ValidationErrors, Validator, ValidatorFn} from '@angular/forms'; + +export function allowedNameValidator(spaceAllowed= false): ValidatorFn { + return (control: AbstractControl): ValidationErrors | null => { + const chars = spaceAllowed ? /[a-z|A-Z0-9@.+_\- ]/i : /[a-z|A-Z0-9@.+_\-]/i; + const allowed = []; + if (control.value){ + control.value.split('').forEach(char => { + allowed.push(chars.test(char)); + }); + } + return allowed.every(val => val) ? 
null : {forbiddenName: {value: control.value}}; + }; +} + +@Directive({ + selector: '[qaAllowedName]', + providers: [{provide: NG_VALIDATORS, useExisting: AllowedNameDirective, multi: true}] +}) +export class AllowedNameDirective implements Validator { + + validate(control: AbstractControl): ValidationErrors | null { + return allowedNameValidator()(control); + } +} diff --git a/UI/src/app/modules/user-datasets/services/user-data-file.dto.ts b/UI/src/app/modules/user-datasets/services/user-data-file.dto.ts new file mode 100644 index 000000000..d10e5523b --- /dev/null +++ b/UI/src/app/modules/user-datasets/services/user-data-file.dto.ts @@ -0,0 +1,16 @@ +export class UserDataFileDto { + constructor(public id: string, + public file: File, + public file_name: string, + public owner: number, + public dataset: number, + public version: number, + public variable: number, + public all_variables: {name: string, standard_name: string, long_name: string, units: string}[], + public upload_date: Date, + public is_used_in_validation: boolean, + public file_size: number, + public validation_list: {val_id: string, val_name: string}[] + ) { + } +} diff --git a/UI/src/app/modules/user-datasets/services/user-datasets.service.spec.ts b/UI/src/app/modules/user-datasets/services/user-datasets.service.spec.ts new file mode 100644 index 000000000..539e11c69 --- /dev/null +++ b/UI/src/app/modules/user-datasets/services/user-datasets.service.spec.ts @@ -0,0 +1,16 @@ +import {TestBed} from '@angular/core/testing'; + +import {UserDatasetsService} from './user-datasets.service'; + +describe('UserDatasetsService', () => { + let service: UserDatasetsService; + + beforeEach(() => { + TestBed.configureTestingModule({}); + service = TestBed.inject(UserDatasetsService); + }); + + it('should be created', () => { + expect(service).toBeTruthy(); + }); +}); diff --git a/UI/src/app/modules/user-datasets/services/user-datasets.service.ts b/UI/src/app/modules/user-datasets/services/user-datasets.service.ts 
new file mode 100644 index 000000000..7ca24be4a --- /dev/null +++ b/UI/src/app/modules/user-datasets/services/user-datasets.service.ts @@ -0,0 +1,87 @@ +import {Injectable} from '@angular/core'; +import {BehaviorSubject, Observable} from 'rxjs'; +import {HttpClient, HttpHeaders} from '@angular/common/http'; +import {environment} from '../../../../environments/environment'; +import {UserDataFileDto} from './user-data-file.dto'; + + +const urlPrefix = environment.API_URL + 'api'; +const uploadUserDataUrl: string = urlPrefix + '/upload-user-data'; +const userDataListUrl: string = urlPrefix + '/get-list-of-user-data-files'; +const userDataDeleteUrl: string = urlPrefix + '/delete-user-datafile'; +const userDataMetadataUrl: string = urlPrefix + '/user-file-metadata'; +const userDataTestUrl: string = urlPrefix + '/test-user-dataset'; +const updateMetadataUrl: string = urlPrefix + '/update-metadata'; + +// const validateUserDataUrl: string = urlPrefix + '/validate-user-data'; + +const csrfToken = '{{csrf_token}}'; +const headers = new HttpHeaders({'X-CSRFToken': csrfToken}); + +@Injectable({ + providedIn: 'root' +}) +export class UserDatasetsService { + + public refresh: BehaviorSubject = new BehaviorSubject(false); + doRefresh = this.refresh.asObservable(); + + constructor(private httpClient: HttpClient) { } + + userFileUpload(name, file, fileName): Observable { + const formData = new FormData(); + formData.append(name, file, fileName); + const uploadUrl = uploadUserDataUrl + '/' + fileName + '/'; + return this.httpClient.post(uploadUrl, formData.get(name), {headers, reportProgress: true, observe: 'events', responseType: 'json'}); + } + + getUserDataList(): Observable{ + return this.httpClient.get(userDataListUrl); + } + + deleteUserData(dataFileId: string): Observable{ + const deleteUrl = userDataDeleteUrl + '/' + dataFileId + '/'; + return this.httpClient.delete(deleteUrl, {headers}); + } + + sendMetadata(metadataForm: any, fileId: string): Observable { + const 
metadataUrl = userDataMetadataUrl + '/' + fileId + '/'; + return this.httpClient.post(metadataUrl, metadataForm, {observe: 'response', responseType: 'json'}); + } + + testDataset(dataFileId: string): Observable{ + const testUrl = userDataTestUrl + '/' + dataFileId + '/'; + return this.httpClient.get(testUrl); + } + + updateMetadata(fieldName: string, fieldValue: string, dataFileId: string): Observable{ + const updateUrl = updateMetadataUrl + '/' + dataFileId + '/'; + return this.httpClient.put(updateUrl, {field_name: fieldName, field_value: fieldValue}); + } + + getTheSizeInProperUnits(sizeInBites): string { + let properSize; + let units; + const coeff = Math.pow(10, 6); + if (sizeInBites < coeff) { + properSize = sizeInBites / Math.pow(10, 3); + units = 'kB'; + } else if (sizeInBites >= coeff && sizeInBites < coeff * 1000) { + properSize = sizeInBites / coeff; + units = 'MB'; + } else { + properSize = sizeInBites / Math.pow(10, 9); + units = 'GB'; + } + + return `${Math.round(properSize * 10) / 10} ${units}`; + } + + // userFileValidate(name, file, filename): Observable { + // const formData = new FormData(); + // formData.append(name, file, filename); + // const validateUserDataUrlWithFileName = validateUserDataUrl + '/' + file.name + '/'; + // return this.httpClient.put(validateUserDataUrlWithFileName, {file: formData.getAll(name)}); + // } + +} diff --git a/UI/src/app/modules/user-datasets/user-datasets.module.ts b/UI/src/app/modules/user-datasets/user-datasets.module.ts new file mode 100644 index 000000000..d383dbef7 --- /dev/null +++ b/UI/src/app/modules/user-datasets/user-datasets.module.ts @@ -0,0 +1,37 @@ +import {NgModule} from '@angular/core'; +import {CommonModule} from '@angular/common'; +import {UserFileUploadComponent} from './components/user-file-upload/user-file-upload.component'; +import {ButtonModule} from 'primeng/button'; +import {DialogModule} from 'primeng/dialog'; +import {RouterModule} from '@angular/router'; +import {FormsModule, 
ReactiveFormsModule} from '@angular/forms'; +import {InputTextModule} from 'primeng/inputtext'; +import {TooltipModule} from 'primeng/tooltip'; +import {DropdownModule} from 'primeng/dropdown'; +import {UserDataRowComponent} from './components/user-data-row/user-data-row.component'; +import {PanelModule} from 'primeng/panel'; +import {AllowedNameDirective} from './services/allowed-name.directive'; +import {ScrollPanelModule} from 'primeng/scrollpanel'; + + +@NgModule({ + declarations: [UserFileUploadComponent, UserDataRowComponent, AllowedNameDirective], + exports: [ + UserFileUploadComponent, + UserDataRowComponent + ], + imports: [ + CommonModule, + ButtonModule, + DialogModule, + RouterModule, + ReactiveFormsModule, + InputTextModule, + TooltipModule, + DropdownModule, + FormsModule, + PanelModule, + ScrollPanelModule + ] +}) +export class UserDatasetsModule { } diff --git a/UI/src/app/modules/user/login/login.component.html b/UI/src/app/modules/user/login/login.component.html new file mode 100644 index 000000000..d5d986c10 --- /dev/null +++ b/UI/src/app/modules/user/login/login.component.html @@ -0,0 +1,35 @@ +
+
+
+
+

Please sign in

+ +
+ + + + +
+ + +
+ + + + +
+ + + + + +
+
+
+
+ + diff --git a/UI/src/app/pages/login/login.component.scss b/UI/src/app/modules/user/login/login.component.scss similarity index 100% rename from UI/src/app/pages/login/login.component.scss rename to UI/src/app/modules/user/login/login.component.scss diff --git a/UI/src/app/pages/login/login.component.spec.ts b/UI/src/app/modules/user/login/login.component.spec.ts similarity index 87% rename from UI/src/app/pages/login/login.component.spec.ts rename to UI/src/app/modules/user/login/login.component.spec.ts index 386e3e9cc..c07afe903 100644 --- a/UI/src/app/pages/login/login.component.spec.ts +++ b/UI/src/app/modules/user/login/login.component.spec.ts @@ -1,9 +1,9 @@ import {ComponentFixture, TestBed} from '@angular/core/testing'; import {LoginComponent} from './login.component'; -import {AuthService} from '../../modules/core/services/auth/auth.service'; +import {AuthService} from '../../core/services/auth/auth.service'; import {Router} from '@angular/router'; -import {ToastService} from '../../modules/core/services/toast/toast.service'; +import {ToastService} from '../../core/services/toast/toast.service'; describe('LoginComponent', () => { let component: LoginComponent; diff --git a/UI/src/app/pages/login/login.component.ts b/UI/src/app/modules/user/login/login.component.ts similarity index 69% rename from UI/src/app/pages/login/login.component.ts rename to UI/src/app/modules/user/login/login.component.ts index 7f9ccaa7d..806469ee7 100644 --- a/UI/src/app/pages/login/login.component.ts +++ b/UI/src/app/modules/user/login/login.component.ts @@ -1,9 +1,9 @@ -import {Component, OnInit} from '@angular/core'; -import {AuthService} from '../../modules/core/services/auth/auth.service'; -import {LoginDto} from '../../modules/core/services/auth/login.dto'; +import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core'; +import {AuthService} from '../../core/services/auth/auth.service'; +import {LoginDto} from '../../core/services/auth/login.dto'; import 
{FormControl, FormGroup, Validators} from '@angular/forms'; import {Router} from '@angular/router'; -import {ToastService} from '../../modules/core/services/toast/toast.service'; +import {ToastService} from '../../core/services/toast/toast.service'; @Component({ @@ -13,6 +13,8 @@ import {ToastService} from '../../modules/core/services/toast/toast.service'; providers: [] }) export class LoginComponent implements OnInit { + @Input() navigateAfter: boolean; + @Output() loggedIn = new EventEmitter(); loginDto = new LoginDto('', ''); submitted = false; @@ -42,9 +44,12 @@ export class LoginComponent implements OnInit { this.loginService.login(this.loginDto).subscribe(authenticated => { - if (authenticated) { + if (authenticated && this.navigateAfter) { this.router.navigate([this.prevUrl]).then( value => this.toastService.showSuccessWithHeader('Successful login', 'Welcome ' + this.loginService.currentUser.username)); + } else if (authenticated && !this.navigateAfter) { + this.loggedIn.emit(authenticated); + this.toastService.showSuccessWithHeader('Successful login', 'Welcome ' + this.loginService.currentUser.username); } else { this.toastService.showErrorWithHeader('Login failed', 'Wrong username or password'); } diff --git a/UI/src/app/modules/validation-reference/components/validation-reference/reference-model.ts b/UI/src/app/modules/validation-reference/components/validation-reference/reference-model.ts new file mode 100644 index 000000000..e755fbb7a --- /dev/null +++ b/UI/src/app/modules/validation-reference/components/validation-reference/reference-model.ts @@ -0,0 +1,9 @@ +import {DatasetConfigModel} from '../../../../pages/validate/dataset-config-model'; + +export class ReferenceModel { + constructor(public temporal: DatasetConfigModel, + public spatial: DatasetConfigModel, + public scaling: DatasetConfigModel + ) { + } +} diff --git a/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.html 
b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.html new file mode 100644 index 000000000..237087ba1 --- /dev/null +++ b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.html @@ -0,0 +1,35 @@ +
+
+ + + +
+ + {{item.datasetModel.selectedDataset?.pretty_name}} ({{item.datasetModel.selectedVersion?.pretty_name}}) + + (Not recommended) +
+
+ +
+ {{item.datasetModel.selectedDataset?.pretty_name}} ({{item.datasetModel.selectedVersion?.pretty_name}}) + (Not recommended) +
+ +
+
+ +
+
+ + +
+ diff --git a/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.scss b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.scss new file mode 100644 index 000000000..e69de29bb diff --git a/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.spec.ts b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.spec.ts new file mode 100644 index 000000000..72eb88913 --- /dev/null +++ b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {ValidationReferenceComponent} from './validation-reference.component'; + +describe('ValidationReferenceComponent', () => { + let component: ValidationReferenceComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ ValidationReferenceComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(ValidationReferenceComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.ts b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.ts new file mode 100644 index 000000000..2bffb250e --- /dev/null +++ b/UI/src/app/modules/validation-reference/components/validation-reference/validation-reference.component.ts @@ -0,0 +1,65 @@ +import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core'; +import {BehaviorSubject, Observable} from 'rxjs'; +import {DatasetConfigModel} from 
'../../../../pages/validate/dataset-config-model'; +import {ValidationModel} from '../../../../pages/validate/validation-model'; + +@Component({ + selector: 'qa-validation-reference', + templateUrl: './validation-reference.component.html', + styleUrls: ['./validation-reference.component.scss'] +}) +export class ValidationReferenceComponent implements OnInit { + + datasets$: Observable = new Observable(); + + @Input() validationModel: ValidationModel; + @Input() referenceType: string; + @Output() hoverOverDataset = new EventEmitter(); + + chosenDatasets$: BehaviorSubject = new BehaviorSubject(null); + selectionModel$: BehaviorSubject = new BehaviorSubject(null); + selectedValue: DatasetConfigModel; + constructor() { + } + + ngOnInit(): void { + this.selectedValue = this.validationModel.datasetConfigurations.find(datasetConfig => datasetConfig[this.referenceType].getValue()); + this.selectionModel$.next(this.selectedValue); + } + + onDatasetChange(reference = null): void { + if (!reference){ + this.selectedValue[this.referenceType].next(false); + } + this.selectedValue = this.selectionModel$.getValue(); + this.selectedValue[this.referenceType].next(true); + } + + onHoverOverDataset(item, highlight): void{ + this.hoverOverDataset.emit({hoveredDataset: item, highlight}); + } + + verifyOptions(): BehaviorSubject{ + this.chosenDatasets$.next(this.validationModel.datasetConfigurations); + if (this.referenceType === 'spatialReference$'){ + const listOfISMNDatasets = this.validationModel.datasetConfigurations.filter(dataset => + dataset.datasetModel.selectedDataset?.short_name === 'ISMN'); + if (listOfISMNDatasets.length !== 0){ + this.chosenDatasets$.next(listOfISMNDatasets); + } + } + return this.chosenDatasets$; + } + + verifyChosenValue(): BehaviorSubject{ + this.selectedValue = this.validationModel.datasetConfigurations.find(datasetConfig => datasetConfig[this.referenceType].getValue()); + this.selectionModel$.next(this.selectedValue); + return this.selectionModel$; + 
} + + setReference(reference: DatasetConfigModel): void{ + this.selectionModel$.next(reference); + this.onDatasetChange(reference); + } + +} diff --git a/UI/src/app/modules/validation-reference/validation-reference.module.ts b/UI/src/app/modules/validation-reference/validation-reference.module.ts new file mode 100644 index 000000000..bde655234 --- /dev/null +++ b/UI/src/app/modules/validation-reference/validation-reference.module.ts @@ -0,0 +1,19 @@ +import {NgModule} from '@angular/core'; +import {CommonModule} from '@angular/common'; +import {ValidationReferenceComponent} from './components/validation-reference/validation-reference.component'; +import {DropdownModule} from 'primeng/dropdown'; +import {FormsModule} from '@angular/forms'; +import {TooltipModule} from 'primeng/tooltip'; + + +@NgModule({ + declarations: [ValidationReferenceComponent], + exports: [ValidationReferenceComponent], + imports: [ + CommonModule, + DropdownModule, + FormsModule, + TooltipModule + ] +}) +export class ValidationReferenceModule { } diff --git a/UI/src/app/modules/validation-result/components/buttons/buttons.component.html b/UI/src/app/modules/validation-result/components/buttons/buttons.component.html index 7567f58c7..541fa7578 100644 --- a/UI/src/app/modules/validation-result/components/buttons/buttons.component.html +++ b/UI/src/app/modules/validation-result/components/buttons/buttons.component.html @@ -220,7 +220,7 @@ - + You can not copy nor reload this validation, because it contains data belonging to another user. 
- --> + + + + { output_file_name: null, progress: 0, publishing_in_progress: false, - reference_configuration: 4699, + spatial_reference_configuration: 4699, scaling_method: 'mean_std', scaling_ref: 4699, start_time: new Date('2021-10-05T12:53:27.156239Z'), diff --git a/UI/src/app/modules/validation-result/components/result-files/result-files.component.html b/UI/src/app/modules/validation-result/components/result-files/result-files.component.html index 03a5fce5a..047539993 100644 --- a/UI/src/app/modules/validation-result/components/result-files/result-files.component.html +++ b/UI/src/app/modules/validation-result/components/result-files/result-files.component.html @@ -8,12 +8,11 @@

Result files

-
-
+
Result files
{ output_file_name: null, progress: 0, publishing_in_progress: false, - reference_configuration: 1, + spatial_reference_configuration: 1, scaling_method: 'mean_std', scaling_ref: 1, start_time: new Date('2021-10-05T12:53:27.156239Z'), diff --git a/UI/src/app/modules/validation-result/components/result-files/result-files.component.ts b/UI/src/app/modules/validation-result/components/result-files/result-files.component.ts index 9991808eb..d60d50de5 100644 --- a/UI/src/app/modules/validation-result/components/result-files/result-files.component.ts +++ b/UI/src/app/modules/validation-result/components/result-files/result-files.component.ts @@ -1,5 +1,5 @@ import {Component, Input, OnInit} from '@angular/core'; -import {EMPTY, Observable} from 'rxjs'; +import {Observable, of} from 'rxjs'; import {MetricsPlotsDto} from '../../../core/services/validation-run/metrics-plots.dto'; import {ValidationrunService} from '../../../core/services/validation-run/validationrun.service'; import {HttpParams} from '@angular/common/http'; @@ -37,6 +37,10 @@ export class ResultFilesComponent implements OnInit { ngOnInit(): void { this.updateMetricsWithPlots(); + this.updatedMetrics$.subscribe(metrics => { + this.selectedMetrics = metrics[0]; + this.selectedBoxplot = metrics[0].boxplot_dicts[0]; + }); } private updateMetricsWithPlots(): void { @@ -55,7 +59,8 @@ export class ResultFilesComponent implements OnInit { ); } - onMetricChange(): void { + onMetricChange(option): void { + console.log(option); this.metricIndx = this.selectedMetrics.ind; // resetting boxplot index this.boxplotIndx = 0; @@ -86,8 +91,8 @@ export class ResultFilesComponent implements OnInit { getPlots(files: any): Observable { let params = new HttpParams(); // handling an empty list added - if (files.length === 0){ - return EMPTY; + if (files.length === 0 || files[0].length === 0){ + return of([]); } files.forEach(file => { diff --git 
a/UI/src/app/modules/validation-result/components/sorting-form/sorting-form.component.ts b/UI/src/app/modules/validation-result/components/sorting-form/sorting-form.component.ts index 7696b52dd..ddbfec800 100644 --- a/UI/src/app/modules/validation-result/components/sorting-form/sorting-form.component.ts +++ b/UI/src/app/modules/validation-result/components/sorting-form/sorting-form.component.ts @@ -12,8 +12,8 @@ export const SORT_BY_NAME_QUERY_NAME = 'name_tag'; export const SORT_BY_STATUS_DISPLAY_NAME = 'Status'; export const SORT_BY_STATUS_QUERY_NAME = 'progress'; -export const SORT_BY_REFERENCE_DISPLAY_NAME = 'Reference dataset'; -export const SORT_BY_REFERENCE_QUERY_NAME = 'reference_configuration_id__dataset__pretty_name'; +export const SORT_BY_REFERENCE_DISPLAY_NAME = 'Spatial reference dataset'; +export const SORT_BY_REFERENCE_QUERY_NAME = 'spatial_reference_configuration_id__dataset__pretty_name'; export const ORDER_DIRECTION_DESC = 'descending'; export const ORDER_DIRECTION_DESC_PREP = '-'; diff --git a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.html b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.html index 702f89426..b87e58478 100644 --- a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.html +++ b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.html @@ -19,8 +19,8 @@

Summary statistics

The mean, median and standard deviation are calculated on the validation results aggregated by metric - and dataset. The reference dataset used is {{ (refDataset$ | async)?.pretty_name }} - ({{ (refDatasetVersion$ | async)?.pretty_name }}, {{ (refDatasetVariable$ | async)?.pretty_name }}). + and dataset. The spatial reference dataset used is {{ (refDataset$ | async)?.pretty_name }} + ({{ (refDatasetVersion$ | async)?.short_name }}, {{ (refDatasetVariable$ | async)?.pretty_name }}).
diff --git a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.spec.ts b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.spec.ts index 2b1e2d771..a031098f8 100644 --- a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.spec.ts +++ b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.spec.ts @@ -64,7 +64,7 @@ describe('SummaryStatisticsComponent', () => { output_file_name: null, progress: 0, publishing_in_progress: false, - reference_configuration: 1, + spatial_reference_configuration: 1, scaling_method: 'mean_std', scaling_ref: 1, start_time: new Date('2021-10-05T12:53:27.156239Z'), diff --git a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.ts b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.ts index a6b0f3a09..832c204c3 100644 --- a/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.ts +++ b/UI/src/app/modules/validation-result/components/summary-statistics/summary-statistics.component.ts @@ -45,7 +45,7 @@ export class SummaryStatisticsComponent implements OnInit { } getRefConfig(): void{ - this.refConfig = this.configs.find(config => config.id === this.validationRun.reference_configuration); + this.refConfig = this.configs.find(config => config.id === this.validationRun.spatial_reference_configuration); this.refDataset$ = this.datasetService.getDatasetById(this.refConfig.dataset); this.refDatasetVersion$ = this.datasetVersionService.getVersionById(this.refConfig.version); this.refDatasetVariable$ = this.datasetVariableService.getVariableById(this.refConfig.variable); diff --git a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.html 
b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.html index 26584a32c..279e45d70 100644 --- a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.html +++ b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.html @@ -68,23 +68,31 @@

Validation copied on {{ validationRun.start_time | date: dateFormat :timeZone }} {{timeZone}}, original validation run on {{this.originalDate| date: 'mediumDate' :timeZone }}. -
  • Started validation on {{ validationRun.start_time | date: dateFormat :timeZone }} {{timeZone}}, finished +
  • Started validation + on {{ validationRun.start_time | date: dateFormat :timeZone }} {{timeZone}}, finished on {{ validationRun.end_time | date: dateFormat :timeZone }} {{timeZone}}.
  • Compared {{ configurations.length }} datasets: -
      +
      1. - Dataset {{ind + 1}}: - Reference: - - {{config.dataset}} ({{config.version}}, {{config.variable}}) - [Filters: - {{filter}}; - - {{paramFilter}} {{config.parametrisedFiltersValues[indF]}} - ] + + {{config.is_spatial_reference ? 0 : ind + 1}}-{{config.dataset}} ({{config.version}}, {{config.variable}} + {{config.variableUnit ? '[' + config.variableUnit + ']': ''}} + ) + {{validationConfigService.getInformationOnTheReference(config.is_spatial_reference, + config.is_temporal_reference, config.is_scaling_reference)}} +
        + Filters: +
          +
        • {{filter}};
        • +
        +
          +
        • + {{paramFilter}}: {{config.parametrisedFiltersValues[indF].split(',').join(', ')}}; +
        • +
      2. -
    +
  • Spatial filter bounding box: [{{ validationRun.min_lat }}, {{ validationRun.min_lon }} @@ -137,9 +145,9 @@

    deactivated.

  • -
  • Scaling reference: +
  • Scaling reference: - + {{config.dataset}} ({{ config.version }}, {{ config.variable }}) [Filters: @@ -155,10 +163,9 @@

  • - -
  • Scaling reference: {{ scalingMethods[validationRun.scaling_method] }}.
  • -
    -
  • Scaling method: {{ scalingMethods[validationRun.scaling_method] }}.
  • +
  • Scaling + method: {{scalingMethod}}. +
  • Processing took {{ runTime }} minutes (wall time).
  • @@ -166,7 +173,8 @@

  • for {{errorRate * 100 | number: '.0'}}% ({{ validationRun.error_points }} - of {{ validationRun.total_points }}) of the processed locations (grid points) the validation metrics could not be calculated. + of {{ validationRun.total_points }}) of the processed locations (grid points) the validation metrics could + not be calculated. For an overview of potential causes select '# status' (available from version 2.2) in the 'Result files' section.
  • @@ -196,7 +204,8 @@

    {{ validationRun.doi }}.

  • - +

    diff --git a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.spec.ts b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.spec.ts index 7ec88c64e..608bb2026 100644 --- a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.spec.ts +++ b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.spec.ts @@ -103,7 +103,7 @@ describe('ValidationSummaryComponent', () => { output_file_name: null, progress: 0, publishing_in_progress: false, - reference_configuration: 4699, + spatial_reference_configuration: 4699, scaling_method: 'mean_std', scaling_ref: 4699, start_time: new Date('2021-10-05T12:53:27.156239Z'), diff --git a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.ts b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.ts index c1d0e6d96..a5d54f7f2 100644 --- a/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.ts +++ b/UI/src/app/modules/validation-result/components/validation-summary/validation-summary.component.ts @@ -6,13 +6,13 @@ import {DatasetVersionService} from '../../../core/services/dataset/dataset-vers import {DatasetVariableService} from '../../../core/services/dataset/dataset-variable.service'; import {FilterService} from '../../../core/services/filter/filter.service'; import {map} from 'rxjs/operators'; -import {SCALING_CHOICES} from '../../../scaling/components/scaling/scaling.component'; import {GlobalParamsService} from '../../../core/services/global/global-params.service'; import {ValidationrunService} from '../../../core/services/validation-run/validationrun.service'; import {AuthService} from '../../../core/services/auth/auth.service'; import {fas} from '@fortawesome/free-solid-svg-icons'; import {Router} from '@angular/router'; 
import {ValidationrunDto} from '../../../core/services/validation-run/validationrun.dto'; +import {ValidationRunConfigService} from '../../../../pages/validate/service/validation-run-config.service'; @Component({ @@ -30,7 +30,6 @@ export class ValidationSummaryComponent implements OnInit { configurations$: Observable; dateFormat = 'medium'; timeZone = 'UTC'; - scalingMethods = SCALING_CHOICES; hideElement = true; originalDate: Date; runTime: number; @@ -43,6 +42,7 @@ export class ValidationSummaryComponent implements OnInit { isNearExpiry$: BehaviorSubject = new BehaviorSubject(null); faIcons = {faArchive: fas.faArchive, faPencil: fas.faPen}; + scalingMethod: string; constructor(private datasetService: DatasetService, private datasetVersionService: DatasetVersionService, @@ -51,13 +51,17 @@ export class ValidationSummaryComponent implements OnInit { public globalParamsService: GlobalParamsService, private validationService: ValidationrunService, private authService: AuthService, - private router: Router) { + private router: Router, + public validationConfigService: ValidationRunConfigService) { } ngOnInit(): void { this.setInitialValues(); this.updateConfig(); this.getOriginalDate(); + this.validationConfigService.getScalingMethods().subscribe(methods => { + this.scalingMethod = methods.find(method => method.method === this.validationRun.scaling_method).description; + }); } getCurrentUser(): number { @@ -67,7 +71,7 @@ export class ValidationSummaryComponent implements OnInit { private updateConfig(): void { this.configurations$ = combineLatest( this.validationModel.datasetConfigs, - this.datasetService.getAllDatasets(), + this.datasetService.getAllDatasets(true), this.datasetVersionService.getAllVersions(), this.datasetVariableService.getAllVariables(), this.filterService.getAllFilters(), @@ -90,7 +94,10 @@ export class ValidationSummaryComponent implements OnInit { config.version === dsVersion.id).pretty_name, variable: variables.find(dsVar => - config.variable 
=== dsVar.id).pretty_name, + config.variable === dsVar.id).short_name, + + variableUnit: variables.find(dsVar => + config.variable === dsVar.id).unit, filters: config.filters.map(f => dataFilters.find(dsF => dsF.id === f).description), diff --git a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.html b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.html index 9c5594644..6cce83e80 100644 --- a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.html +++ b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.html @@ -110,24 +110,29 @@
    Data:
    - + {{config.dataset}}, {{config.version}}, - {{config.variable}}
    -
    -
    -
    - -
    - Reference:
    - - - {{config.dataset}}, - {{config.version}}, - {{config.variable}}
    + {{config.variable}} + {{config.variableUnit ? '[' + config.variableUnit + ']': ''}} + + {{validationConfigService.getInformationOnTheReference(config.is_spatial_reference, + config.is_temporal_reference, config.is_scaling_reference)}} +
    + + + + + + + + + + +
    diff --git a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.spec.ts b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.spec.ts index 4be75ab1c..583ad06eb 100644 --- a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.spec.ts +++ b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.spec.ts @@ -81,7 +81,7 @@ describe('ValidationrunRowComponent', () => { output_file_name: null, progress: 0, publishing_in_progress: false, - reference_configuration: 4699, + spatial_reference_configuration: 4699, scaling_method: 'mean_std', scaling_ref: 4699, start_time: new Date('2021-10-05T12:53:27.156239Z'), diff --git a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.ts b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.ts index e5d7f9563..e7d4039c3 100644 --- a/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.ts +++ b/UI/src/app/modules/validation-result/components/validationrun-row/validationrun-row.component.ts @@ -9,6 +9,7 @@ import {fas} from '@fortawesome/free-solid-svg-icons'; import {ValidationrunService} from '../../../core/services/validation-run/validationrun.service'; import {BehaviorSubject, combineLatest, Observable} from 'rxjs'; import {map} from 'rxjs/operators'; +import {ValidationRunConfigService} from '../../../../pages/validate/service/validation-run-config.service'; @Component({ @@ -37,7 +38,8 @@ export class ValidationrunRowComponent implements OnInit, OnDestroy { private datasetVersionService: DatasetVersionService, private datasetVariableService: DatasetVariableService, public globalParamsService: GlobalParamsService, - private validationService: ValidationrunService) { + private validationService: ValidationrunService, + public 
validationConfigService: ValidationRunConfigService,) { } ngOnInit(): void { @@ -53,7 +55,7 @@ export class ValidationrunRowComponent implements OnInit, OnDestroy { private updateConfig(): void { this.configurations$ = combineLatest( this.datasetConfigService.getConfigByValidationrun(this.validationRun.id), - this.datasetService.getAllDatasets(), + this.datasetService.getAllDatasets(true), this.datasetVersionService.getAllVersions(), this.datasetVariableService.getAllVariables() ).pipe( @@ -69,7 +71,10 @@ export class ValidationrunRowComponent implements OnInit, OnDestroy { config.version === dsVersion.id).pretty_name, variable: variables.find(dsVar => - config.variable === dsVar.id).pretty_name, + config.variable === dsVar.id).short_name, + + variableUnit: variables.find(dsVar => + config.variable === dsVar.id).unit, }) ) ) diff --git a/UI/src/app/modules/validation-result/services/dataset-configuration.dto.ts b/UI/src/app/modules/validation-result/services/dataset-configuration.dto.ts index d95161cc4..e3f1dcddf 100644 --- a/UI/src/app/modules/validation-result/services/dataset-configuration.dto.ts +++ b/UI/src/app/modules/validation-result/services/dataset-configuration.dto.ts @@ -7,7 +7,10 @@ export class DatasetConfigurationDto { public variable: number, public filters: number[], public parametrised_filters: number[], - public parametrisedfilter_set: number[] + public parametrisedfilter_set: number[], + public is_spatial_reference: boolean, + public is_temporal_reference: boolean, + public is_scaling_reference: boolean ) { } } diff --git a/UI/src/app/pages/dataset-info/dataset-info.component.html b/UI/src/app/pages/dataset-info/dataset-info.component.html index a0fafaf41..c0c1b3960 100644 --- a/UI/src/app/pages/dataset-info/dataset-info.component.html +++ b/UI/src/app/pages/dataset-info/dataset-info.component.html @@ -1,7 +1,5 @@
    -
    - -
    +

    Datasets - Data to be validated and to be used as a reference

    The following datasets can be validated within the QA4SM tool. These datasets are available in both the "data" @@ -9,15 +7,6 @@

    Datasets - Data to be validated and to be

    - -

    Datasets - Reference Data

    -

    - The following datasets can be used only as a reference within the QA4SM tool. These datasets are available in - the - "reference" section of the validate page. -

    -
    - @@ -31,10 +20,10 @@

    Datasets - Reference Data

    - - - + - - - + +
    {{ dataset.pretty_name }} + {{ dataset.pretty_name }} * +
    • {{ version }}
      @@ -51,9 +40,9 @@

      Datasets - Reference Data

    +
    • {{filter}}: {{dataset.filtersHelpText[ind]}} @@ -63,8 +52,7 @@

      Datasets - Reference Data

    - * dataset temporarily excluded from the reference list for technical reasons
    + * dataset temporarily excluded from the reference list for technical reasons

    -
    diff --git a/UI/src/app/pages/dataset-info/dataset-info.component.spec.ts b/UI/src/app/pages/dataset-info/dataset-info.component.spec.ts index b436384fa..08181333a 100644 --- a/UI/src/app/pages/dataset-info/dataset-info.component.spec.ts +++ b/UI/src/app/pages/dataset-info/dataset-info.component.spec.ts @@ -35,11 +35,12 @@ describe('DatasetInfoComponent', () => { detailed_description: '', source_reference: '', citation: '', - is_only_reference: true, + is_spatial_reference: true, versions: [3, 4], variables: [2], filters: [1], - not_as_reference: false + not_as_reference: false, + user: null }, { id: 1, @@ -50,11 +51,12 @@ describe('DatasetInfoComponent', () => { detailed_description: '', source_reference: '', citation: '', - is_only_reference: false, + is_spatial_reference: false, versions: [1, 2], variables: [1], filters: [1, 2], - not_as_reference: false + not_as_reference: false, + user: null }, ]; @@ -107,7 +109,8 @@ describe('DatasetInfoComponent', () => { default_parameter: null, to_include: null, disable_filter: null, - default_set_active: true + default_set_active: true, + readonly: false, }, { id: 2, @@ -119,7 +122,8 @@ describe('DatasetInfoComponent', () => { default_parameter: null, to_include: '4,5', disable_filter: null, - default_set_active: true + default_set_active: true, + readonly: false, }, ]; diff --git a/UI/src/app/pages/help/help.component.html b/UI/src/app/pages/help/help.component.html index 0e75c5640..b07e32015 100644 --- a/UI/src/app/pages/help/help.component.html +++ b/UI/src/app/pages/help/help.component.html @@ -1,4 +1,4 @@ -
    +

    Go to section

    Validate Page
    @@ -7,6 +7,7 @@

    Go to section

    Publishing
    Published Validations Page
    Validation Comparison
    + User Data Upload

    @@ -32,7 +33,8 @@

    Validate Page

    Step 1 - Choose the data you would like to validate - including the dataset name, the version of the dataset, and one of the soil moisture variables provided in the dataset. All the supported datasets are listed on - datasets page here. + datasets page here. Please note + that there have to be at least two datasets added, as one of them will be treated as the reference one.

    dataset selection @@ -40,7 +42,7 @@

    Validate Page

    Step 2 [optional] - Choose the criteria by which you would like to filter this dataset. The filters available depend on the data contained within the chosen dataset. For example, - you can filter the C3S data to include only data with no inconsistencies detected (flag = 0). + you can filter the C3S data to include only data with variable in valid geophysical range. Details of the filter options provided for each dataset are given on the supported datasets page. You can also @@ -48,15 +50,15 @@

    Validate Page

    Step 3 [optional] - If you want to intercompare several datasets, you can add more datasets to the validation - using the Add dataset button, up to a maximum of five. Configure the settings for the additional datasets by + using the Add dataset button, up to a maximum of six. Configure the settings for the additional datasets by selecting the respective tab and repeating steps 1 and 2 above.

    - Intercomparison: The intercomparison mode of QA4SM validates up to five satellite data sets against a common - reference data set. For each reference location (e.g. each ISMN station) it finds the nearest observation series in + Intercomparison: The intercomparison mode of QA4SM validates up to six satellite data sets against a common + spatial reference data set. For each reference location (e.g. each ISMN station) it finds the nearest observation series in all selected satellite products. - All observations series are then scaled (if selected) and temporally matched to the reference series. + All observations series are then scaled (if selected) and temporally matched to the temporal reference series. For validation only the common time stamps (that are available in all satellite products) are used to calculate validation metrics between the reference and each individual satellite product. @@ -68,21 +70,17 @@

    Validate Page

    intercomparison

    - Step 4 - Choose the reference dataset you would like to use for the - validation including the dataset name, the version of the dataset, and the soil moisture variables provided in - the dataset. + Step 4 - Choose which dataset selected for validation should be used as the spatial and temporal reference. + Please note, that if the ISMN dataset belongs to the pool, it is set as the spatial reference automatically and there + is no possibility of changing it.

    reference dataset

    - Step 5 [optional] - Choose the criteria by which you would like to filter the reference data prior to - running the validation. The filters available depend on the data contained within the chosen dataset. For example, - you can filter the ISMN data to include only data points where the soil_moisture_flag is "G" for "good". - You can also hover your mouse pointer over the question mark next to a filter - to get a short explanation.
    + Step 5 [optional] - - If the reference dataset is ISMN you can also choose specific networks and/or measurements in a defined depth range. + If the ISMN belongs to the dataset pool you can also choose specific networks and/or measurements in a defined depth range. To do that you have to open an appropriate window by clicking a 'select...' link next to the filter you want to parameterised.

    @@ -142,8 +140,10 @@

    Validate Page

    Step 8 [optional] - Choose the date range over which the validation should be performed and temporal matching window size. The only accepted date format for the validation period is: YYYY-MM-DD. It is also possible to choose a date from - a calendar, available when clicking the date input field. By default, the date range is determined through temporal matching of the data and reference selected. - If the validation period fields are left empty, the validation will not be started. For the time range covered by the various + a calendar, available when clicking the date input field. By default, the date range is determined through temporal + matching of the data and reference selected. + If the validation period fields are left empty, the validation will not be started. For the time range covered by + the various datasets, see the datasets page.
    @@ -173,12 +173,13 @@

    Validate Page

    - Step 10 - Choose how the data (or reference) will be scaled before metrics calculation. - The data can be scaled to the reference (default) or vice versa. - Note that in an intercomparision validation (with multiple datasets), only scaling to reference - is possible. + Step 10 - Choose how the data will be scaled before metrics calculation. + The data can be scaled to any dataset selected for validation. + The scaling method determines how values of one dataset are mapped onto the value range of the other dataset for better comparability. + + Note that if the chosen method is 'No scaling', there is no scaling reference drop down list available.

    scaling @@ -605,6 +606,66 @@

    Validation Comparison + +

    User Data Upload Procedure

    +

    + To upload your data, go to the 'My datasets' page by clicking the proper button on the navigation bar. +

    +
    + select-file +
    +

    + Click the 'Select file' button, and then choose 'Add file' to add the file you want to upload. Note that you'll be able + to choose only .netCDF or .zip files, both types need to follow the standard described + here. If the .zip file you are trying to upload contains + file types other than .netCDF, .csv or .yml, you will not be able to upload the file. Please note that you have 5GB + space available for your files. In case you need more, please contact our team.

    +
    + select-file +
    +

    + When you choose a proper file, a form for providing metadata will show up. You need to provide dataset and version names. + Additionally, you can introduce display names for both the dataset and its version, which are going to be used on the list of available datasets. + If the display name is not provided, the basic name is going to be used.

    +
    + select-file +
    +

    + If you want to change file, you can do it by clicking 'Change file' button. If you want to save provided information, click 'Save' button. + Clicking 'Save' button will close the metadata form, but won't start uploading yet. To start uploading, click the 'Upload file' button. +

    +
    + select-file +
    +

    + Depending on the file size and your upload speed, it may take from a few seconds up to tens of minutes. You will see a blue spinner and information + about the percentage of the uploaded file. Eventually, information will show up that the file has been uploaded, but it still needs to be pre-processed.

    +
    + select-file +
    +

    + When the file gets uploaded and preprocessed, a row with basic information on the uploaded file will show up. + You'll be able to change the dataset's name, the version's name and choose a different variable, if the default one + is improper or we were not able to retrieve the proper name. + By clicking the 'Remove dataset' button, you will remove the uploaded file. Note that removing is possible only if the uploaded data + hasn't been used for any validation.

    +
    + select-file +
    + +

    If you want to use your dataset in a validation, simply go to the Validate page and expand the dataset dropdown list. Your dataset will be there.

    +
    + select-file +
    +

    + Now you can choose other settings and run a validation. Note that for now it is not possible to publish a validation that uses private data. + You can still share your validation with another user, but they won't be able to rerun it, as they don't have access to your dataset.

    + diff --git a/UI/src/app/pages/help/help.component.ts b/UI/src/app/pages/help/help.component.ts index e761c4ea3..bbafde3cd 100644 --- a/UI/src/app/pages/help/help.component.ts +++ b/UI/src/app/pages/help/help.component.ts @@ -1,7 +1,9 @@ -import {Component, OnInit} from '@angular/core'; +import {AfterViewInit, Component, ElementRef, OnInit, ViewChild} from '@angular/core'; import {GlobalParamsService} from '../../modules/core/services/global/global-params.service'; import {fas} from '@fortawesome/free-solid-svg-icons'; import {SettingsService} from '../../modules/core/services/global/settings.service'; +import {HttpClient} from '@angular/common/http'; +import {ActivatedRoute} from '@angular/router'; const plotsUrlPrefix = '/static/images/help/'; @@ -10,7 +12,7 @@ const plotsUrlPrefix = '/static/images/help/'; templateUrl: './help.component.html', styleUrls: ['./help.component.scss'] }) -export class HelpComponent implements OnInit { +export class HelpComponent implements OnInit, AfterViewInit { // Icons for bullet points faIcons = { faArchive: fas.faArchive, @@ -41,9 +43,30 @@ export class HelpComponent implements OnInit { datsetConfigurationComparison: string; validationSelectionsComparison: string; spatialExtentComparison: string; + chosenFile: string; + selectFile: string; + uploadFileWindow: string; + metadataWindow: string; + uploadingSpinner: string; + dataRow: string; + userDataOnTheList: string; constructor(private globalParamsService: GlobalParamsService, - public settingsService: SettingsService) { + public settingsService: SettingsService, + private http: HttpClient, + private activeRoute: ActivatedRoute) { + } + + @ViewChild('helpPage') container: ElementRef; + + ngAfterViewInit(): void{ + this.activeRoute.params.subscribe(param => { + if (param.pageSec){ + const section = this.container.nativeElement.querySelector(`#${param.pageSec}`); + // section?.scrollTo(); + section?.scrollIntoView(); + } + }); } ngOnInit(): void { @@ -73,6 +96,13 @@ export 
class HelpComponent implements OnInit { this.datsetConfigurationComparison = plotsUrlPrefix + 'dataset-configuration-for-comparison.png'; this.validationSelectionsComparison = plotsUrlPrefix + 'validation-selection-comparison.png'; this.spatialExtentComparison = plotsUrlPrefix + 'spatial-extent-comparison.png'; + this.metadataWindow = plotsUrlPrefix + 'metadata_window.png'; + this.uploadFileWindow = plotsUrlPrefix + 'upload_file_window.png'; + this.selectFile = plotsUrlPrefix + 'select_file.png'; + this.chosenFile = plotsUrlPrefix + 'chosen_file.png'; + this.uploadingSpinner = plotsUrlPrefix + 'uploading_spinner.png'; + this.dataRow = plotsUrlPrefix + 'data_row.png'; + this.userDataOnTheList = plotsUrlPrefix + 'user_data_on_the_list.png'; } getAdminMail(): string { diff --git a/UI/src/app/pages/home/home.component.html b/UI/src/app/pages/home/home.component.html index 3e037a445..d73df2d7c 100644 --- a/UI/src/app/pages/home/home.component.html +++ b/UI/src/app/pages/home/home.component.html @@ -109,6 +109,7 @@

    Share with us your opin

    + If you have any questions or you would like to share your opinion with us, please email us at support (at) qa4sm.eu.
    diff --git a/UI/src/app/pages/home/home.component.ts b/UI/src/app/pages/home/home.component.ts index d1f5db408..04c66bcaf 100644 --- a/UI/src/app/pages/home/home.component.ts +++ b/UI/src/app/pages/home/home.component.ts @@ -82,7 +82,6 @@ export class HomeComponent implements OnInit { this.authService.authenticated.subscribe(authenticated => this.userLoggedIn = authenticated); this.settings$ = this.settingsService.getAllSettings(); this.settings$.subscribe(data => { - console.log(data); } ); } diff --git a/UI/src/app/pages/login-page/login-page.component.html b/UI/src/app/pages/login-page/login-page.component.html new file mode 100644 index 000000000..f2a377a77 --- /dev/null +++ b/UI/src/app/pages/login-page/login-page.component.html @@ -0,0 +1,4 @@ +
    + + +
    diff --git a/UI/src/app/pages/login-page/login-page.component.scss b/UI/src/app/pages/login-page/login-page.component.scss new file mode 100644 index 000000000..e69de29bb diff --git a/UI/src/app/pages/login-page/login-page.component.spec.ts b/UI/src/app/pages/login-page/login-page.component.spec.ts new file mode 100644 index 000000000..8ee3a24c5 --- /dev/null +++ b/UI/src/app/pages/login-page/login-page.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {LoginPageComponent} from './login-page.component'; + +describe('LoginPageComponent', () => { + let component: LoginPageComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ LoginPageComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(LoginPageComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/pages/login-page/login-page.component.ts b/UI/src/app/pages/login-page/login-page.component.ts new file mode 100644 index 000000000..726d952dc --- /dev/null +++ b/UI/src/app/pages/login-page/login-page.component.ts @@ -0,0 +1,15 @@ +import {Component, OnInit} from '@angular/core'; + +@Component({ + selector: 'qa-login-page', + templateUrl: './login-page.component.html', + styleUrls: ['./login-page.component.scss'] +}) +export class LoginPageComponent implements OnInit { + + constructor() { } + + ngOnInit(): void { + } + +} diff --git a/UI/src/app/pages/login/login.component.html b/UI/src/app/pages/login/login.component.html deleted file mode 100644 index 7aafbcf3b..000000000 --- a/UI/src/app/pages/login/login.component.html +++ /dev/null @@ -1,36 +0,0 @@ -
    -
    -
    - -
    -
    -

    Please sign in

    - -
    - - - - -
    - - -
    - - - - -
    - - - - -
    -
    -
    -
    - -
    diff --git a/UI/src/app/pages/my-datasets/my-datasets.component.html b/UI/src/app/pages/my-datasets/my-datasets.component.html new file mode 100644 index 000000000..611fbb75b --- /dev/null +++ b/UI/src/app/pages/my-datasets/my-datasets.component.html @@ -0,0 +1,82 @@ +
    + +
    +
    + You have no space for uploading data assigned. Please contact our team (support(at)qa4sm.eu) to solve this + problem. +
    +
    + +
    +
    + +

    Upload your data

    + +
    +
    + {{getLimitMessage()}} +
    + +
    + +
    +
    +
    + +

    Uploaded data sets

    + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    + Above you can find a list of datasets you have uploaded: ... + + Read more + +
    +
    +
      +
    • + The soil moisture variable names were retrieved from the file.
      + If they are improper or you want to use another one, click on the name and + choose a different one.
      + Please note, if the chosen name is incorrect, data validation will not be possible. +
    • +
    • + Dataset name and version can be changed by clicking on it.
      + The new name can consist of max 30 characters. Letters, digits, space and @/./+/-/_ only." +
    • +
    • + Only datasets for which there were no validation run can be removed. +
    • +
    + + Read less +
    + +
    + +
    + +
    +
    +

    + You have not uploaded any data yet. Click the 'Upload file' button to add your own data. +

    +
    +
    +
    +
    + + +
    + + diff --git a/UI/src/app/pages/my-datasets/my-datasets.component.scss b/UI/src/app/pages/my-datasets/my-datasets.component.scss new file mode 100644 index 000000000..bec47c685 --- /dev/null +++ b/UI/src/app/pages/my-datasets/my-datasets.component.scss @@ -0,0 +1,9 @@ +@import "src/styles"; +.readMoreOrLess { + //font-size: 1rem; + cursor: pointer; + color: #007BFF; +} +li{ + margin-top: 1.1rem; +} diff --git a/UI/src/app/pages/my-datasets/my-datasets.component.spec.ts b/UI/src/app/pages/my-datasets/my-datasets.component.spec.ts new file mode 100644 index 000000000..f20d42a89 --- /dev/null +++ b/UI/src/app/pages/my-datasets/my-datasets.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {MyDatasetsComponent} from './my-datasets.component'; + +describe('MyDatasetsComponent', () => { + let component: MyDatasetsComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ MyDatasetsComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(MyDatasetsComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/pages/my-datasets/my-datasets.component.ts b/UI/src/app/pages/my-datasets/my-datasets.component.ts new file mode 100644 index 000000000..6f6b3b1d9 --- /dev/null +++ b/UI/src/app/pages/my-datasets/my-datasets.component.ts @@ -0,0 +1,46 @@ +import {Component, OnInit} from '@angular/core'; +import {UserDatasetsService} from '../../modules/user-datasets/services/user-datasets.service'; +import {Observable} from 'rxjs'; +import {UserDataFileDto} from '../../modules/user-datasets/services/user-data-file.dto'; +import {AuthService} from '../../modules/core/services/auth/auth.service'; + +@Component({ + selector: 'qa-my-datasets', + templateUrl: 
'./my-datasets.component.html', + styleUrls: ['./my-datasets.component.scss'] +}) +export class MyDatasetsComponent implements OnInit { + // userDatasets = []; + constructor(private userDatasetService: UserDatasetsService, + public authService: AuthService) { } + userDatasets$: Observable; + readMore = false; + hasNoSpaceLimit: boolean; + hasNoSpaceAssigned: boolean; + + ngOnInit(): void { + this.userDatasets$ = this.userDatasetService.getUserDataList(); + this.userDatasetService.doRefresh.subscribe(value => { + if (value){ + this.userDatasets$ = this.userDatasetService.getUserDataList(); + } + }); + this.hasNoSpaceLimit = !this.authService.currentUser.space_limit_value; + this.hasNoSpaceAssigned = this.authService.currentUser.space_limit_value === 1; + } + + toggleReadMore(): void{ + this.readMore = !this.readMore; + } + + getLimitMessage(): string{ + let message; + this.hasNoSpaceLimit ? + message = 'You have no space limit for your data.' : + message = `You can use up to ${this.authService.currentUser.space_limit_value / Math.pow(10, 9)} + GB space for your data. You still have ${this.userDatasetService.getTheSizeInProperUnits(this.authService.currentUser.space_left)} available.`; + + return message; + } + +} diff --git a/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.html b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.html new file mode 100644 index 000000000..9aa8cfbaa --- /dev/null +++ b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.html @@ -0,0 +1,405 @@ +
    +

    Go to section

    +

    + Option 1: NetCDF4 Image Stacks
    + Option 2: CSV Time Series Files
    + Example Files
    +

    + +

    User Data Upload

    + +

    + With this new, experimental feature we allow users to ingest (parts of) their own data sets into QA4SM for + comparison + and validation with the state-of-the-art products available in the service. Please note that +

    +
      +
    1. + Data uploaded into the service is not accessible or downloadable by other users. Your data sets in the graphical + user + interface will only appear under your account. Other users cannot see, use or download them! +
    2. +
    3. + There might be data wipes in the future, making it necessary to re-upload your files +
        +
      1. + We do not yet provide traceability for validation results with uploaded data. +
      2. +
      3. + It is therefore not yet possible to publish results from validation runs that use uploaded data. +
      4. +
      +
    4. +
    5. + Data upload speed depends on your local internet connection. The correct structure of your data can only be + evaluated after uploading. + We therefore encourage you to use small data samples for testing. +
    6. +
    7. + The amount of data users can upload will be limited at some point. Please get in contact with us if you plan to + upload large data sets (multiple GB) at + support@qa4sm.eu. Otherwise we will delete your data again at some point. +
    8. +
    9. + The upload feature is in an early stage, if you find any bugs or need help with structuring your files correctly, + contact us at support@qa4sm.eu. +
    10. + +
    + + +

    Option 1: Upload NetCDF4 Image Stacks

    +

    + Most Level 3 and Level 4 soil moisture data is usually provided in image format with three data dimensions + (latitude, longitude, time). Images can be concatenated along the time dimension to create image stacks / data + cubes. + From these data cubes, time series can be extracted, which is the basis for our validation algorithm. +

    +

    General Requirements

    +

    + QA4SM supports image stacks in NetCDF4 format only. NetCDF is the most common format for climate data records and + detailed CF conventions to create those files exist + (available here). + In general we use the python library xarray to open netCDF files. If your data is + compatible with xarray, there are only a few more things to consider. +

    +
      +
    • + No specific file name is required when uploading a netcdf stack. The file name will be changed upon uploading + data. It is however required that filenames have the extension .nc or .nc4. +
    • +
    • + While netCDF also supports data types such as strings, those are not usable by QA4SM. It is required that your + soil moisture data and the assigned coordinates (see next chapter) are of type + integer, float or real, or double. +
    • +
    • + All variables, dimensions and attributes in the file should be named using only letters, digits, and underscores. + Required attributes (see next chapter) are case sensitive. + Variable names, long names and standard names should not exceed a length of 30 characters. +
    • +
    • + Only 'coordinate data', i.e. 3-dimensional raster data, are supported as netCDF validation input. + As data variables must be 3-dimensional, 1 coordinate variable for each dimension (latitude, longitude, time) must + be present in the file (see this section). +
    • +
    • + Latitude and Longitude coordinates assigned to soil moisture observations must always refer to the WGS84 reference + system (i.e. Longitudes from -180 to 180 °E, Latitudes between -90 and 90 °N. +
    • +
    • + A file must contain at least one soil moisture variable to use for validation. If a file contains multiple + variables, one must be selected after uploading the file in the respective dropdown menu. +
    • +
    + + + +

    Required dimensions and dimension attributes

    +

    Latitude and Longitude

    +

    + Two of the three dimensions in a netCDF image stack refer to the location of a grid cell and assign latitude and + longitude coordinates. There are two ways to pass coordinates for a grid: +

    +
      +
    1. + Latitude and longitude can be 1-dimensional coordinate variables, with a dimension of the same name assigned, that + contain the coordinate values directly (148 longitude values in the following example). + The coordinates along the two axes then span a 2-dimension grid that contains the information on the location of + all points. The longitude variable is structured accordingly. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      Variablelat
      Attributes (dimensions) + lat (lat: 148) +
      standard_name:latitude
      units:degrees_north
      valid_range:[-90. 90]
      Coordinates [dtype]lat (lat) [float64]
      Data / Values[69.88, 69.62, …, 33.38, 33.1]
      +
    2. +
    3. + Alternatively latitude and longitude can also be 2-dimensionsional coordinate variables, which both have + two dimensions (in x and y direction; different name as the variables) + assigned that contain the data point locations directly (16*35 data points in the following example). The + longitude variable is structured accordingly. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      Variable lat
      Attributes (dimensions) + lat (y: 16, x: 35) +
      standard_name:latitude
      units:degrees_north
      valid_range:[-90. 90]
      Coordinates [dtypes]lat (y, x) [float64]
      lon (y, x) [float64]
      Data / Values [5.125, 5.375, …, 13.375, 13.625]
      + [5.125, 5.375, …, 13.375, 13.625]
      +
      + [5.125, 5.375, …, 13.375, 13.625]
      + [5.125, 5.375, …, 13.375, 13.625] +
      +
    4. +
    +

    + Latitude and Longitude coordinate variables must have the unit assigned! +

    +
      +
    • + Longitude: 'units: degrees_east' or one of 'units: degree_east', 'units: degree_E', 'units: degrees_E', + 'units: degreeE', and 'units: degreesE'. + If no unit is assigned or multiple coordinate variables of that unit are found, the standard_name attribute + with value 'longitude' also identifies longitude coordinates. + If no standard name is provided, the axis attribute with value 'X', otherwise the variable names (case + insensitive) 'longitude' and 'lon' should also work. +
    • +
    • + Latitude: 'units: degrees_north' or one of 'units: degree_north', 'units: degree_N', 'units: degrees_N', + 'units: degreeN', and 'units: degreesN'. + If no unit is assigned or multiple coordinate variables of that unit are found, the standard_name attribute + with value 'latitude' also identifies latitude coordinates. + If no standard name is provided, the axis attribute with value 'Y', otherwise the variable names (case + insensitive) 'latitude' and 'lat' should also work. +
    • +
    + +

    Observation time stamps

    +

    + The third dimension in a file refers to the observation time stamp. It is assigned to a 1-dimensional coordinate + variable time (10 time stamps in the following example) +

    +

    + Time coordinates are stored as numeric timestamps. The variable must therefore have a unit attribute assigned like + <UNIT> since <REF DATE>, e.g. 'Days since 2000-01-01T00:00:00+00:00' + (see also the CF conventions). + If multiple potential time variables are found, the variable with attribute 'axis: T', the 'standard_name: time' or + the variable name 'time' identify the time coordinate variable. + A calendar name should be specified in the attributes but is not required (assumes 'calendar: standard', i.e. mixed + Gregorian/Julian calendar). +

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Variabletime
    Attributes (dimensions) + time (time=10) +
    standard_name:time
    units:days since 2019-01-01 00:00:00
    calendar:proleptic_gregorian
    Coordinates [dtype]time (time) [datetime64[ns]]
    Data / Values['2019-01-01T00:00:00.0', …, '2019-01-10T00:00:00.0']
    +

    + Often, time stamps cannot be applied globally. In this case the values provided in 'time' is the basis to which a + time offset is added. The time offset must be given as a separate, 3-dimensional variable (same size as the soil + moisture data) in **Seconds** (from the global time stamp of the same image). +

    + + +

    Optional variable attributes

    + +
      +
    • + Compression and packing is handled by xarray and controlled by (optional) variable encoding attributes + 'add_offset', 'scale_factor', 'zlib' and 'complevel'. For more details on compressing netCDF files see + this chapter in + the CF conventions. +
    • +
    • + The (physical) valid range of a variable (for SM usually 0-100 or 0-1) should be identified by the variable + attributes 'valid_range' resp. 'valid_min' and 'valid_max'. + Values outside if this range will be treated as invalid. +
    • +
    • + While it is technically not necessary, it is highly recommended that all variables in a file have units assigned + (as the variable attribute 'units'). QA4SM may use these units in e.g. plots. +
    • +
    • + Data variables in the file should have a 'long_name' and/or 'standard_name' attribute. The name given under + 'long_name' will be used in plots. If no 'long_name' is given, but a 'standard_name' + (see this + table for all possible standard names), + this will be used. If neither 'long_name' nor 'standard_name' attributes exist, the variable name will be used + directly. +
    • +
    + + + +

    Option 2: Upload Time Series

    + +

    + As a more flexible way of providing data (which requires some pre-processing on the user side), we also allow + uploading a collection of individual time series stored in .csv format. + This is especially useful for datasets with non-orthogonal time steps (i.e. different time stamps at different + locations) and to reduce the amount of uploaded data + (as e.g. only time series at in situ locations can be extracted beforehand). This data can be uploaded to QA4SM as a + zip achive of individual time series stored in a separate CSV file for each location. + The location metadata (latitude, longitude, unique grid point index) has to be included in the individual file + names. Dataset wide metadata (units, variable names) are provided in a yaml file in the + same zip archive. +

    + +

    File name conventions

    +

    + The file name of an individual CSV file in the zip archive must match the following pattern: +

    + +
    <DATASET_NAME>_gpi=<GPI>_lat=<LAT>_lon=<LON>.csv
    +

    + with: +

    + + + + + + + + + + + + + + + + + +
    <DATASET_NAME>The name of the dataset (same for all files)
    <GPI>a unique grid point index for the location / file
    <LAT>latitude in °N (between -90 and 90 °N) as a floating point number
    <LON>longitude in °E (between -180 and 180 °E) as a floating point number
    +

    + E.g. the filename for a data set called 'myDataset' for a single point (ID: 000001) at the location at Latitude + 38.887 °N, Longitude 101.043 °W would be: +

    +
    myDataset_gpi=000001_lat=38.887_lon=-101.043.csv
    + +

    File format

    +

    + All CSV files must follow the same format, which consists of +

    +
      +
    • + A header row containing column names (must be the same for all files). +
    • +
    • + A date index column in a format that dateutil can parse (see here for more info). +
    • +
    • + A variable number of data columns (same number of columns for all files). +
    • +
    +

    + Below is an example for the contents of a single csv time series file containing values for one soil moisture variable: +

    +
    + Time,SM
    + 01-Jun-2016 01:30:07,0.18278
    + 02-Jun-2016 00:51:39,0.2003
    + 02-Jun-2016 12:08:07,0.181
    + 04-Jun-2016 01:13:32,0.22323
    +  ⋮
    +
    + +

    + To test whether a file complies with the required format, you can try to read it in python with pandas. + <FNAME> refers to the location of the CSV file in the following example: +

    +
    pandas.read_csv(<FNAME>, index_col=0, parse_dates=True)
    + +

    Metadata

    +

    + Metadata for the individual data variables (columns) in the CSV files (e.g. units, long name) can be provided in an + additional file with the name + 'metadata.yml' alongside the CSV files (stored in the same zip archive). This should have a similar format to this + example for a 'metadata.yml' file: +

    +
    + SM:
    +    long_name: soil moisture
    +    units: m^3/m^3
    +  ⋮
    + myvarnameX:
    +    long_name: soil moisture x
    +    units: m^3/m^3
    +
    + +

    Upload format

    +

    + All CSV files and optionally the metadata file have to be in the same directory. This directory has to be uploaded + as zip file. +

    + + +

    Example Files

    + Example files that fulfill the above described requirements to various degrees can be downloaded from + this GitHub package. + + +
    + diff --git a/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.scss b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.scss new file mode 100644 index 000000000..90c43ee00 --- /dev/null +++ b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.scss @@ -0,0 +1,35 @@ +@import "src/styles"; + +main{ + max-width: 50rem; +} + +div { + text-align: center; +} +.yml { + padding: 0.3rem; + text-align: left; + width: 15rem; + margin-left: 5rem; + margin-bottom: 2rem; + border-width:3px; border-style:solid; +} + +img { + padding-right: 2rem; + padding-left: 2rem; +} + +table, th, td { + border:1px solid black; + border-collapse: collapse; + padding: 0.3rem; +} + + + +//.w3-round{ +// align-content: center; +//} + diff --git a/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.spec.ts b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.spec.ts new file mode 100644 index 000000000..3b03b2b8b --- /dev/null +++ b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.spec.ts @@ -0,0 +1,25 @@ +import {ComponentFixture, TestBed} from '@angular/core/testing'; + +import {UserDataGuidelinesComponent} from './user-data-guidelines.component'; + +describe('UserDataGuidelinesComponent', () => { + let component: UserDataGuidelinesComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ UserDataGuidelinesComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(UserDataGuidelinesComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.ts b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.ts new file mode 100644 index 000000000..1d1dde7bc --- /dev/null +++ 
b/UI/src/app/pages/user-data-guidelines/user-data-guidelines.component.ts @@ -0,0 +1,16 @@ +import {Component, ElementRef, OnInit, ViewChild} from '@angular/core'; + +@Component({ + selector: 'qa-user-data-guidelines', + templateUrl: './user-data-guidelines.component.html', + styleUrls: ['./user-data-guidelines.component.scss'] +}) +export class UserDataGuidelinesComponent implements OnInit { + + constructor() { } + @ViewChild('userDataHelpPage') container: ElementRef; + + ngOnInit(): void { + } + +} diff --git a/UI/src/app/pages/user-profile/user-profile.component.html b/UI/src/app/pages/user-profile/user-profile.component.html index 8fd9df49b..2f3cb5e0e 100644 --- a/UI/src/app/pages/user-profile/user-profile.component.html +++ b/UI/src/app/pages/user-profile/user-profile.component.html @@ -1 +1,2 @@ + diff --git a/UI/src/app/pages/validate/dataset-config-model.ts b/UI/src/app/pages/validate/dataset-config-model.ts index 812273f1b..3f05b3b52 100644 --- a/UI/src/app/pages/validate/dataset-config-model.ts +++ b/UI/src/app/pages/validate/dataset-config-model.ts @@ -1,4 +1,6 @@ -import {DatasetComponentSelectionModel} from '../../modules/dataset/components/dataset/dataset-component-selection-model'; +import { + DatasetComponentSelectionModel +} from '../../modules/dataset/components/dataset/dataset-component-selection-model'; import {FilterModel} from '../../modules/filter/components/basic-filter/filter-model'; import {ParametrisedFilterConfig, ValidationRunDatasetConfigDto} from './service/validation-run-config-dto'; import {BehaviorSubject} from 'rxjs'; @@ -6,14 +8,20 @@ import {BehaviorSubject} from 'rxjs'; export const ISMN_NETWORK_FILTER_ID = 18; export const ISMN_DEPTH_FILTER_ID = 24; export const SMOS_RFI_FILTER_ID = 34; +export const SMOS_CHI2_FILTER_ID = 35; export class DatasetConfigModel { constructor(public datasetModel: DatasetComponentSelectionModel, public basicFilters: FilterModel[], public smosRfiFilter$: BehaviorSubject, + public smosChi2Filter$: 
BehaviorSubject, public ismnNetworkFilter$: BehaviorSubject, public ismnDepthFilter$: BehaviorSubject, + public spatialReference$: BehaviorSubject, + public temporalReference$: BehaviorSubject, + public scalingReference$: BehaviorSubject, + public highlighted$?: BehaviorSubject ) { } @@ -41,12 +49,19 @@ export class DatasetConfigModel { parameterisedFilters.push({id: SMOS_RFI_FILTER_ID, parameters: this.smosRfiFilter$.value.parameters$.value}); } + if (this.smosChi2Filter$.value != null) { + parameterisedFilters.push({id: SMOS_CHI2_FILTER_ID, parameters: this.smosChi2Filter$.value.parameters$.value}); + } + return { dataset_id: this.datasetModel.selectedDataset.id, variable_id: this.datasetModel.selectedVariable.id, version_id: this.datasetModel.selectedVersion.id, basic_filters: enabledBasicFilters, - parametrised_filters: parameterisedFilters + parametrised_filters: parameterisedFilters, + is_spatial_reference: this.spatialReference$.value, + is_temporal_reference: this.temporalReference$.value, + is_scaling_reference: this.scalingReference$.value }; } } diff --git a/UI/src/app/pages/validate/service/validation-run-config-dto.ts b/UI/src/app/pages/validate/service/validation-run-config-dto.ts index 3dabdb5ac..d68d06f98 100644 --- a/UI/src/app/pages/validate/service/validation-run-config-dto.ts +++ b/UI/src/app/pages/validate/service/validation-run-config-dto.ts @@ -1,6 +1,8 @@ export interface ValidationRunConfigDto { dataset_configs: ValidationRunDatasetConfigDto[]; - reference_config: ValidationRunDatasetConfigDto; + spatial_reference_config?: ValidationRunDatasetConfigDto; + temporal_reference_config?: ValidationRunDatasetConfigDto; + scaling_reference_config?: ValidationRunDatasetConfigDto; interval_from?: Date; interval_to?: Date; min_lat?: number; @@ -24,6 +26,9 @@ export interface ValidationRunDatasetConfigDto { variable_id: number; basic_filters: number[]; parametrised_filters: ParametrisedFilterConfig[]; + is_spatial_reference: boolean; + 
is_temporal_reference: boolean; + is_scaling_reference: boolean; } diff --git a/UI/src/app/pages/validate/service/validation-run-config.service.ts b/UI/src/app/pages/validate/service/validation-run-config.service.ts index 8f29bbc5b..cb71b5dfc 100644 --- a/UI/src/app/pages/validate/service/validation-run-config.service.ts +++ b/UI/src/app/pages/validate/service/validation-run-config.service.ts @@ -3,10 +3,13 @@ import {environment} from '../../../../environments/environment'; import {HttpClient, HttpParams} from '@angular/common/http'; import {ValidationRunConfigDto} from './validation-run-config-dto'; import {ValidationrunDto} from '../../../modules/core/services/validation-run/validationrun.dto'; -import {Observable} from 'rxjs'; +import {BehaviorSubject, Observable} from 'rxjs'; +import {ScalingMethodDto} from '../../../modules/scaling/components/scaling/scaling-methods.dto'; +import {DatasetConfigModel} from '../dataset-config-model'; const runValidationUrl: string = environment.API_URL + 'api/validation-configuration'; const getValidationConfigUrl: string = environment.API_URL + 'api/validation-configuration'; +const getScalingMethodsUrl: string = environment.API_URL + 'api/scaling-methods'; /** * This service -together with its DTOs- responsible for submitting new validations @@ -16,6 +19,9 @@ const getValidationConfigUrl: string = environment.API_URL + 'api/validation-con }) export class ValidationRunConfigService { + public listOfSelectedConfigs: BehaviorSubject + = new BehaviorSubject([]); + constructor(private httpClient: HttpClient) { } @@ -28,4 +34,27 @@ export class ValidationRunConfigService { public getValidationConfig(validationRunId: string): Observable { return this.httpClient.get(getValidationConfigUrl + '/' + validationRunId); } + + public getScalingMethods(): Observable { + return this.httpClient.get(getScalingMethodsUrl); + } + + + public getInformationOnTheReference(isSpatialReference, isTemporalReference, isScalingReference): string { + const 
listOfReference = []; + if (isSpatialReference) { + listOfReference.push('spatial'); + } + if (isTemporalReference) { + listOfReference.push('temporal'); + } + if (isScalingReference) { + listOfReference.push('scaling'); + } + + let information: string; + listOfReference.length !== 0 ? information = ` (${listOfReference.join(', ')} reference)` : information = ''; + + return information; + } } diff --git a/UI/src/app/pages/validate/validate.component.html b/UI/src/app/pages/validate/validate.component.html index 7dd5391f1..d3b980914 100644 --- a/UI/src/app/pages/validate/validate.component.html +++ b/UI/src/app/pages/validate/validate.component.html @@ -1,4 +1,16 @@
    +
    +
    + You are not logged in. + You can check validation options, but cannot start a validation. + Click here or on the Validate button + to log in or use + this form + to register. +
    + +
    + [header]="(item.datasetModel.selectedDataset?.pretty_name)+' / ' + + ''+(item.datasetModel.selectedVersion?.pretty_name)+' / '+(item.datasetModel.selectedVariable?.short_name) + + (validationConfigService.getInformationOnTheReference((item.spatialReference$|async), (item.temporalReference$|async), (item.scalingReference$|async)))" + [selected]="datasetIdx == validationModel.datasetConfigurations.length - 1" + [class]="{'highlighted': (item.highlighted$|async), + 'is-reference': ((item.spatialReference$|async) || (item.temporalReference$|async) || (item.scalingReference$|async))}"> + [datasetModel]="validationModel.datasetConfigurations[datasetIdx].datasetModel" + [filterModel$]="validationModel.datasetConfigurations[datasetIdx].smosRfiFilter$" + [minThreshold]="0." [increment]="0.05" maxThreshold="1." [units]="' [-]'"> + + + +
    + +
    +
    + + +
    - +
    + -
    - - - - - - - - > - - -
    - -
    -
    - - -
    + +
    + + +
    + +
    + +
    + + + + + + + + + + +
    @@ -139,7 +183,6 @@
    -