diff --git a/docs/api_reference/adapters/hdf5.md b/docs/api_reference/adapters/hdf5.md new file mode 100644 index 000000000..677c355ef --- /dev/null +++ b/docs/api_reference/adapters/hdf5.md @@ -0,0 +1,3 @@ +# hdf5 + +::: optimade.adapters.hdf5 diff --git a/optimade/adapters/hdf5.py b/optimade/adapters/hdf5.py new file mode 100644 index 000000000..91c8a9d2f --- /dev/null +++ b/optimade/adapters/hdf5.py @@ -0,0 +1,268 @@ +from io import BytesIO +from typing import Union, Any +from pydantic import AnyUrl +from datetime import datetime, timezone +from optimade.models import EntryResponseMany, EntryResponseOne +import h5py +from sys import getsizeof +import numpy as np + + +"""This adapter can be used to generate a hdf5 response instead of a json response and to convert the hdf5 response back into a python dictionary. +It can handle numeric data in a binary format compatible with numpy. +It is therefore more efficient than the JSON format at returning large amounts of numeric data. +It however also has more overhead resulting in a larger response for entries with little numeric data. +To enable support for your server the parameter "enabled_response_formats" can be specified in the config file. +It is a list of the supported response_formats. To support the hdf5 return format it should be set to: ["json", "hdf5"] +(support for the JSON format is mandatory) + +Unfortunately, h5py does not support storing objects with the numpy.object type. +It is therefore not possible to directly store a list of dictionaries in a hdf5 file with h5py. +As a workaround, the index of a value in a list is used as a dictionary key so a list can be stored as a dictionary if necessary. + +It also assumes that all the elements of a list, tuple or numpy array are of the same type. 
+""" + + +def generate_hdf5_file_content( + response_object: Union[EntryResponseMany, EntryResponseOne, dict, list, tuple] +) -> bytes: + """This function generates the content of a hdf5 file from an EntryResponse object. + It should also be able to handle python dictionaries lists and tuples. + + Parameters: + response_object: an OPTIMADE response object. This can be of any OPTIMADE entry type, such as structure, reference etc. + + Returns: + A binary object containing the contents of the hdf5 file. + """ + + temp_file = BytesIO() + hdf5_file = h5py.File(temp_file, "w") + if isinstance(response_object, (EntryResponseMany, EntryResponseOne)): + response_object = response_object.dict(exclude_unset=True) + store_hdf5_dict(hdf5_file, response_object) + hdf5_file.close() + file_content = temp_file.getvalue() + temp_file.close() + return file_content + + +def store_hdf5_dict( + hdf5_file: h5py._hl.files.File, iterable: Union[dict, list, tuple], group: str = "/" +): + """This function stores a python list, dictionary or tuple in a hdf5 file. + the currently supported datatypes are str, int, float, list, dict, tuple, bool, AnyUrl, + None ,datetime or any numpy type or numpy array. + + Unfortunately, h5py does not support storing objects with the numpy.object type. + It is therefore not possible to directly store a list of dictionaries in a hdf5 file with h5py. + As a workaround, the index of a value in a list is used as a dictionary key so a list can be stored as a dictionary if neccesary. + + Parameters: + hdf5_file: An hdf5 file like object. + iterable: The object to be stored in the hdf5 file. + group: This indicates to group in the hdf5 file the list, tuple or dictionary should be added. + + Raises: + TypeError: If this function encounters an object with a type that it cannot convert to the hdf5 format + a ValueError is raised. 
+ """ + if isinstance(iterable, (list, tuple)): + iterable = enumerate(iterable) + elif isinstance(iterable, dict): + iterable = iterable.items() + for x in iterable: + key = str(x[0]) + value = x[1] + if isinstance( + value, (list, tuple) + ): # For now, I assume that all values in the list have the same type. + if len(value) < 1: # case empty list + store_value_in_hdf5(key, value, group, hdf5_file) + continue + val_type = type(value[0]) + if isinstance(value[0], dict): + hdf5_file.create_group(group + "/" + key) + store_hdf5_dict(hdf5_file, value, group + "/" + key) + elif val_type.__module__ == np.__name__: + try: + store_value_in_hdf5(key, value, group, hdf5_file) + except TypeError as hdf5_error: + raise TypeError( + "Unfortunatly more complex numpy types like object can not yet be stored in hdf5. Error from hdf5:" + + hdf5_error + ) + elif isinstance(value[0], (int, float)): + store_value_in_hdf5(key, np.asarray(value), group, hdf5_file) + elif isinstance(value[0], str): + # Here I can pass a list of strings to hdf5 which is stored as a numpy object. + store_value_in_hdf5(key, value, group, hdf5_file) + elif isinstance(value[0], (list, tuple)): + list_type = get_recursive_type(value[0]) + if list_type in (int, float): + store_value_in_hdf5(key, np.asarray(value), group, hdf5_file) + else: + hdf5_file.create_group(group + "/" + key) + store_hdf5_dict(hdf5_file, value, group + "/" + key) + else: + hdf5_file.create_group(group + "/" + key) + store_hdf5_dict(hdf5_file, value, group + "/" + key) + + elif isinstance(value, dict): + hdf5_file.create_group(group + "/" + key) + store_hdf5_dict(hdf5_file, value, group + "/" + key) + elif isinstance(value, bool): + store_value_in_hdf5(key, np.bool_(value), group, hdf5_file) + elif isinstance(value, AnyUrl): + # This case had to be placed above the str case as AnyUrl inherits from the string class, but cannot be handled directly by h5py. 
+ store_value_in_hdf5(key, str(value), group, hdf5_file) + elif isinstance(value, (int, float, str)): + store_value_in_hdf5(key, value, group, hdf5_file) + + elif type(value).__module__ == np.__name__: + try: + store_value_in_hdf5(key, value, group, hdf5_file) + except TypeError as hdf5_error: + raise TypeError( + f"Unfortunatly more complex numpy types like object can not yet be stored in hdf5. Error from hdf5:{hdf5_error}" + ) + elif isinstance(value, datetime): + store_value_in_hdf5( + key, + value.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), + group, + hdf5_file, + ) + elif value is None: + store_value_in_hdf5(key, h5py.Empty("f"), group, hdf5_file) + else: + raise ValueError( + f"Unable to store a value of type: {type(value)} in hdf5 format." + ) + + +def store_value_in_hdf5(key, value, group, hdf5_file): + compression_level = 1 + if ( + getsizeof(value) < 4096 + ): # small properties can be stored as attributes; the value of 4096 is rather arbitrary. The total of all the properties should however not exceed 64 kb. + if ( + group + ): # if a group is already present we can store small properties as attributes. (It seems that for each group/dataset a 64kb header is made causing the files to become very large.) + hdf5_file[group].attrs[key] = value + else: + hdf5_file[group + "/" + key] = value + else: + hdf5_file.create_dataset( + group + "/" + key, + data=value, + compression="gzip", + compression_opts=compression_level, + ) + + +def get_recursive_type(obj: Any) -> type: + """If obj is a list or tuple this function returns the type of the first object in the list/tuple that is not a list + or tuple. If the list or tuple is empty it returns None. + Finally, if the object is not a list or tuple it returns the type of the object. 
+ + Parameters: + obj: any python object + + Returns: + The type of the objects that the object contains or the type of the object itself when it does not contain other objects.""" + + if isinstance(obj, (list, tuple)): + if len(obj) == 0: + return None + else: + if isinstance(obj[0], (list, tuple)): + return get_recursive_type(obj[0]) + else: + return type(obj[0]) + return type(obj) + + +def generate_response_from_hdf5(hdf5_content: bytes) -> dict: + """Generates a response_dict from a HDF5 file like object. + It is similar to the response_dict generated from the JSON response, except that the numerical data will have numpy + types. + + Parameters: + hdf5_content: the content of a hdf5 file. + + Returns: + A dictionary containing the data of the hdf5 file.""" + + temp_file = BytesIO(hdf5_content) + hdf5_file = h5py.File(temp_file, "r") + response_dict = generate_dict_from_hdf5(hdf5_file) + return response_dict + + +def generate_dict_from_hdf5( + hdf5_file: h5py._hl.files.File, group: str = "/" +) -> Union[dict, list]: + """This function returns the content of a hdf5 group. + Because of the workaround described under the store_hdf5_dict function, groups which have numbers as keys will be turned to lists (No guarantee that the order is the same as in the original list). + Otherwise, the group will be turned into a dict. + + Parameters: + hdf5_file: An HDF5_object containing the data that should be converted to a dictionary or list. + group: The hdf5 group for which the dictionary should be created. The default is "/" which will return all the data in the hdf5_object + + Returns: + A dict or list containing the content of the hdf5 group. 
+ """ + + return_value = None + for key, value in hdf5_file[group].items(): + return_value = inside_generate_dict_from_hdf5( + key, value, return_value, group, hdf5_file + ) + for key, value in hdf5_file[group].attrs.items(): + return_value = inside_generate_dict_from_hdf5( + key, value, return_value, group, hdf5_file + ) + return return_value + + +def inside_generate_dict_from_hdf5(key, value, return_value, group, hdf5_file): + if key.isdigit(): + if return_value is None: + return_value = [] + if isinstance(value, h5py._hl.group.Group): + return_value.append( + generate_dict_from_hdf5(hdf5_file, group=group + key + "/") + ) + elif isinstance(value, h5py._hl.base.Empty): + return_value.append(None) + elif isinstance(value, str): + return_value.append(value) + elif isinstance(value[()], h5py._hl.base.Empty): + return_value.append(None) + elif isinstance(value[()], bytes): + return_value.append(value[()].decode()) + else: + return_value.append(value[()]) + + else: # Case dictionary + if return_value is None: + return_value = {} + if isinstance(value, h5py._hl.group.Group): + return_value[key] = generate_dict_from_hdf5( + hdf5_file, group=group + key + "/" + ) + elif isinstance(value, h5py._hl.base.Empty): + return_value[key] = None + elif isinstance(value, str): + return_value[key] = value + elif isinstance(value[()], h5py._hl.base.Empty): + return_value[key] = None + elif isinstance(value[()], bytes): + return_value[key] = value[()].decode() + else: + return_value[key] = value[()] + + return return_value diff --git a/optimade/models/jsonapi.py b/optimade/models/jsonapi.py index f6724e01b..0dcd48173 100644 --- a/optimade/models/jsonapi.py +++ b/optimade/models/jsonapi.py @@ -8,6 +8,7 @@ parse_obj_as, root_validator, ) +import numpy from optimade.models.utils import StrictField @@ -319,6 +320,13 @@ class Resource(BaseResource): ) +def process_ndarray(arg): + if arg.dtype == object: + return arg.astype(str).tolist() + else: + return arg.tolist() + + class 
Response(BaseModel): """A top-level response""" @@ -365,4 +373,10 @@ class Config: datetime: lambda v: v.astimezone(timezone.utc).strftime( "%Y-%m-%dT%H:%M:%SZ" ), + numpy.int32: lambda v: int(v), + numpy.float32: lambda v: float(v), + numpy.int64: lambda v: int(v), + numpy.float64: lambda v: float(v), + numpy.bool_: lambda v: bool(v), + numpy.ndarray: process_ndarray, } diff --git a/optimade/server/config.py b/optimade/server/config.py index 4b02fc76f..4e46bc903 100644 --- a/optimade/server/config.py +++ b/optimade/server/config.py @@ -68,6 +68,18 @@ class SupportedBackend(Enum): MONGOMOCK = "mongomock" +class SupportedResponseFormats(Enum): + """Enumeration of the supported response formats + + - 'JSON': [JSON](https://www.json.org/json-en.html) + - 'HDF5': [HDF5](https://portal.hdfgroup.org/display/HDF5/HDF5) + + """ + + HDF5 = "hdf5" + JSON = "json" + + def config_file_settings(settings: BaseSettings) -> Dict[str, Any]: + """Configuration file settings source. @@ -291,6 +303,10 @@ class ServerConfig(BaseSettings): True, description="If True, the server will check whether the query parameters given in the request are correct.", ) + enabled_response_formats: Optional[List[SupportedResponseFormats]] = Field( + ["json"], + description="""A list of the response formats that are supported by this server. 
Must include the "json" format.""", + ) @validator("implementation", pre=True) def set_implementation_version(cls, v): @@ -318,6 +334,9 @@ def use_real_mongo_override(cls, values): return values + def get_enabled_response_formats(self): + return [e.value for e in self.enabled_response_formats] + class Config: """ This is a pydantic model Config object that modifies the behaviour of diff --git a/optimade/server/entry_collections/entry_collections.py b/optimade/server/entry_collections/entry_collections.py index b909339d8..2d7d4aa49 100644 --- a/optimade/server/entry_collections/entry_collections.py +++ b/optimade/server/entry_collections/entry_collections.py @@ -301,10 +301,10 @@ def handle_query_params( # response_format if ( getattr(params, "response_format", False) - and params.response_format != "json" + and params.response_format not in CONFIG.get_enabled_response_formats() ): raise BadRequest( - detail=f"Response format {params.response_format} is not supported, please use response_format='json'" + detail=f"Response format {params.response_format} is not supported, please use one of the supported response_formats: {','.join(CONFIG.get_enabled_response_formats())}" ) # page_limit diff --git a/optimade/server/middleware.py b/optimade/server/middleware.py index b25642bd4..ae90b5800 100644 --- a/optimade/server/middleware.py +++ b/optimade/server/middleware.py @@ -445,7 +445,13 @@ async def dispatch(self, request: Request, call_next): if not isinstance(chunk, bytes): chunk = chunk.encode(charset) body += chunk - body = body.decode(charset) + for i in range(len(response.raw_headers)): + if ( + response.raw_headers[i][0] == b"content-type" + and response.raw_headers[i][1] == b"application/vnd.api+json" + ): + body = body.decode(charset) + break if self._warnings: response = json.loads(body) diff --git a/optimade/server/routers/info.py b/optimade/server/routers/info.py index ddd48adfd..255a1cf82 100644 --- a/optimade/server/routers/info.py +++ 
b/optimade/server/routers/info.py @@ -25,6 +25,9 @@ def get_info(request: Request) -> InfoResponse: from optimade.models import BaseInfoResource, BaseInfoAttributes + entry_types_by_format_dict = {} + for _ in CONFIG.get_enabled_response_formats(): + entry_types_by_format_dict[_] = list(ENTRY_INFO_SCHEMAS) return InfoResponse( meta=meta_values( request.url, 1, 1, more_data_available=False, schema=CONFIG.schema_url @@ -40,9 +43,9 @@ def get_info(request: Request) -> InfoResponse: "version": __api_version__, } ], - formats=["json"], - available_endpoints=["info", "links"] + list(ENTRY_INFO_SCHEMAS.keys()), - entry_types_by_format={"json": list(ENTRY_INFO_SCHEMAS.keys())}, + formats=CONFIG.get_enabled_response_formats(), + available_endpoints=["info", "links"] + list(ENTRY_INFO_SCHEMAS), + entry_types_by_format=entry_types_by_format_dict, is_index=False, ), ), @@ -71,8 +74,9 @@ def get_entry_info(request: Request, entry: str) -> EntryInfoResponse: properties = retrieve_queryable_properties( schema, queryable_properties, entry_type=entry ) - - output_fields_by_format = {"json": list(properties.keys())} + output_fields_by_format = {} + for outputformat in CONFIG.get_enabled_response_formats(): + output_fields_by_format[outputformat] = list(properties) return EntryInfoResponse( meta=meta_values( diff --git a/optimade/server/routers/utils.py b/optimade/server/routers/utils.py index a56d030d4..1e1bbe7c9 100644 --- a/optimade/server/routers/utils.py +++ b/optimade/server/routers/utils.py @@ -4,7 +4,7 @@ from datetime import datetime from typing import Any, Dict, List, Optional, Set, Union -from fastapi import Request +from fastapi import Request, Response from fastapi.responses import JSONResponse from starlette.datastructures import URL as StarletteURL @@ -22,6 +22,7 @@ from optimade.server.exceptions import BadRequest, InternalServerError from optimade.server.query_params import EntryListingQueryParams, SingleEntryQueryParams from optimade.utils import 
mongo_id_for_database, get_providers, PROVIDER_LIST_URLS +from optimade.adapters.hdf5 import generate_hdf5_file_content __all__ = ( "BASE_URL_PREFIXES", @@ -268,7 +269,7 @@ def get_entries( if fields or include_fields: results = handle_response_fields(results, fields, include_fields) - return response( + response_object = response( links=links, data=results, meta=meta_values( @@ -282,6 +283,21 @@ def get_entries( ), included=included, ) + if params.response_format in CONFIG.get_enabled_response_formats(): + if params.response_format == "json": + return response_object + elif params.response_format == "hdf5": + return Response( + content=generate_hdf5_file_content(response_object), + media_type="application/x-hdf5", + headers={ + "Content-disposition": f"attachment; filename={results[0]['type']}.hdf5" + }, + ) + else: + raise BadRequest( + detail=f"The response_format {params.response_format} is not supported by this server. Use one of the supported formats: {','.join(CONFIG.get_enabled_response_formats())} instead " + ) def get_single_entry( @@ -318,7 +334,7 @@ def get_single_entry( if fields or include_fields and results is not None: results = handle_response_fields(results, fields, include_fields)[0] - return response( + response_object = response( links=links, data=results, meta=meta_values( @@ -332,3 +348,18 @@ def get_single_entry( ), included=included, ) + if params.response_format in CONFIG.get_enabled_response_formats(): + if params.response_format == "json": + return response_object + elif params.response_format == "hdf5": + return Response( + content=generate_hdf5_file_content(response_object), + media_type="application/x-hdf5", + headers={ + "Content-disposition": f"attachment; filename={entry_id}.hdf5" + }, + ) + else: + raise BadRequest( + detail=f"The response_format {params.response_format} is not supported by this server. 
Use one of the supported formats: {','.join(CONFIG.get_enabled_response_formats())} instead " + ) diff --git a/requirements.txt b/requirements.txt index 7d28e683f..3e824b4aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,10 @@ elasticsearch-dsl==7.4.0 email_validator==1.2.1 fastapi==0.82.0 +h5py==3.7.0 lark==1.1.2 mongomock==4.1.2 +numpy==1.23.2 pydantic==1.10.2 pymongo==4.2.0 pyyaml==5.4 diff --git a/setup.py b/setup.py index e36bc82c9..5f75fb8e8 100644 --- a/setup.py +++ b/setup.py @@ -19,10 +19,15 @@ # Server minded elastic_deps = ["elasticsearch-dsl~=7.4,<8.0"] mongo_deps = ["pymongo>=3.12.1,<5", "mongomock~=4.1"] -server_deps = [ - "uvicorn~=0.18", - "pyyaml>=5.4,<7", # Keep at pyyaml 5.4 for aiida-core support -] + mongo_deps +hdf5_deps = ["h5py"] +server_deps = ( + [ + "uvicorn~=0.18", + "pyyaml>=5.4,<7", # Keep at pyyaml 5.4 for aiida-core support + ] + + mongo_deps + + hdf5_deps +) # Client minded @@ -35,7 +40,7 @@ "click~=8.1", ] ase_deps = ["ase~=3.22"] -cif_deps = ["numpy~=1.21"] +cif_deps = ["numpy~=1.23"] pdb_deps = cif_deps pymatgen_deps = ["pymatgen~=2022.7"] jarvis_deps = ["jarvis-tools==2022.8.27"] @@ -105,6 +110,7 @@ "pydantic~=1.10,>=1.10.2", "email_validator~=1.2", "requests~=2.28", + "numpy~=1.23", ], extras_require={ "all": all_deps, diff --git a/tests/server/conftest.py b/tests/server/conftest.py index 3f071e1ba..c80eb32ef 100644 --- a/tests/server/conftest.py +++ b/tests/server/conftest.py @@ -1,8 +1,7 @@ +import pytest from typing import Union, Dict from optimade.server.warnings import OptimadeWarning - - -import pytest +from optimade.adapters.hdf5 import generate_response_from_hdf5 @pytest.fixture(scope="session") @@ -84,13 +83,17 @@ def inner( pytest.fail("'server' must be either a string or an OptimadeTestClient.") try: + expected_mime_type = get_expeced_response_format(request) response = used_client.get(request, **kwargs) - response_json = response.json() - assert response.status_code == 200, f"Request failed: 
{response_json}" - expected_mime_type = "application/vnd.api+json" assert ( response.headers["content-type"] == expected_mime_type ), f"Response should have MIME type {expected_mime_type!r}, not {response.headers['content-type']!r}." + if expected_mime_type == "application/vnd.api+json": + response_dict = response.json() + else: + response_dict = generate_response_from_hdf5(response.content) + assert response.status_code == 200, f"Request failed: {response_dict}" + except json.JSONDecodeError: print( f"Request attempted:\n{used_client.base_url}{used_client.version}" @@ -106,12 +109,31 @@ def inner( raise exc else: if return_json: - return response_json + return response_dict return response return inner +def get_expeced_response_format(request: str) -> str: + """This function tries to extract the MIME type from the request string. + If it is unable to do so it returns the default json MIME type. + + Parameters: + request: The request from which the mime type should be extracted. + """ + + expected_mime_type = "application/vnd.api+json" + response_format_start = request.find("response_format") + if response_format_start > -1 and len(request[response_format_start:]) > 15: + returntype = ( + request[15 + response_format_start :].split("=")[1].split("&")[0].strip() + ) + if returntype == "hdf5": + expected_mime_type = "application/x-hdf5" + return expected_mime_type + + @pytest.fixture def check_response(get_good_response): """Check response matches expectations for a given request. 
@@ -203,17 +225,20 @@ def inner( pytest.fail("'server' must be either a string or an OptimadeTestClient.") try: + expected_mime_type = get_expeced_response_format(request) response = used_client.get(request) assert response.status_code == expected_status, ( f"Request should have been an error with status code {expected_status}, " f"but instead {response.status_code} was received.\nResponse:\n{response.json()}", ) - expected_mime_type = "application/vnd.api+json" assert ( response.headers["content-type"] == expected_mime_type ), f"Response should have MIME type {expected_mime_type!r}, not {response.headers['content-type']!r}." - response = response.json() + if expected_mime_type == "application/vnd.api+json": + response = response.json() + else: + response = generate_response_from_hdf5(response.content) assert len(response["errors"]) == 1, response.get( "errors", "'errors' not found in response" ) diff --git a/tests/server/query_params/test_response_format.py b/tests/server/query_params/test_response_format.py new file mode 100644 index 000000000..a11cea5d8 --- /dev/null +++ b/tests/server/query_params/test_response_format.py @@ -0,0 +1,87 @@ +import numpy +from pydantic import AnyUrl +from pydantic.tools import parse_obj_as +from datetime import datetime +from optimade.adapters.hdf5 import ( + generate_hdf5_file_content, + generate_response_from_hdf5, +) +from fastapi.encoders import jsonable_encoder +from optimade.models.jsonapi import Response +from optimade.server.config import CONFIG + + +def test_response_format(check_response): + request = ( + '/structures?filter=chemical_formula_descriptive="Ac"&response_format=json' + ) + expected_ids = ["mpf_1"] + check_response(request, expected_ids) + + if "hdf5" in CONFIG.get_enabled_response_formats(): + request = ( + '/structures?filter=chemical_formula_descriptive="Ac"&response_format=hdf5' + ) + check_response(request, expected_ids) + + +if "hdf5" in CONFIG.get_enabled_response_formats(): + + def 
test_single_entry(check_response): + """For single entry. Default value for `include` is 'references'""" + request = "/structures/mpf_1?response_format=hdf5" + expected_ids = "mpf_1" + check_response(request, expected_ids) + + def test_convert_to_hdf5_and_back(): + test_dict = { + "int": 1, + "float": 5.26, + "string": "str", + "datetime": datetime.now(), + "list": [[[2.3, 6.3], [8.6, 4.5]], [[8.9, 9.4], [5.6, 3.5]]], + "list_of_str": [ + ["string 1", "string 2"], + ["another string"], + "less nested string", + ], + "None_list": [None, None], + "dict": {"a key": "a value", "another key": 7.33}, + "tuple": (95, 63), + "bool": False, + "AnyUrl": parse_obj_as(AnyUrl, "https://example.com"), + "None": None, + "empty": [], + "numpy_int64": numpy.int64(42), + "numpy_float32": numpy.float32(0.88153), + "numpy_bool": numpy.bool_(True), + "numpy_array": numpy.array([(1, 2), (3, 4)]), + "list_of_numpy_int": [numpy.int64(42), numpy.int64(16), numpy.int64(23)], + "list_of_numpy_array": [ + numpy.array([(1, 2), (3, 4)]), + numpy.array([(1.2, 2.3), (3.5, 4.1)]), + numpy.array([(1.8, 2.0), (3, 4)]), + ], + } + + hdf5_file_content = generate_hdf5_file_content(test_dict) + + returned_dict = generate_response_from_hdf5(hdf5_file_content) + reference_dict = jsonable_encoder( + test_dict, custom_encoder=Response.Config.json_encoders + ) + returned_dict = jsonable_encoder( + returned_dict, custom_encoder=Response.Config.json_encoders + ) + assert reference_dict == returned_dict + + +def test_unsupported_response_format(check_error_response): + request = '/structures?filter=chemical_formula_descriptive="Ac"&response_format=png' + error_detail = f"Response format png is not supported, please use one of the supported response_formats: {','.join(CONFIG.get_enabled_response_formats())}" + check_error_response( + request, + expected_status=400, + expected_title="Bad Request", + expected_detail=error_detail, + ) diff --git a/tests/test_config.json b/tests/test_config.json index 
84e05066c..a74d6f675 100644 --- a/tests/test_config.json +++ b/tests/test_config.json @@ -34,5 +34,6 @@ "structures": { "chemsys": "nelements" } - } + }, + "enabled_response_formats": ["json", "hdf5"] }