From 9ab85566fe4bfffe4bbd793b6a9904480e4eef05 Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Wed, 26 Jul 2023 16:39:55 +0100
Subject: [PATCH 1/7] Move OpalArrayData to opal.py

---
 datahub/main.py | 9 +--------
 datahub/opal.py | 6 ++++++
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/datahub/main.py b/datahub/main.py
index a272f6a..c84829d 100644
--- a/datahub/main.py
+++ b/datahub/main.py
@@ -3,23 +3,16 @@
 
 import h5py  # type: ignore
 from fastapi import FastAPI, HTTPException, UploadFile
-from pydantic import BaseModel
 
 from . import data as dt
 from . import log
 from .dsr import validate_dsr_data
-from .opal import OpalModel
+from .opal import OpalArrayData, OpalModel
 from .wesim import get_wesim
 
 app = FastAPI()
 
 
-class OpalArrayData(BaseModel):
-    """Class for defining required key values for Opal data as an array."""
-
-    array: list[float]
-
-
 @app.post("/opal")
 def create_opal_data(data: OpalModel | OpalArrayData) -> dict[str, str]:
     """POST method function for appending data to Opal Dataframe.
diff --git a/datahub/opal.py b/datahub/opal.py
index 05cc741..f0e519c 100644
--- a/datahub/opal.py
+++ b/datahub/opal.py
@@ -5,6 +5,12 @@
 OPAL_START_DATE = "2035-01-22 00:00"
 
 
+class OpalArrayData(BaseModel):
+    """Class for defining required key values for Opal data as an array."""
+
+    array: list[float]
+
+
 class OpalModel(BaseModel):
     """Define required key values for Opal data."""
 

From b3a69a94ad8b0b67bc6edb840431bf3b5f53da1b Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Fri, 28 Jul 2023 12:39:29 +0100
Subject: [PATCH 2/7] Remove __main__ blocks used for development

---
 datahub/data.py  | 57 --------------------------------------------------
 datahub/wesim.py |  7 -------
 2 files changed, 64 deletions(-)

diff --git a/datahub/data.py b/datahub/data.py
index 690eda7..666ed5a 100644
--- a/datahub/data.py
+++ b/datahub/data.py
@@ -3,63 +3,6 @@
 
 from .opal import create_opal_frame
 
-opal_data = [
-    1,
-    8.58,
-    34.9085,
-    34.9055,
-    16.177,
-    7.8868,
-    15.1744,
-    3.3549,
-    0,
-    0,
-    0,
-    0,
-    0,
-    0,
-    0,
-    16192.8871,
-    16194.8348,
-    -0.5713,
-    -0.8467,
-    16.2002,
-    9.0618,
-    0.2806,
-    -2.1328,
-    0,
-    0.7931,
-    0.0522,
-    0.0522,
-    34.8373,
-    34.8343,
-    0,
-    0,
-    30.801,
-    30.801,
-    28,
-    5,
-    63,
-    72,
-    0,
-    303,
-    7230,
-    3.774,
-    3.774,
-    510,
-    2,
-    34,
-]
-
 opal_df = create_opal_frame()
 dsr_data: list[dict[str, str | list]] = []  # type: ignore[type-arg]
 wesim_data: dict[str, dict[Hashable, Any]] = {}  # type: ignore[misc]
-
-
-if __name__ == "__main__":
-    opal_df = create_opal_frame()
-    print("Initial ---")
-    print(opal_df)
-    # opal_df = pd.concat([opal_df, append_opal_frame(opal_data)])
-    # print("Append ---")
-    # print(opal_df)
diff --git a/datahub/wesim.py b/datahub/wesim.py
index ca74714..adf58f3 100644
--- a/datahub/wesim.py
+++ b/datahub/wesim.py
@@ -137,10 +137,3 @@ def get_wesim() -> dict[str, dict[Hashable, Any]]:  # type: ignore[misc]
         "Interconnector Capacity": interconnector_capacity.to_dict(orient="split"),
         "Interconnectors": interconnectors.to_dict(orient="split"),
     }
-
-
-if __name__ == "__main__":
-    for name, df in get_wesim().items():
-        print(name + ":")
-        print(df)
-        print("--------")

From 0a88c9a05966ade7debe2ad821e64fea77a6fcaf Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Fri, 28 Jul 2023 14:14:22 +0100
Subject: [PATCH 3/7] Remove use of Any and Hashable types

---
 datahub/data.py  |  4 +---
 datahub/main.py  | 13 +++++--------
 datahub/wesim.py |  3 +--
 3 files changed, 7 insertions(+), 13 deletions(-)

diff --git a/datahub/data.py b/datahub/data.py
index 666ed5a..2490eb2 100644
--- a/datahub/data.py
+++ b/datahub/data.py
@@ -1,8 +1,6 @@
 """This module defines the data structures for each of the models."""
-from typing import Any, Hashable
-
 from .opal import create_opal_frame
 
 opal_df = create_opal_frame()
 dsr_data: list[dict[str, str | list]] = []  # type: ignore[type-arg]
-wesim_data: dict[str, dict[Hashable, Any]] = {}  # type: ignore[misc]
+wesim_data: dict[str, dict] = {}  # type: ignore[type-arg]
diff --git a/datahub/main.py b/datahub/main.py
index c84829d..d8d1a4a 100644
--- a/datahub/main.py
+++ b/datahub/main.py
@@ -1,6 +1,4 @@
 """Script for running Datahub API."""
-from typing import Any, Hashable
-
 import h5py  # type: ignore
 from fastapi import FastAPI, HTTPException, UploadFile
 
@@ -47,11 +45,10 @@ def create_opal_data(data: OpalModel | OpalArrayData) -> dict[str, str]:
     return {"message": "Data submitted successfully."}
 
 
-# TODO: Fix return typing annotation
 @app.get("/opal")
-def get_opal_data(  # type: ignore[misc]
+def get_opal_data(
     start: int = 0, end: int | None = None
-) -> dict[Hashable, Any]:
+) -> dict[str, dict]:  # type: ignore[type-arg]
     """GET method function for getting Opal Dataframe as JSON.
 
     Args:
@@ -138,9 +135,9 @@ def upload_dsr(file: UploadFile) -> dict[str, str | None]:
 
 
 @app.get("/dsr")
-def get_dsr_data(  # type: ignore[misc]
+def get_dsr_data(
     start: int = 0, end: int | None = None
-) -> dict[Hashable, Any]:
+) -> dict[str, list]:  # type: ignore[type-arg]
     """GET method function for getting DSR data as JSON.
 
     Args:
@@ -166,7 +163,7 @@
 
 
 @app.get("/wesim")
-def get_wesim_data() -> dict[Hashable, Any]:  # type: ignore[misc]
+def get_wesim_data() -> dict[str, dict[str, dict]]:  # type: ignore[type-arg]
     """GET method function for getting Wesim data as JSON.
 
     Returns:
diff --git a/datahub/wesim.py b/datahub/wesim.py
index adf58f3..d5e7515 100644
--- a/datahub/wesim.py
+++ b/datahub/wesim.py
@@ -1,6 +1,5 @@
 """This module defines the data structures for the WESIM model."""
 import os
-from typing import Any, Hashable
 
 import pandas as pd
 
@@ -114,7 +113,7 @@ def structure_capacity(df: pd.DataFrame) -> pd.DataFrame:
     return df.reset_index().replace({"Code": REGIONS_KEY})
 
 
-def get_wesim() -> dict[str, dict[Hashable, Any]]:  # type: ignore[misc]
+def get_wesim() -> dict[str, dict]:  # type: ignore[type-arg]
     """Gets the WESIM data from disk and puts it into pandas dataframes.
 
     Returns:

From 0c34d5f112344c37e18d5feecd05165fb77f5ece Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Fri, 28 Jul 2023 19:41:16 +0100
Subject: [PATCH 4/7] Include a validate step in the opal append accessor

---
 datahub/data.py        |  4 +++-
 datahub/main.py        |  8 +++++++-
 datahub/opal.py        | 20 +++++++++++++++++---
 tests/test_opal.py     | 24 ++++++++++++++++++++++++
 tests/test_opal_api.py | 13 +++++++++++++
 5 files changed, 64 insertions(+), 5 deletions(-)

diff --git a/datahub/data.py b/datahub/data.py
index 2490eb2..68fa715 100644
--- a/datahub/data.py
+++ b/datahub/data.py
@@ -1,6 +1,8 @@
 """This module defines the data structures for each of the models."""
+import pandas as pd
+
 from .opal import create_opal_frame
 
-opal_df = create_opal_frame()
+opal_df: pd.DataFrame = create_opal_frame()
 dsr_data: list[dict[str, str | list]] = []  # type: ignore[type-arg]
 wesim_data: dict[str, dict] = {}  # type: ignore[type-arg]
diff --git a/datahub/main.py b/datahub/main.py
index d8d1a4a..cb89465 100644
--- a/datahub/main.py
+++ b/datahub/main.py
@@ -39,7 +39,13 @@ def create_opal_data(data: OpalModel | OpalArrayData) -> dict[str, str]:
 
     log.info("Appending new data...")
     log.debug(f"Original Opal DataFrame:\n\n{dt.opal_df}")
-    dt.opal_df.opal.append(append_input)
+    try:
+        dt.opal_df.opal.append(append_input)
+    except AssertionError:
+        message = "Error with Opal data on server. Fails validation."
+        log.error(message)
+        raise HTTPException(status_code=400, detail=message)
+
     log.debug(f"Updated Opal DataFrame:\n\n{dt.opal_df}")
 
     return {"message": "Data submitted successfully."}
diff --git a/datahub/opal.py b/datahub/opal.py
index f0e519c..c02cd96 100644
--- a/datahub/opal.py
+++ b/datahub/opal.py
@@ -1,4 +1,5 @@
 """This module defines the data structures for the Opal model."""
+import numpy as np
 import pandas as pd
 from pydantic import BaseModel, Field
 
@@ -75,11 +76,24 @@ class OpalAccessor:
     """Pandas custom accessor for appending new data to Opal dataframe."""
 
     def __init__(self, pandas_obj: pd.DataFrame) -> None:
-        """Initialization of dataframe.
+        """Initialization of dataframe."""
+        self._validate(pandas_obj)
+        self._obj = pandas_obj
+
+    @staticmethod
+    def _validate(pandas_obj: pd.DataFrame) -> None:
+        """Validates the DataFrame to ensure it is usable by this accessor.
 
-        TODO: Add validation function.
+        Raises:
+            AssertionError if the Dataset fails the validation.
         """
-        self._obj = pandas_obj
+        assert set(pandas_obj.columns) == set(opal_headers.keys())
+        assert pd.api.types.is_datetime64_dtype(pandas_obj.get("Time", None))
+        assert all(
+            np.issubdtype(dtype, np.number)
+            for column, dtype in pandas_obj.dtypes.items()
+            if column != "Time"
+        )
 
     def append(self, data: dict[str, float] | list[float]) -> None:
         """Function to append new data to existing dataframe.
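
Note (an aside, not part of the patch itself): the `_validate` hook above runs whenever the `.opal` accessor is instantiated on a DataFrame, which is what lets the POST endpoint in datahub/main.py translate an AssertionError into an HTTP 400. Below is a minimal, self-contained sketch of the same validate-on-access pattern. It assumes the accessor is registered with pandas' `register_dataframe_accessor` (the decorator on `OpalAccessor` sits outside this hunk's context); the `demo` namespace and the two toy columns are hypothetical:

    import numpy as np
    import pandas as pd

    @pd.api.extensions.register_dataframe_accessor("demo")  # hypothetical namespace
    class DemoAccessor:
        """Validate-on-access sketch of the OpalAccessor pattern."""

        def __init__(self, pandas_obj: pd.DataFrame) -> None:
            self._validate(pandas_obj)  # runs on first access to `df.demo`
            self._obj = pandas_obj

        @staticmethod
        def _validate(pandas_obj: pd.DataFrame) -> None:
            # "Time" must exist and be datetime64; all other columns numeric.
            assert pd.api.types.is_datetime64_dtype(pandas_obj.get("Time", None))
            assert all(
                np.issubdtype(dtype, np.number)
                for column, dtype in pandas_obj.dtypes.items()
                if column != "Time"
            )

    good = pd.DataFrame(
        {"Time": pd.to_datetime(["2035-01-22 00:00"]), "Total Generation": [34.9]}
    )
    _ = good.demo  # validation passes

    bad = good.copy()
    bad["Total Generation"] = bad["Total Generation"].astype(str)
    _ = bad.demo  # raises AssertionError, which the endpoint maps to HTTP 400
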
diff --git a/tests/test_opal.py b/tests/test_opal.py
index 680d577..453b4c5 100644
--- a/tests/test_opal.py
+++ b/tests/test_opal.py
@@ -1,4 +1,5 @@
 import pandas as pd
+import pytest
 
 
 def test_create_opal_frame():
@@ -67,6 +68,29 @@ def test_append_opal_data(opal_data):
     assert (df.iloc[2, :] == list(data_3.values())[1:]).all()
 
 
+def test_append_validate(opal_data):
+    """Tests appending new row of Opal data to Dataframe using custom accessor."""
+    from datahub.opal import create_opal_frame
+
+    # Check incorrect type
+    df = create_opal_frame()
+    df["Total Generation"] = df["Total Generation"].astype(str)
+    with pytest.raises(AssertionError):
+        df.opal.append(opal_data)
+
+    df = create_opal_frame()
+    df["Time"] = df["Time"].astype(str)
+    with pytest.raises(AssertionError):
+        df.opal.append(opal_data)
+
+    df.drop("Time", axis=1, inplace=True)
+    with pytest.raises(AssertionError):
+        df.opal.append(opal_data)
+
+    # Checks that Dataframe does not have an additional row
+    assert len(df.index) == 1
+
+
 def test_append_opal_data_array(opal_data_array):
     """Tests appending new row of Opal data using array format."""
     from datahub.opal import OPAL_START_DATE, create_opal_frame
diff --git a/tests/test_opal_api.py b/tests/test_opal_api.py
index 431c7f6..8a0e280 100644
--- a/tests/test_opal_api.py
+++ b/tests/test_opal_api.py
@@ -68,6 +68,19 @@ def test_post_opal_api_invalid(client, opal_data, opal_data_array):
         "detail": "Array has invalid length. Expecting 45 items."
     }
 
+    # Check that error is raised when the data on the server has been corrupted
+    post_data = json.dumps(opal_data.copy())
+    dt.opal_df.drop("Time", axis=1, inplace=True)
+
+    response = client.post("/opal", data=post_data)
+    assert response.status_code == 400
+    assert response.json() == {
+        "detail": "Error with Opal data on server. Fails validation."
+    }
+
+    # Check that the Opal global variable has not been updated.
+    assert len(dt.opal_df.index) == 1
+
 
 def test_get_opal_api(client, opal_data):
     """Tests Opal data GET method."""

From 8df741bb3ed0a992ddf308b70b8d15f416153754 Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Sat, 29 Jul 2023 01:07:38 +0100
Subject: [PATCH 5/7] Separate API tests into their own modules

---
 tests/conftest.py       |  8 ++++++
 tests/test_dsr.py       | 69 ----------------------------------------
 tests/test_dsr_api.py   | 71 +++++++++++++++++++++++++++++++++++++++++
 tests/test_wesim.py     | 19 -----------
 tests/test_wesim_api.py |  9 ++++++
 5 files changed, 88 insertions(+), 88 deletions(-)
 create mode 100644 tests/test_dsr_api.py
 create mode 100644 tests/test_wesim_api.py

diff --git a/tests/conftest.py b/tests/conftest.py
index 4d76bf3..0d41290 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -69,3 +69,11 @@ def dsr_data_path(tmp_path):
 
     # Return the path to the file
     return file_path
+
+
+@pytest.fixture
+def wesim_input_data():
+    """The filepath for the test version of the wesim data."""
+    from datahub.wesim import read_wesim
+
+    return read_wesim("tests/data/wesim_example.xlsx")
diff --git a/tests/test_dsr.py b/tests/test_dsr.py
index 4cacf1c..cbc8ecc 100644
--- a/tests/test_dsr.py
+++ b/tests/test_dsr.py
@@ -1,18 +1,5 @@
-import h5py  # type: ignore
 import numpy as np
 import pytest
-from fastapi.testclient import TestClient
-
-from datahub import data as dt
-from datahub.main import app
-
-client = TestClient(app)
-
-
-@pytest.fixture(autouse=True)
-def reset_dsr_data():
-    """Pytest Fixture for resetting DSR data global variable."""
-    dt.dsr_data = []
 
 
 def test_validate_dsr_data(dsr_data):
@@ -37,59 +24,3 @@ def test_validate_dsr_data(dsr_data):
     with pytest.raises(HTTPException) as err:
         validate_dsr_data(dsr_data)
     assert err.value.detail == "Missing required fields: Amount."
-
-
-def test_post_dsr_api(dsr_data_path):
-    """Tests POSTing DSR data as a hdf5 file to the API."""
-    with open(dsr_data_path, "rb") as dsr_data:
-        response = client.post("/dsr", files={"file": dsr_data})
-
-    assert response.status_code == 200
-    assert response.json() == {"filename": dsr_data_path.name}
-
-    # Checks that the DSR global variable has been updated
-    assert len(dt.dsr_data) == 1
-
-
-def test_post_dsr_api_invalid(dsr_data_path):
-    """Tests POSTing invalid DSR data to API."""
-    # Check invalid array lengths raises an error
-    with h5py.File(dsr_data_path, "r+") as dsr_data:
-        amount = dsr_data.pop("Amount")[...]
-        dsr_data["Amount"] = np.append(amount, 1.0)
-        cost = dsr_data.pop("Cost")[...]
-        dsr_data["Cost"] = cost[1:]
-
-    with open(dsr_data_path, "rb") as dsr_data:
-        response = client.post("/dsr", files={"file": dsr_data})
-    assert response.status_code == 422
-    assert response.json()["detail"] == "Invalid size for: Amount, Cost."
-
-    # Check missing fields raises an error
-    with h5py.File(dsr_data_path, "r+") as dsr_data:
-        dsr_data.pop("Amount")
-
-    with open(dsr_data_path, "rb") as dsr_data:
-        response = client.post("/dsr", files={"file": dsr_data})
-    assert response.status_code == 422
-    assert response.json()["detail"] == "Missing required fields: Amount."
-
-    # Checks that the DSR global variable has not been updated
-    assert len(dt.dsr_data) == 0
-
-
-def test_get_dsr_api():
-    """Tests DSR data GET method."""
-    dt.dsr_data = [0, 1, 2, 3, 4, 5]
-
-    response = client.get("/dsr")
-    assert response.json()["data"] == dt.dsr_data
-
-    response = client.get("/dsr?start=2")
-    assert response.json()["data"] == dt.dsr_data[2:]
-
-    response = client.get("/dsr?end=2")
-    assert response.json()["data"] == dt.dsr_data[:3]
-
-    response = client.get("/dsr?start=1&end=2")
-    assert response.json()["data"] == dt.dsr_data[1:3]
diff --git a/tests/test_dsr_api.py b/tests/test_dsr_api.py
new file mode 100644
index 0000000..1b2ca87
--- /dev/null
+++ b/tests/test_dsr_api.py
@@ -0,0 +1,71 @@
+import h5py  # type: ignore
+import numpy as np
+import pytest
+from fastapi.testclient import TestClient
+
+from datahub import data as dt
+from datahub.main import app
+
+client = TestClient(app)
+
+
+@pytest.fixture(autouse=True)
+def reset_dsr_data():
+    """Pytest Fixture for resetting DSR data global variable."""
+    dt.dsr_data = []
+
+
+def test_post_dsr_api(dsr_data_path):
+    """Tests POSTing DSR data as a hdf5 file to the API."""
+    with open(dsr_data_path, "rb") as dsr_data:
+        response = client.post("/dsr", files={"file": dsr_data})
+
+    assert response.status_code == 200
+    assert response.json() == {"filename": dsr_data_path.name}
+
+    # Checks that the DSR global variable has been updated
+    assert len(dt.dsr_data) == 1
+
+
+def test_post_dsr_api_invalid(dsr_data_path):
+    """Tests POSTing invalid DSR data to API."""
+    # Check invalid array lengths raises an error
+    with h5py.File(dsr_data_path, "r+") as dsr_data:
+        amount = dsr_data.pop("Amount")[...]
+        dsr_data["Amount"] = np.append(amount, 1.0)
+        cost = dsr_data.pop("Cost")[...]
+        dsr_data["Cost"] = cost[1:]
+
+    with open(dsr_data_path, "rb") as dsr_data:
+        response = client.post("/dsr", files={"file": dsr_data})
+    assert response.status_code == 422
+    assert response.json()["detail"] == "Invalid size for: Amount, Cost."
+
+    # Check missing fields raises an error
+    with h5py.File(dsr_data_path, "r+") as dsr_data:
+        dsr_data.pop("Amount")
+
+    with open(dsr_data_path, "rb") as dsr_data:
+        response = client.post("/dsr", files={"file": dsr_data})
+    assert response.status_code == 422
+    assert response.json()["detail"] == "Missing required fields: Amount."
+
+    # Checks that the DSR global variable has not been updated
+    assert len(dt.dsr_data) == 0
+
+
+def test_get_dsr_api():
+    """Tests DSR data GET method."""
+    dt.dsr_data = [0, 1, 2, 3, 4, 5]
+
+    response = client.get("/dsr")
+    assert response.json()["data"] == dt.dsr_data
+
+    response = client.get("/dsr?start=2")
+    assert response.json()["data"] == dt.dsr_data[2:]
+
+    response = client.get("/dsr?end=2")
+    assert response.json()["data"] == dt.dsr_data[:3]
+
+    response = client.get("/dsr?start=1&end=2")
+    assert response.json()["data"] == dt.dsr_data[1:3]
diff --git a/tests/test_wesim.py b/tests/test_wesim.py
index b829721..dc4025b 100644
--- a/tests/test_wesim.py
+++ b/tests/test_wesim.py
@@ -1,15 +1,4 @@
 import pandas as pd
-import pytest
-
-from datahub import data as dt
-
-
-@pytest.fixture
-def wesim_input_data():
-    """The filepath for the test version of the wesim data."""
-    from datahub.wesim import read_wesim
-
-    return read_wesim("tests/data/wesim_example.xlsx")
 
 
 def test_read_wesim(wesim_input_data):
@@ -84,11 +73,3 @@ def test_get_wesim(mocker, wesim_input_data):
     assert pd.DataFrame(**wesim["Regions"]).shape == (30, 10)
     assert pd.DataFrame(**wesim["Interconnector Capacity"]).shape == (4, 2)
     assert pd.DataFrame(**wesim["Interconnectors"]).shape == (25, 3)
-
-
-def test_get_wesim_api(client, mocker, wesim_input_data):
-    """Test get_wesim returns a dictionary with appropriate DataFrames."""
-    with mocker.patch("datahub.wesim.read_wesim", return_value=wesim_input_data):
-        response = client.get("/wesim")
-
-    assert response.json()["data"] == dt.wesim_data
diff --git a/tests/test_wesim_api.py b/tests/test_wesim_api.py
new file mode 100644
index 0000000..63bb7bb
--- /dev/null
+++ b/tests/test_wesim_api.py
@@ -0,0 +1,9 @@
+from datahub import data as dt
+
+
+def test_get_wesim_api(client, mocker, wesim_input_data):
+    """Test get_wesim returns a dictionary with appropriate DataFrames."""
+    with mocker.patch("datahub.wesim.read_wesim", return_value=wesim_input_data):
+        response = client.get("/wesim")
+
+    assert response.json()["data"] == dt.wesim_data

From 509343dc1816aa69523d0e62ef9cfb746b885edd Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Sat, 29 Jul 2023 01:48:35 +0100
Subject: [PATCH 6/7] Expand on API docs. Close #101

---
 datahub/main.py | 55 +++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 46 insertions(+), 9 deletions(-)

diff --git a/datahub/main.py b/datahub/main.py
index cb89465..0993d37 100644
--- a/datahub/main.py
+++ b/datahub/main.py
@@ -8,19 +8,26 @@
 from .opal import OpalArrayData, OpalModel
 from .wesim import get_wesim
 
-app = FastAPI()
+app = FastAPI(
+    title="Gridlington DataHub",
+)
 
 
 @app.post("/opal")
 def create_opal_data(data: OpalModel | OpalArrayData) -> dict[str, str]:
     """POST method function for appending data to Opal Dataframe.
 
+    It takes the Opal data as a dictionary or list in JSON format and updates the data
+    held in the datahub and returns a success message.
+
+    \f
+
     Args:
         data: The raw opal data in either Dict or List format
 
     Returns:
         A Dict of the Opal data that has just been added to the Dataframe
-    """
+    """  # noqa: D301
     log.info("Recieved Opal data.")
 
     raw_data = data.dict()
@@ -57,16 +64,24 @@ def get_opal_data(
 ) -> dict[str, dict]:  # type: ignore[type-arg]
     """GET method function for getting Opal Dataframe as JSON.
 
+    It takes optional query parameters of:
+    - `start`: Starting index for exported Dataframe
+    - `end`: Last index that will be included in exported Dataframe
+
+    And returns a dictionary containing the Opal Dataframe in JSON format.
+
+    This can be converted back to a DataFrame using the following:
+    `pd.DataFrame(**data)`
+
+    \f
+
     Args:
         start: Starting index for exported Dataframe
         end: Last index that will be included in exported Dataframe
 
     Returns:
-        A Dict containing the Opal Dataframe in JSON format
-
-        This can be converted back to a Dataframe using the following:
-        pd.DataFrame(**data)
-    """
+        A Dict containing the Opal DataFrame in JSON format
+    """  # noqa: D301
     log.info("Sending Opal data...")
     log.debug(f"Query parameters:\n\nstart={start}\nend={end}\n")
     if isinstance(end, int) and end < start:
@@ -146,13 +161,26 @@ def get_dsr_data(
 ) -> dict[str, list]:  # type: ignore[type-arg]
     """GET method function for getting DSR data as JSON.
 
+    It takes optional query parameters of:
+    - `start`: Starting index for exported list
+    - `end`: Last index that will be included in exported list
+
+    And returns a dictionary containing the DSR data in JSON format.
+
+    This can be converted back to a DataFrame using the following:
+    `pd.DataFrame(**data)`
+
+    TODO: Ensure data is json serializable or returned in binary format
+
+    \f
+
     Args:
         start: Starting index for exported list
         end: Last index that will be included in exported list
 
     Returns:
         A Dict containing the DSR list
-    """
+    """  # noqa: D301
     log.info("Sending DSR data...")
     log.debug(f"Query parameters:\n\nstart={start}\nend={end}\n")
     if isinstance(end, int) and end < start:
@@ -172,9 +200,18 @@ def get_dsr_data(
 def get_wesim_data() -> dict[str, dict[str, dict]]:  # type: ignore[type-arg]
     """GET method function for getting Wesim data as JSON.
 
+    It returns a dictionary with the WESIM data in JSON format containing the following
+    4 DataFrames:
+    - Capacity (6, 12)
+    - Regions (30, 10)
+    - Interconnector Capacity (4, 2)
+    - Interconnectors (25, 3)
+
+    \f
+
     Returns:
         A Dict containing the Wesim Dataframes
-    """
+    """  # noqa: D301
     log.info("Sending Wesim data...")
     if dt.wesim_data == {}:
         log.debug("Wesim data empty! Creating Wesim data...")

From dc57c881e2b92f7641769daaf9669bd7f71be05c Mon Sep 17 00:00:00 2001
From: Adrian D'Alessandro
Date: Sat, 29 Jul 2023 10:07:38 +0100
Subject: [PATCH 7/7] Remove mention of version in code.

Update to 0.1.1
---
 datahub/__init__.py   | 2 --
 pyproject.toml        | 8 ++++----
 tests/test_datahub.py | 6 ------
 3 files changed, 4 insertions(+), 12 deletions(-)
 delete mode 100644 tests/test_datahub.py

diff --git a/datahub/__init__.py b/datahub/__init__.py
index 8443050..fc7da00 100644
--- a/datahub/__init__.py
+++ b/datahub/__init__.py
@@ -4,8 +4,6 @@
 
 from .core.log_config import logging_dict_config
 
-__version__ = "0.0.1"
-
 logging.config.dictConfig(logging_dict_config)
 log = logging.getLogger("api_logger")
diff --git a/pyproject.toml b/pyproject.toml
index c7ac9cc..0936a13 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,11 @@
 [project]
 name = "datahub"
-version = "0.0.1"
+version = "0.1.1"
 authors = [
     { name = "Adrian D'Alessandro", email = "a.dalessandro@imperial.ac.uk" },
     { name = "Callum West", email = "c.west@imperial.ac.uk" },
     { name = "Dan Davies", email = "d.w.davies@imperial.ac.uk" },
-    { name = "Imperial College London RSE Team", email = "ict-rse-team@imperial.ac.uk" }
+    { name = "Imperial College London RSE Team", email = "ict-rse-team@imperial.ac.uk" },
 ]
 requires-python = ">=3.10"
 dependencies = [
@@ -13,7 +13,7 @@ dependencies = [
     "fastapi",
     "uvicorn",
     "python-multipart",
-    "h5py"
+    "h5py",
 ]
 
 [project.optional-dependencies]
@@ -28,7 +28,7 @@ dev = [
     "pytest-mypy",
     "pytest-mock",
     "pandas-stubs",
-    "httpx"
+    "httpx",
 ]
 
 [tool.setuptools.packages.find]
diff --git a/tests/test_datahub.py b/tests/test_datahub.py
deleted file mode 100644
index f934434..0000000
--- a/tests/test_datahub.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from datahub import __version__
-
-
-def test_version():
-    """Check that the version is acceptable."""
-    assert __version__ == "0.0.1"
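
Note (an aside, not part of the patch series): taken together, the patches above give a simple round trip for Opal data. The sketch below is illustrative only, using the same TestClient approach as the test suite. It assumes that the GET payload is wrapped in a "data" key (the DSR and WESIM tests above read `response.json()["data"]`) and that a flat array of 45 placeholder values passes the server-side validation:

    import pandas as pd
    from fastapi.testclient import TestClient

    from datahub.main import app

    client = TestClient(app)

    # POST one row in the OpalArrayData format: {"array": [45 floats]}.
    response = client.post("/opal", json={"array": [1.0] * 45})
    assert response.status_code == 200, response.json()

    # GET it back and rebuild the DataFrame as the docstrings suggest: the
    # payload is a dict of DataFrame constructor arguments, so
    # pd.DataFrame(**data) restores it.
    data = client.get("/opal").json()["data"]
    df = pd.DataFrame(**data)
    print(df.head())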