Merge pull request #2 from Kamparia/dev
gdacs-api v.2 release
Kamparia authored Jun 10, 2022
2 parents feb3b38 + 36245d6 commit 6d1617f
Showing 9 changed files with 125 additions and 81 deletions.
10 changes: 5 additions & 5 deletions README.md
@@ -21,11 +21,12 @@ From cloned GitHub repo for development:
(venv)$ pip install -e .
```

The installation of `gdacs-api v.1.0.6` package depends on the following packages.
The installation of the `gdacs-api v.2.0.0` package depends on the following packages.
- Python >=3.6
- Requests >=2.10.0
- Xmltodict
- Cachetools
- Pydantic

## Getting Started
### Import Library
@@ -53,16 +54,15 @@ events = client.latest_events() # all recent events
events = client.latest_events(limit=10) # 10 most recent events
```

You can also filter by event types or historical timeline. E.g. in the code snippet below, the first statement returns only Tropical Cyclone (TC) events that occurred within the last 24 hours, while the second returns Flooding (FL) events that occurred within the last 7 days.
You can filter events by the different types of natural disasters.

```python
tc_events = client.latest_events(event_type="TC", historical="24h")
fl_events = client.latest_events(event_type="FL", historical="7d")
tc_events = client.latest_events(event_type="TC")
fl_events = client.latest_events(event_type="FL")
```

Optional parameters:
- `event_type` (str): TC (Tropical Cyclones), EQ (Earthquakes), FL (Floods), VO (Volcanoes), WF (Wild Fires) and DR (Droughts)
- `historical` (str): 24h (Last 24 hours), 7d (Last 7 days)
- `limit` (int): maximum number of events to return (combined with `event_type` in the sketch below).
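
Both optional parameters can be passed together. A minimal sketch, reusing the `client` created above (the events returned depend on whatever GDACS is currently reporting):

```python
# Up to 5 of the most recent earthquake (EQ) events
eq_events = client.latest_events(event_type="EQ", limit=5)
```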

### Get Single Event Record
71 changes: 33 additions & 38 deletions gdacs/api.py
@@ -1,23 +1,17 @@
import json
import requests
import xmltodict
from os.path import join
from cachetools import cached, TTLCache

from gdacs.utils import GDACSAPIError
from gdacs.utils import handle_xml, handle_geojson
from gdacs.utils import download_shp
from gdacs.schemas import GeoJSON
from gdacs.utils import *


CACHE_TTL = 300 # 5minutes
CACHE_TTL = 300 # secs
EVENT_TYPES = [None, 'TC', 'EQ', 'FL', 'VO', 'DR', 'WF']
DATA_FORMATS = [None, 'xml', 'geojson', 'shp']
LATEST_EVENTS_URL = 'https://www.gdacs.org/gdacsapi/api/events/geteventlist/EVENTS4APP'
BASE_URL = "https://www.gdacs.org/datareport/resources"
RSS_FEED_URLS = {
"default": "https://www.gdacs.org/xml/rss.xml",
"24h": "https://www.gdacs.org/xml/rss_24h.xml",
"7d": "https://www.gdacs.org/xml/rss_7d.xml"
}


class GDACSAPIReader:
@@ -30,28 +24,22 @@ def __repr__(self) -> str:
@cached(cache=TTLCache(maxsize=500, ttl=CACHE_TTL))
def latest_events(self,
event_type: str = None,
historical: str = 'default',
limit: int = None
):
""" Get latest events from GDACS RSS feed. """
if event_type not in EVENT_TYPES:
raise GDACSAPIError("API Error: Used an invalid `event_type` parameter in request.")

if historical not in RSS_FEED_URLS.keys():
raise GDACSAPIError("API Error: Used an invalid `historical` parameter in request.")

res = requests.get(RSS_FEED_URLS[historical])
res = requests.get(LATEST_EVENTS_URL)
if res.status_code != 200:
raise GDACSAPIError("API Error: GDACS RSS feed can not be reached.")

xml_parser = xmltodict.parse(res.content)
events = [
item
for item in xml_parser["rss"]["channel"]["item"]
if event_type in [None, item["gdacs:eventtype"]]
event for event in res.json()['features']
if event_type in [None, event['properties']['eventtype']]
]

return json.loads(json.dumps(events[:limit]))
features = json.loads(json.dumps(events[:limit]))
return GeoJSON(features=features)

@cached(cache=TTLCache(maxsize=500, ttl=CACHE_TTL))
def get_event(self,
@@ -69,22 +57,29 @@ def get_event(self,
raise GDACSAPIError("API Error: Used an invalid `data_format` parameter in request.")

if source_format == 'geojson':
file_name = "geojson_{}_{}.geojson".format(event_id, episode_id)
geojson_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return handle_geojson(geojson_path)

return self.__get_geojson_event(event_type, event_id, episode_id)
elif source_format == 'shp':
file_name = "Shape_{}_{}.zip".format(event_id, episode_id)
shp_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return download_shp(shp_path)

return self.__get_shp_event(event_type, event_id, episode_id)
else:
return self.__get_xml_event(event_type, event_id, episode_id, cap_file)

def __get_geojson_event(self, event_type: str, event_id: str, episode_id: str = None):
file_name = f"geojson_{event_id}_{episode_id}.geojson"
geojson_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return handle_geojson(geojson_path)

def __get_shp_event(self, event_type: str, event_id: str, episode_id: str = None):
file_name = f"Shape_{event_id}_{episode_id}.zip"
shp_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return download_shp(shp_path)

def __get_xml_event(self, event_type: str, event_id: str, episode_id: str = None, cap_file: bool = False):
if cap_file:
file_name = f"cap_{event_id}.xml"
elif not episode_id:
file_name = f"rss_{event_id}.xml"
else:
if cap_file:
file_name = "cap_{}.xml".format(event_id)
elif not episode_id:
file_name = "rss_{}.xml".format(event_id)
else:
file_name = "rss_{}_{}.xml".format(event_id, episode_id)

xml_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return handle_xml(xml_path)
file_name = f"rss_{event_id}_{episode_id}.xml"

xml_path = join(BASE_URL, event_type, event_id, file_name).replace("\\", "/")
return handle_xml(xml_path)
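
Taken together, these changes replace the RSS-feed lookup with GDACS's GeoJSON event-list endpoint and wrap the result in the `GeoJSON` pydantic model defined in `gdacs/schemas.py` below. A minimal sketch of the v2 flow, assuming `GDACSAPIReader()` still takes no required constructor arguments and that the collapsed `get_event()` signature keeps the keyword names visible in this hunk (`event_type`, `event_id`, `episode_id`, `source_format`, `cap_file`); the event and episode IDs are placeholders, not real GDACS identifiers:

```python
from gdacs.api import GDACSAPIReader, GDACSAPIError

client = GDACSAPIReader()

# latest_events() now returns a GeoJSON model; len() delegates to its features list
events = client.latest_events(event_type="TC", limit=5)
print(len(events), "tropical cyclone events")

for feature in events.features:          # plain dicts taken from the GDACS feed
    print(feature["properties"]["eventtype"])

# Single-event lookups: xml (default), geojson or shp
try:
    record = client.get_event(event_type="TC", event_id="1000000",
                              episode_id="1", source_format="geojson")
except GDACSAPIError as err:
    print("GDACS lookup failed:", err)
```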
17 changes: 17 additions & 0 deletions gdacs/schemas.py
@@ -0,0 +1,17 @@
from pydantic import BaseModel


class GeoJSON(BaseModel):
type: str = "FeatureCollection"
features: list
bbox: list = None

def __len__(self):
return len(self.features)


class Feature(BaseModel):
type: str
properties: dict
geometry: dict
bbox: list
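
These are thin pydantic containers rather than full GeoJSON validators. If stricter typing is wanted, the feature dicts carried by a `GeoJSON` collection can be promoted to `Feature` instances - a sketch with hand-built sample data, assuming each feature supplies the four keys the model requires (`bbox` is optional in the GeoJSON spec, so real GDACS features without one would fail validation):

```python
from gdacs.schemas import GeoJSON, Feature

collection = GeoJSON(features=[{
    "type": "Feature",
    "properties": {"eventtype": "EQ"},
    "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
    "bbox": [0.0, 0.0, 0.0, 0.0],
}])

typed = [Feature(**f) for f in collection.features]       # validate each feature dict
print(len(collection), typed[0].properties["eventtype"])   # -> 1 EQ
```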
34 changes: 6 additions & 28 deletions gdacs/tests/test_api.py
@@ -1,6 +1,7 @@
from unittest import TestCase

from gdacs.api import GDACSAPIReader, GDACSAPIError
from gdacs.utils import delete_downloads


class TestGDACSAPI(TestCase):
@@ -9,8 +10,7 @@ def setUp(self):

def tearDown(self):
self.client = None

# delete all the files
delete_downloads()

def test_latest_events_no_args(self):
'''Test latest_events() without any arguments.'''
@@ -23,37 +23,15 @@ def test_latest_events_limit(self):
events = self.client.latest_events(limit=limit)
self.assertEqual(len(events), limit)

def test_latest_events_historical(self):
''' Test latest_events() with historical arguments. '''
day_events = self.client.latest_events(historical='24h') # 24 hours
self.assertTrue(day_events) if len(day_events) > 0 else self.assertFalse(day_events)

week_events = self.client.latest_events(historical='7d') # 7 days
self.assertTrue(week_events) if len(week_events) > 0 else self.assertFalse(week_events)

def test_latest_events_event_types(self):
''' Test latest_events() filter by event_types argument. '''
tc_events = self.client.latest_events(event_type="TC") # tropical cyclones
self.assertTrue(tc_events) if len(tc_events) > 0 else self.assertFalse(tc_events)

eq_events = self.client.latest_events(event_type="EQ") # earthquakes
self.assertTrue(eq_events) if len(eq_events) > 0 else self.assertFalse(eq_events)

fl_events = self.client.latest_events(event_type="FL") # floods
self.assertTrue(fl_events) if len(fl_events) > 0 else self.assertFalse(fl_events)

dr_events = self.client.latest_events(event_type="DR") # droughts
self.assertTrue(dr_events) if len(dr_events) > 0 else self.assertFalse(dr_events)

wf_events = self.client.latest_events(event_type="WF") # wild fires
self.assertTrue(wf_events) if len(wf_events) > 0 else self.assertFalse(wf_events)

vo_events = self.client.latest_events(event_type="VO") # volcanoes
self.assertTrue(vo_events) if len(vo_events) > 0 else self.assertFalse(vo_events)
for event_type in ["TC", "EQ", "FL", "DR", "WF", "VO"]:
events = self.client.latest_events(event_type=event_type)
self.assertTrue(events) if len(events) > 0 else self.assertFalse(events)

def test_latest_events_multiple_args(self):
''' Test latest_events() with multiple arguments defined. '''
events = self.client.latest_events(event_type="TC", historical='24h', limit=5)
events = self.client.latest_events(event_type="EQ", limit=5)
self.assertTrue(events) if len(events) > 0 else self.assertFalse(events)
self.assertEqual(len(events), 5) if len(events) == 5 else self.assertFalse(events)

33 changes: 33 additions & 0 deletions gdacs/tests/test_schemas.py
@@ -0,0 +1,33 @@
from unittest import TestCase
from gdacs.schemas import *


class TestSchemas(TestCase):
def setUp(self) -> None:
pass

def tearDown(self) -> None:
pass

def test_geojson(self):
"""
Test the GeoJSON class
:return:
"""
data = GeoJSON(type="FeatureCollection", features=[])
self.assertIsInstance(data, GeoJSON)
self.assertEqual(data.type, "FeatureCollection")
self.assertEqual(type(data.features), list)
self.assertEqual(len(data), 0)

def test_feature(self):
"""
Test the Feature class
:return:
"""
data = Feature(type="Feature", properties={}, geometry={}, bbox=[])
self.assertIsInstance(data, Feature)
self.assertEqual(data.type, "Feature")
self.assertEqual(data.properties, {})
self.assertEqual(data.geometry, {})
self.assertEqual(data.bbox, [])
13 changes: 10 additions & 3 deletions gdacs/tests/test_utils.py
@@ -1,7 +1,6 @@
from unittest import TestCase

from gdacs.utils import GDACSAPIError, handle_geojson
from gdacs.utils import handle_xml, download_shp
from gdacs.utils import *


class TestUtils(TestCase):
@@ -10,7 +9,7 @@ def setUp(self) -> None:

def tearDown(self) -> None:
# delete temporary files
pass
delete_downloads()

def test_handle_geojson(self):
"""
@@ -63,3 +62,11 @@ def test_download_shp_error(self):
url = 'https://www.gdacs.org/datareport/resources/TC/1000194/Shape_1000194_41.zip'
with self.assertRaises(GDACSAPIError):
download_shp(url)

def test_delete_downloads(self):
"""
Test the delete_downloads function
:return:
"""
data = delete_downloads()
self.assertEqual(data, "Deleted all downloaded files.")
17 changes: 14 additions & 3 deletions gdacs/utils.py
@@ -1,3 +1,5 @@
import os
import glob
import json
import requests
import xmltodict
@@ -8,6 +10,7 @@ class GDACSAPIError(RuntimeError):


def handle_geojson(endpoint):
""" Handle GeoJSON file data types. """
res = requests.get(endpoint)
if res.status_code != 200:
raise GDACSAPIError("API Error: Unable to read GeoJSON data for GDACS event.")
@@ -34,6 +37,14 @@ def download_shp(endpoint):
shp_file_name = endpoint.split('/')[-1]
with open(shp_file_name, 'wb') as download:
download.write(res.content)
return "Downloaded {} in directory.".format(shp_file_name)
except Exception as error:
raise error
return f"Downloaded {shp_file_name} in directory."
except (FileNotFoundError, ValueError) as error:
raise GDACSAPIError(f"SHP Download Error: {error}") from error


def delete_downloads() -> str:
""" Delete all downloaded files. """
for file in glob.glob("*.zip"):
os.remove(file)

return "Deleted all downloaded files."
3 changes: 2 additions & 1 deletion requirements.txt
@@ -1,3 +1,4 @@
requests
xmltodict
cachetools
cachetools
pydantic
8 changes: 5 additions & 3 deletions setup.py
@@ -4,7 +4,7 @@

from setuptools import setup

version = "1.0.0"
version = "2.0.0"

long_description = open('README.md').read()

@@ -25,21 +25,23 @@
long_description_content_type="text/markdown",
keywords='gdacs disasters earthquakes tropical-cyclones earthquakes floods',
packages=['gdacs'],
python_requires=">=3.5",
python_requires=">=3.6",
install_requires=[
"requests>=2.10.0",
"xmltodict",
"cachetools",
"pydantic",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
]
)
