diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml new file mode 100644 index 0000000..005643a --- /dev/null +++ b/.github/workflows/unit-test.yml @@ -0,0 +1,27 @@ +name: Run Tests + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.8" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tensorflow==2.12.0 efficientnet==1.1.1 + pip install -e . + + - name: Run tests + run: python -m unittest discover -s tests -p 'test_*.py' diff --git a/API/main.py b/API/main.py index ca55359..85b65e5 100644 --- a/API/main.py +++ b/API/main.py @@ -184,7 +184,7 @@ async def predict_api(request: PredictionRequest): - Predicted results. """ try: - predictions = predict( + predictions = await predict( bbox=request.bbox, model_path=request.checkpoint, zoom_level=request.zoom_level, @@ -192,7 +192,6 @@ async def predict_api(request: PredictionRequest): tile_size=256, confidence=request.confidence, tile_overlap_distance=request.tile_overlap_distance, - merge_adjancent_polygons=request.merge_adjacent_polygons, max_angle_change=request.max_angle_change, skew_tolerance=request.skew_tolerance, tolerance=request.tolerance, diff --git a/predictor/app.py b/predictor/app.py index c6cc669..117bdba 100644 --- a/predictor/app.py +++ b/predictor/app.py @@ -24,7 +24,6 @@ def predict( area_threshold=3, tolerance=0.5, tile_overlap_distance=0.15, - merge_adjancent_polygons=True, use_raster2polygon=False, remove_metadata=True, use_josm_q=False, @@ -43,7 +42,6 @@ def predict( area_threshold (float, optional): Threshold for filtering polygon areas. Defaults to 3 sqm. tolerance (float, optional): Tolerance parameter for simplifying polygons. Defaults to 0.5 m. 
Percentage Tolerance = (Tolerance in Meters / Arc Length in Meters ​)×100 tile_overlap_distance : Provides tile overlap distance to remove the strip between predictions, Defaults to 0.15m - merge_adjancent_polygons(bool,optional) : Merges adjacent self intersecting or containing each other polygons """ if base_path: base_path = os.path.join(base_path, "prediction", str(uuid.uuid4())) @@ -93,7 +91,6 @@ def predict( output_path=geojson_path, area_threshold=area_threshold, tolerance=tolerance, - merge_adjancent_polygons=merge_adjancent_polygons, ) print(f"It took {round(time.time()-start)} sec to extract polygons") with open(geojson_path, "r") as f: diff --git a/predictor/vectorizer.py b/predictor/vectorizer.py index b3343b2..92e05bd 100644 --- a/predictor/vectorizer.py +++ b/predictor/vectorizer.py @@ -23,7 +23,6 @@ def vectorize( output_path: str = None, tolerance: float = 0.5, area_threshold: float = 5, - merge_adjancent_polygons=True, ) -> None: """Polygonize raster tiles from the input path. @@ -35,7 +34,6 @@ def vectorize( output_path: Path of the output file. tolerance (float, optional): Tolerance parameter for simplifying polygons. Defaults to 0.5 m. Percentage Tolerance = (Tolerance in Meters / Arc Length in Meters ​)×100 area_threshold (float, optional): Threshold for filtering polygon areas. Defaults to 5 sqm. 
- merge_adjancent_polygons(bool,optional) : Merges adjacent self intersecting or containing each other polygons Example:: @@ -64,45 +62,15 @@ def vectorize( raster.close() polygons = [shape(s) for s, _ in shapes(mosaic, transform=output)] - merged_polygons = polygons - if merge_adjancent_polygons: - # Merge adjacent polygons - merged_polygons = [] + gs = gpd.GeoSeries(polygons, crs=kwargs["crs"]) - for polygon in polygons: - if not merged_polygons: - merged_polygons.append(polygon) - else: - merged = False - for i, merged_polygon in enumerate(merged_polygons): - if ( - polygon.intersects(merged_polygon) - or polygon.contains(merged_polygon) - or merged_polygon.contains(polygon) - ): - merged_polygons[i] = merged_polygon.union(polygon) - merged = True - break - if not merged: - merged_polygons.append(polygon) + # Explode MultiPolygons + gs = gs.explode() - # areas = [poly.area for poly in merged_polygons] - # max_area, median_area = np.max(areas), np.median(areas) - polygons_filtered = [] - for multi_polygon in merged_polygons: - if multi_polygon.is_empty: - continue + # Filter by area threshold + gs = gs[gs.area >= area_threshold] - # If it's a MultiPolygon, iterate through individual polygons - if multi_polygon.geom_type == "MultiPolygon": - for polygon in multi_polygon.geoms: - if polygon.area > area_threshold: - polygons_filtered.append(Polygon(polygon.exterior)) - # If it's a single Polygon, directly append it - elif multi_polygon.area > area_threshold: - polygons_filtered.append(Polygon(multi_polygon.exterior)) - - gs = gpd.GeoSeries(polygons_filtered, crs=kwargs["crs"]).simplify(tolerance) + gs = gs.simplify(tolerance) if gs.empty: raise ValueError("No Features Found") gs.to_crs("EPSG:4326").to_file(output_path) diff --git a/setup.py b/setup.py index ae06270..8f34f41 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name="fairpredictor", - version="0.0.30", + version="0.0.31", url="https://github.com/kshitijrajsharma/fairpredictor", author="Kshitij 
Raj Sharma", author_email="skshitizraj@gmail.com", diff --git a/tests/test_predict.py b/tests/test_predict.py index 2b5793d..db25f1a 100644 --- a/tests/test_predict.py +++ b/tests/test_predict.py @@ -1,12 +1,42 @@ -bbox = [-84.1334429383278, 9.953153171808898, -84.13033694028854, 9.954719779271468] -zoom_level = 19 -from predictor import download - -image_download_path = download( - bbox, - zoom_level=zoom_level, - tms_url="bing", - tile_size=256, - download_path="/Users/kshitij/hotosm/fairpredictor/download/test", -) -print(image_download_path) +import os +import shutil +import tempfile +import unittest + +import efficientnet.tfkeras as efn +import requests + +from predictor import predict + +# Global variables +TMS_URL = "https://tiles.openaerialmap.org/6501a65c0906de000167e64d/0/6501a65c0906de000167e64e/{z}/{x}/{y}" +BBOX = [100.56228021333352, 13.685230854641182, 100.56383321235313, 13.685961853747969] +FAIR_BASE_URL = "https://fair-dev.hotosm.org/api/v1/workspace/download" +DATASET_ID = "dataset_65" +TRAINING_ID = "training_297" + + +class TestPredictor(unittest.TestCase): + def setUp(self): + model_url = f"{FAIR_BASE_URL}/{DATASET_ID}/output/{TRAINING_ID}/checkpoint.h5" + self.model_path = tempfile.NamedTemporaryFile(suffix=".h5", delete=False).name + response = requests.get(model_url, stream=True) + with open(self.model_path, "wb") as out_file: + shutil.copyfileobj(response.raw, out_file) + + def tearDown(self): + if self.model_path: + try: + os.remove(self.model_path) + except OSError: + pass + + def test_predict(self): + zoom_level = 20 + predictions = predict(BBOX, self.model_path, zoom_level, TMS_URL) + self.assertIsInstance(predictions, dict) + self.assertTrue(len(predictions["features"]) > 0) + + +if __name__ == "__main__": + unittest.main()