[refactor] refactor and remove unused code, remove unused deps
- requirements.txt +2 -2
- requirements_dev.txt +0 -4
- src/app.py +13 -36
- src/io/coordinates_pixel_conversion.py +1 -1
- src/io/geo_helpers.py +70 -0
- src/io/helpers.py +0 -618
- src/io/tiles_to_tiff.py +0 -91
- src/prediction_api/predictors.py +43 -105
- src/utilities/constants.py +0 -23
- src/utilities/type_hints.py +0 -21
- src/utilities/utilities.py +0 -158
requirements.txt CHANGED
@@ -2,10 +2,10 @@ aws-lambda-powertools
 awslambdaric
 bson
 geopandas
-httpx
 jmespath
 numpy
 onnxruntime
 opencv-python
 pillow
-rasterio
+rasterio
+requests
requirements_dev.txt CHANGED
@@ -1,5 +1,3 @@
-aiofiles
-aiohttp
 aws-lambda-powertools
 awslambdaric
 bson
@@ -9,7 +7,5 @@ numpy
 onnxruntime
 opencv-python
 pillow
-pyproj
 rasterio
 requests
-shapely
src/app.py CHANGED
@@ -27,7 +27,7 @@ def get_response(status: int, start_time: float, request_id: str, response_body:
         str: json response

     """
-    app_logger.
+    app_logger.debug(f"response_body:{response_body}.")
     response_body["duration_run"] = time.time() - start_time
     response_body["message"] = CUSTOM_RESPONSE_MESSAGES[status]
     response_body["request_id"] = request_id
@@ -38,59 +38,36 @@ def get_response(status: int, start_time: float, request_id: str, response_body:
         "body": json.dumps(response_body),
         "isBase64Encoded": False
     }
-    app_logger.
+    app_logger.debug(f"response type:{type(response)} => {response}.")
     return json.dumps(response)


 def get_parsed_bbox_points(request_input: Dict) -> Dict:
     app_logger.info(f"try to parsing input request {request_input}...")
     bbox = request_input["bbox"]
-    app_logger.
+    app_logger.debug(f"request bbox: {type(bbox)}, value:{bbox}.")
     ne = bbox["ne"]
     sw = bbox["sw"]
-    app_logger.
-    app_logger.
+    app_logger.debug(f"request ne: {type(ne)}, value:{ne}.")
+    app_logger.debug(f"request sw: {type(sw)}, value:{sw}.")
     ne_latlng = [float(ne["lat"]), float(ne["lng"])]
     sw_latlng = [float(sw["lat"]), float(sw["lng"])]
     bbox = [ne_latlng, sw_latlng]
     zoom = int(request_input["zoom"])
     for prompt in request_input["prompt"]:
-        app_logger.
+        app_logger.debug(f"current prompt: {type(prompt)}, value:{prompt}.")
         data = prompt["data"]
-        # if prompt["type"] == "rectangle":
-        #     app_logger.info(f"current data points: {type(data)}, value:{data}.")
-        #     data_ne = data["ne"]
-        #     app_logger.info(f"current data_ne point: {type(data_ne)}, value:{data_ne}.")
-        #     data_sw = data["sw"]
-        #     app_logger.info(f"current data_sw point: {type(data_sw)}, value:{data_sw}.")
-        #
-        #     diff_pixel_coords_origin_data_ne = get_latlng_to_pixel_coordinates(ne, sw, data_ne, zoom, "ne")
-        #     app_logger.info(f'current diff prompt ne: {type(data)}, {data} => {diff_pixel_coords_origin_data_ne}.')
-        #     diff_pixel_coords_origin_data_sw = get_latlng_to_pixel_coordinates(ne, sw, data_sw, zoom, "sw")
-        #     app_logger.info(f'current diff prompt sw: {type(data)}, {data} => {diff_pixel_coords_origin_data_sw}.')
-        #     prompt["data"] = [
-        #         diff_pixel_coords_origin_data_ne["x"], diff_pixel_coords_origin_data_ne["y"],
-        #         diff_pixel_coords_origin_data_sw["x"], diff_pixel_coords_origin_data_sw["y"]
-        #     ]
-        #     # rect_diffs_input = str(Path(ROOT) / "rect_diffs_input.json")
-        #     # with open(rect_diffs_input, "w") as jj_out3:
-        #     #     json.dump({
-        #     #         "prompt_data": serialize(prompt["data"]),
-        #     #         "diff_pixel_coords_origin_data_ne": serialize(diff_pixel_coords_origin_data_ne),
-        #     #         "diff_pixel_coords_origin_data_sw": serialize(diff_pixel_coords_origin_data_sw),
-        #     #     }, jj_out3)
-        #     # app_logger.info(f"written json:{rect_diffs_input}.")
         if prompt["type"] == "point":
             current_point = get_latlng_to_pixel_coordinates(ne, sw, data, zoom, "point")
-            app_logger.
+            app_logger.debug(f"current prompt: {type(current_point)}, value:{current_point}.")
             new_prompt_data = [current_point['x'], current_point['y']]
-            app_logger.
+            app_logger.debug(f"new_prompt_data: {type(new_prompt_data)}, value:{new_prompt_data}.")
             prompt["data"] = new_prompt_data
         else:
             raise ValueError("valid prompt type is only 'point'")

-    app_logger.
-    app_logger.
+    app_logger.debug(f"bbox => {bbox}.")
+    app_logger.debug(f'## request_input-prompt updated => {request_input["prompt"]}.')

     app_logger.info(f"unpacking elaborated {request_input}...")
     return {
@@ -117,18 +94,18 @@ def lambda_handler(event: dict, context: LambdaContext):
         app_logger.error(f"e_constants1:{e_constants1}.")
         body = event

-    app_logger.
+    app_logger.debug(f"body, #1: {type(body)}, {body}...")

     if isinstance(body, str):
         body_decoded_str = base64_decode(body)
-        app_logger.
+        app_logger.debug(f"body_decoded_str: {type(body_decoded_str)}, {body_decoded_str}...")
        body = json.loads(body_decoded_str)

     app_logger.info(f"body, #2: {type(body)}, {body}...")

     try:
         prompt_latlng = body["prompt"]
-        app_logger.
+        app_logger.debug(f"prompt_latlng:{prompt_latlng}.")
         body_request = get_parsed_bbox_points(body)
         app_logger.info(f"body_request=> {type(body_request)}, {body_request}.")
         body_response = samexporter_predict(body_request["bbox"], body_request["prompt"], body_request["zoom"])
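For reference, a minimal sketch of the request body that get_parsed_bbox_points accepts after this change. The field names ("bbox", "ne", "sw", "zoom", "prompt", "type", "data") come from the parsing code above; the coordinate and zoom values here are only illustrative:

    # hypothetical example payload for get_parsed_bbox_points / lambda_handler;
    # values are illustrative, field names match the parsing code in this diff
    example_request = {
        "bbox": {
            "ne": {"lat": 46.27697017893455, "lng": 9.616470336914064},
            "sw": {"lat": 46.11441972281433, "lng": 9.264907836914064},
        },
        "zoom": 13,
        # only prompts of type "point" are accepted; anything else raises ValueError
        "prompt": [
            {"type": "point", "data": {"lat": 46.21, "lng": 9.44}},
        ],
    }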
src/io/coordinates_pixel_conversion.py CHANGED
@@ -1,5 +1,5 @@
 import math
-from typing import TypedDict
+from typing import TypedDict

 from src import app_logger
 from src.utilities.constants import TILE_SIZE
src/io/geo_helpers.py ADDED
@@ -0,0 +1,70 @@
+"""Async download raster tiles"""
+from pathlib import Path
+from typing import List
+
+import numpy as np
+
+from src import app_logger, PROJECT_ROOT_FOLDER
+
+
+def load_affine_transformation_from_matrix(matrix_source_coeffs: List):
+    from affine import Affine
+
+    if len(matrix_source_coeffs) != 6:
+        raise ValueError(f"Expected 6 coefficients, found {len(matrix_source_coeffs)}; "
+                         f"argument type: {type(matrix_source_coeffs)}.")
+
+    try:
+        a, d, b, e, c, f = (float(x) for x in matrix_source_coeffs)
+        center = tuple.__new__(Affine, [a, b, c, d, e, f, 0.0, 0.0, 1.0])
+        return center * Affine.translation(-0.5, -0.5)
+    except Exception as e:
+        app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
+
+
+def get_vectorized_raster_as_geojson(rio_output, mask):
+    try:
+        from rasterio import open as rio_open
+        from rasterio.features import shapes
+        from geopandas import GeoDataFrame
+
+        app_logger.info(f"read downloaded geotiff:{rio_output} to create the shapes_generator...")
+
+        with rio_open(rio_output, "r", driver="GTiff") as rio_src:
+            raster = rio_src.read()
+            transform = rio_src.transform
+            crs = rio_src.crs
+
+        app_logger.debug(f"geotiff band:{raster.shape}, type: {type(raster)}, dtype: {raster.dtype}.")
+        app_logger.debug(f"rio_src crs:{crs}.")
+        app_logger.debug(f"rio_src transform:{transform}.")
+
+        # mask = band != 0
+        shapes_generator = ({
+            'properties': {'raster_val': v}, 'geometry': s}
+            for i, (s, v)
+            # in enumerate(shapes(mask, mask=(band != 0), transform=rio_src.transform))
+            # use mask=None to avoid using source
+            in enumerate(shapes(mask, mask=None, transform=transform))
+        )
+        app_logger.info(f"created shapes_generator, transform it to a polygon list...")
+        shapes_list = list(shapes_generator)
+        app_logger.info(f"created {len(shapes_list)} polygons.")
+        gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
+        app_logger.info(f"created a GeoDataFrame, export to geojson...")
+        geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
+        app_logger.info(f"created geojson, preparing API response...")
+        return {
+            "geojson": geojson,
+            "n_shapes_geojson": len(shapes_list)
+        }
+    except Exception as e_shape_band:
+        app_logger.error(f"e_shape_band:{e_shape_band}.")
+        raise e_shape_band
+
+
+if __name__ == '__main__':
+    npy_file = "prediction_masks_46.27697017893455_9.616470336914064_46.11441972281433_9.264907836914064.npy"
+    prediction_masks = np.load(Path(PROJECT_ROOT_FOLDER) / "tmp" / "try_by_steps" / "t0" / npy_file)

+    print("#")
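A minimal usage sketch of the two helpers added above. The transform coefficients and GeoTIFF path are placeholders, and the vectorization call assumes a real EPSG:3857 raster already saved by the prediction pipeline, so treat this as an illustration rather than working project code:

    import numpy as np

    from src.io.geo_helpers import (
        get_vectorized_raster_as_geojson,
        load_affine_transformation_from_matrix,
    )

    # six coefficients in the order the helper unpacks them (a, d, b, e, c, f);
    # placeholder values for a north-up grid of about 0.001 degree per pixel
    matrix = [0.001, 0.0, 0.0, -0.001, 9.26, 46.28]
    affine_transform = load_affine_transformation_from_matrix(matrix)
    print(affine_transform)

    # binary uint8 mask with one square "prediction"; the geotiff path below is
    # a placeholder and must point to an existing raster for the call to succeed
    mask = np.zeros((256, 256), dtype=np.uint8)
    mask[64:128, 64:128] = 255
    response = get_vectorized_raster_as_geojson("/tmp/downloaded_rio_example.tif", mask)
    print(response["n_shapes_geojson"])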
src/io/helpers.py DELETED
@@ -1,618 +0,0 @@
-"""Helpers dedicated to georeferencing duties"""
-import base64
-import glob
-import json
-import os
-import zlib
-from math import log, tan, radians, cos, pi, floor, degrees, atan, sinh
-
-import rasterio
-
-from src import app_logger
-from src.utilities.constants import GEOJSON_SQUARE_TEMPLATE, OUTPUT_CRS_STRING, INPUT_CRS_STRING, SKIP_CONDITIONS_LIST
-from src.utilities.type_hints import ts_llist_float2, ts_geojson, ts_dict_str2b, ts_tuple_flat2, ts_tuple_flat4, \
-    ts_list_float4, ts_llist2, ts_tuple_int4, ts_ddict2
-
-ZIPJSON_KEY = 'base64(zip(o))'
-
-
-def get_geojson_square_angles(bounding_box:ts_llist_float2, name:str="buffer", debug:bool=False) -> ts_geojson:
-    """
-    Create a geojson-like dict rectangle from the input latitude/longitude bounding box
-
-    Args:
-        bounding_box: float latitude/longitude bounding box
-        name: geojson-like rectangle name
-        debug: bool, default=False
-            logging debug argument
-
-    Returns:
-        dict: geojson-like object rectangle
-
-    """
-    import copy
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-    app_logger.info(f"bounding_box:{bounding_box}.")
-    top = bounding_box[0][0]
-    right = bounding_box[0][1]
-    bottom = bounding_box[1][0]
-    left = bounding_box[1][1]
-    bottom_left = [left, bottom]
-    top_left = [left, top]
-    top_right = [right, top]
-    bottom_right = [right, bottom]
-    coords = [bottom_left, top_left, top_right, bottom_right]
-    app_logger.info(f"coords:{coords}.")
-    geojson = copy.copy(GEOJSON_SQUARE_TEMPLATE)
-    geojson["name"] = name
-    geojson["features"][0]["geometry"]["coordinates"] = [[coords]]
-    app_logger.info(f"geojson:{geojson}.")
-    return geojson
-
-
-def crop_raster(merged_raster_path:str, area_crop_geojson:dict, debug:bool=False) -> ts_dict_str2b:
-    """
-    Crop a raster using a geojson-like object rectangle
-
-    Args:
-        merged_raster_path: filename path pointing string to the raster to crop
-        area_crop_geojson: geojson-like object rectangle
-        debug: bool, default=False
-            logging debug argument
-
-    Returns:
-        dict: the cropped raster numpy array and the transform object with the georeferencing reference
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-    try:
-        import rasterio
-        from rasterio.mask import mask
-
-        app_logger.info(f"area_crop_geojson::{area_crop_geojson}.")
-        geojson_reprojected = get_geojson_reprojected(area_crop_geojson, debug=debug)
-        shapes = [feature["geometry"] for feature in geojson_reprojected["features"]]
-        app_logger.info(f"geojson_reprojected:{geojson_reprojected}.")
-
-        app_logger.info(f"reading merged_raster_path while masking it from path:{merged_raster_path}.")
-        with rasterio.open(merged_raster_path, "r") as src:
-            masked_raster, masked_transform = mask(src, shapes, crop=True)
-            masked_meta = src.meta
-            app_logger.info(f"merged_raster_path, src:{src}.")
-            masked_meta.update({
-                "driver": "GTiff", "height": masked_raster.shape[1],
-                "width": masked_raster.shape[2], "transform": masked_transform}
-            )
-        return {"masked_raster": masked_raster, "masked_meta": masked_meta, "masked_transform": masked_transform}
-    except Exception as e:
-        app_logger.error(e)
-        raise e
-
-
-def get_geojson_reprojected(geojson:dict, output_crs:str=OUTPUT_CRS_STRING, debug:bool=False) -> dict:
-    """
-    change projection for input geojson-like object polygon
-
-    Args:
-        geojson: input geojson-like object polygon
-        output_crs: output crs string - Coordinate Reference Systems
-        debug: logging debug argument
-
-    Returns:
-        dict: reprojected geojson-like object
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-    if not isinstance(geojson, dict):
-        raise ValueError(f"geojson here should be a dict, not of type {type(geojson)}.")
-    app_logger.info(f"start reprojecting geojson:{geojson}.")
-    try:
-        features = geojson['features']
-
-        output_crs_json = {"type": "name", "properties": {"name": f"urn:ogc:def:crs:{output_crs}"}}
-        geojson_output = {'features': [], 'type': 'FeatureCollection', "name": "converted", "crs": output_crs_json}
-
-        # Iterate through each feature of the feature collection
-        for feature in features:
-            feature_out = feature.copy()
-            new_coords = []
-            feat = feature['geometry']
-            app_logger.info(f"feat:{feat}.")
-            coords = feat['coordinates']
-            app_logger.info(f"coordinates:{coords}.")
-            # iterate over "coordinates" lists with 3 nested loops, practically with only one element but last loop
-            for coord_a in coords:
-                new_coords_a = []
-                for cord_b in coord_a:
-                    new_coords_b = []
-                    # Project/transform coordinate pairs of each ring
-                    # (iteration required in case geometry type is MultiPolygon, or there are holes)
-                    for xconv, yconf in cord_b:
-                        app_logger.info(f"xconv, yconf:{xconv},{yconf}.")
-                        x2, y2 = latlon_to_mercator(xconv, yconf)
-                        app_logger.info(f"x2, y2:{x2},{y2}.")
-                        new_coords_b.append([x2, y2])
-                    new_coords_a.append(new_coords_b)
-                new_coords.append(new_coords_a)
-            feature_out['geometry']['coordinates'] = new_coords
-            geojson_output['features'].append(feature_out)
-        app_logger.info(f"geojson_output:{geojson_output}.")
-        return geojson_output
-    except KeyError as ke_get_geojson_reprojected:
-        msg = f"ke_get_geojson_reprojected:{ke_get_geojson_reprojected}."
-        app_logger.error(msg)
-        raise KeyError(msg)
-
-
-def latlon_to_mercator(
-        lat:float, lon:float, input_crs:str=INPUT_CRS_STRING, output_crs:str=OUTPUT_CRS_STRING, always_xy:bool=True, debug:bool=False
-) -> ts_tuple_flat2:
-    """
-    Return a tuple of latitude, longitude float coordinates values transformed to mercator
-
-    Args:
-
-        lat: input latitude float value
-        lon: input longitude float value
-        input_crs: string, input Coordinate Reference Systems
-        output_crs: string, output Coordinate Reference Systems
-        always_xy: bool, default=True.
-            If true, the transform method will accept as input and return as output
-            coordinates using the traditional GIS order, that is longitude, latitude
-            for geographic CRS and easting, northing for most projected CRS.
-        debug: bool, default=False.
-            logging debug argument
-
-    Returns:
-        tuple latitude/longitude float values
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-    #app_logger = setup_logging(debug)
-    try:
-        from pyproj import Transformer
-        app_logger.info(f"lat:{lat},lon:{lon}.")
-        transformer = Transformer.from_crs(input_crs, output_crs, always_xy=always_xy)
-        out_lat, out_lon = transformer.transform(lat, lon)
-        app_logger.info(f"out_lat:{out_lat},out_lon:{out_lon}.")
-        return out_lat, out_lon
-    except Exception as e_latlon_to_mercator:
-        app_logger.error(f"e_latlon_to_mercator:{e_latlon_to_mercator}.")
-        raise e_latlon_to_mercator
-
-
-def sec(x:float) -> float:
-    """
-    Return secant (the reciprocal of the cosine) for given value
-
-    Args:
-        x: input float value
-
-    Returns:
-        float: secant of given float value
-
-    """
-    return 1 / cos(x)
-
-
-def latlon_to_xyz(lat:float, lon:float, z:int) -> ts_tuple_flat2:
-    """
-    Return x/y coordinates points for tiles from latitude/longitude values point.
-
-    Args:
-        lon: float longitude value
-        lat: float latitude value
-        z: float zoom value
-
-    Returns:
-        tuple: x, y values tiles coordinates
-
-    """
-    tile_count = pow(2, z)
-    x = (lon + 180) / 360
-    y = (1 - log(tan(radians(lat)) + sec(radians(lat))) / pi) / 2
-    return tile_count * x, tile_count * y
-
-
-def bbox_to_xyz(lon_min:float, lon_max:float, lat_min:float, lat_max:float, z:int) -> ts_tuple_flat4:
-    """
-    Return xyz reference coordinates for tiles from latitude/longitude min and max values.
-
-    Args:
-        lon_min: float min longitude value
-        lon_max: float max longitude value
-        lat_min: float min latitude value
-        lat_max: float max latitude value
-        z: float zoom value
-
-    Returns:
-        tuple: float x min, x max, y min, y max values tiles coordinates
-
-    """
-    x_min, y_max = latlon_to_xyz(lat_min, lon_min, z)
-    x_max, y_min = latlon_to_xyz(lat_max, lon_max, z)
-    return (floor(x_min), floor(x_max),
-            floor(y_min), floor(y_max))
-
-
-def mercator_to_lat(mercator_y:float) -> float:
-    """
-    Return latitude value coordinate from mercator coordinate value
-
-    Args:
-        mercator_y: float mercator value coordinate
-
-    Returns:
-        float: latitude value coordinate
-
-    """
-    return degrees(atan(sinh(mercator_y)))
-
-
-def y_to_lat_edges(y:float, z:int) -> ts_tuple_flat2:
-    """
-    Return edge float latitude values coordinates from x,z tiles coordinates
-
-    Args:
-        y: float x tile value coordinate
-        z: float zoom tile value coordinate
-
-    Returns:
-        tuple: two float latitude values coordinates
-
-    """
-    tile_count = pow(2, z)
-    unit = 1 / tile_count
-    relative_y1 = y * unit
-    relative_y2 = relative_y1 + unit
-    lat1 = mercator_to_lat(pi * (1 - 2 * relative_y1))
-    lat2 = mercator_to_lat(pi * (1 - 2 * relative_y2))
-    return lat1, lat2
-
-
-def x_to_lon_edges(x:float, z:int) -> ts_tuple_flat2:
-    """
-    Return edge float longitude values coordinates from x,z tiles coordinates
-
-    Args:
-        x: float x tile value coordinate
-        z: float zoom tile value coordinate
-
-    Returns:
-        tuple: two float longitude values coordinates
-
-    """
-    tile_count = pow(2, z)
-    unit = 360 / tile_count
-    lon1 = -180 + x * unit
-    lon2 = lon1 + unit
-    return lon1, lon2
-
-
-def tile_edges(x:float, y:float, z:int) -> ts_list_float4:
-    """
-    Return edge float latitude/longitude value coordinates from xyz tiles coordinates
-
-    Args:
-        x: float x tile value coordinate
-        y: float y tile value coordinate
-        z: float zoom tile value coordinate
-
-    Returns:
-        tuple: float latitude/longitude values coordinates
-
-    """
-    lat1, lat2 = y_to_lat_edges(y, z)
-    lon1, lon2 = x_to_lon_edges(x, z)
-    return [lon1, lat1, lon2, lat2]
-
-
-def merge_tiles(input_pattern:str, output_path:str, temp_dir:str, debug:bool=False) -> None:
-    """
-    Merge given raster glob input pattern into one unique georeferenced raster.
-
-    Args:
-        input_pattern: input glob pattern needed for search the raster filenames
-        output_path: output path where to write the merged raster
-        temp_dir: temporary folder needed for create
-        debug: bool, default=False.
-            logging debug argument
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-    try:
-        from osgeo import gdal
-    except ModuleNotFoundError as module_error_merge_tiles:
-        msg = f"module_error_merge_tiles:{module_error_merge_tiles}."
-        app_logger.error(msg)
-        raise module_error_merge_tiles
-
-    try:
-        vrt_path = os.path.join(temp_dir, "tiles.vrt")
-        os_list_dir1 = os.listdir(temp_dir)
-        app_logger.info(f"os_list_dir1:{os_list_dir1}.")
-
-        gdal.BuildVRT(vrt_path, glob.glob(input_pattern))
-        gdal.Translate(output_path, vrt_path)
-
-        os_list_dir2 = os.listdir(temp_dir)
-        app_logger.info(f"os_list_dir2:{os_list_dir2}.")
-    except IOError as ioe_merge_tiles:
-        msg = f"ioe_merge_tiles:{ioe_merge_tiles}."
-        app_logger.error(msg)
-        raise ioe_merge_tiles
-
-
-def get_lat_lon_coords(bounding_box: ts_llist2) -> ts_tuple_int4:
-    """
-    Return couples of float latitude/longitude values from bounding box input list.
-
-    Args:
-        bounding_box: bounding box input list of latitude/longitude coordinates
-
-    Returns:
-        tuple: float longitude min, latitude min, longitude max, longitude max values coordinates
-
-    """
-    top_right, bottom_left = bounding_box
-    lat_max, lon_max = top_right
-    lat_min, lon_min = bottom_left
-    if lon_min == lon_max or lat_min == lat_max:
-        raise ValueError(f"latitude and/or longitude coordinates should not be equal each others... {bounding_box}.")
-    return lon_min, lat_min, lon_max, lat_max
-
-
-def get_prediction_georeferenced(prediction_obj:dict, transform:rasterio.transform, skip_conditions_list:list=None, debug:bool=False) -> dict:
-    """
-    Return a georeferenced geojson-like object starting from a dict containing "predictions" -> "points" list.
-    Apply the affine transform matrix of georeferenced raster submitted to the machine learning model.
-
-    Args:
-        prediction_obj: input dict
-        transform: 'rasterio.transform' or dict list, affine tranform matrix
-        skip_conditions_list: dict list, skip condition list
-        debug: bool, default=False.
-            logging debug argument
-
-    Returns:
-        dict
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    if skip_conditions_list is None:
-        skip_conditions_list = SKIP_CONDITIONS_LIST
-
-    #app_logger = setup_logging(debug)
-    app_logger.info(f"prediction_obj::{prediction_obj}, transform::{transform}.")
-    crs = {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::3857"}}
-    geojson_obj = {'features': [], 'type': 'FeatureCollection', "name": "geojson_name", "crs": crs}
-    for n, prediction in enumerate(prediction_obj["predictions"]):
-        points_dict_ = prediction["points"]
-        points_list = [[p["x"], p["y"]] for p in points_dict_]
-        app_logger.info(f"points_list::{points_list}.")
-        # if check_skip_conditions(prediction, skip_conditions_list, debug=debug):
-        #     continue
-        feature = populate_features_geojson(n, points_list, confidence=prediction["confidence"], geomorphic_class=prediction["class"])
-        app_logger.info(f"geojson::feature:{feature}.")
-        feature["geometry"] = apply_transform(feature["geometry"], transform, debug=debug)
-        geojson_obj["features"].append(feature)
-    app_logger.info(f"geojson::post_update:{geojson_obj}.")
-    return geojson_obj
-
-
-def populate_features_geojson(idx: int, coordinates_list: list, **kwargs) -> ts_ddict2:
-    """
-    Return a list of coordinate points in a geojson-like feature-like object.
-
-    Args:
-        idx: int, feature index
-        coordinates_list: dict list, coordinate points
-        **kwargs: optional arguments to merge within the geojson properties feature
-
-    Returns:
-        dict
-
-    """
-    return {
-        "type": "Feature",
-        "properties": {"id": idx, **kwargs},
-        "geometry": {
-            "type": "MultiPolygon",
-            "coordinates": [[coordinates_list]],
-        }
-    }
-
-
-def check_skip_conditions(prediction:dict, skip_conditions_list:list, debug:bool=False) -> bool:
-    """
-    Loop over elements within skip_condition_list and return a boolean if no condition to skip (or exceptions).
-
-    Args:
-        prediction: input dict to check
-        skip_conditions_list: dict list with conditions to evaluate
-        debug: bool, default=False
-            logging debug argument
-
-    Returns:
-        bool
-
-    """
-    for obj in skip_conditions_list:
-        return skip_feature(prediction, obj["skip_key"], obj["skip_value"], obj["skip_condition"], debug=debug)
-    return False
-
-
-def skip_feature(prediction:dict, skip_key:float, skip_value:str, skip_condition:str, debug:bool=False) -> bool:
-    """
-    Return False if values from input dict shouldn't be skipped,
-    True in case of exceptions, empty skip_condition or when chosen condition meets skip_value and skip_condition.
-
-    E.g. confidence should be major than 0.8: if confidence is equal to 0.65 then return True (0.65 < 0.8) and skip!
-
-    Args:
-        prediction: input dict to check
-        skip_key: skip condition key string
-        skip_value: skip condition value string
-        skip_condition: string (major | minor | equal)
-        debug: bool, default=False
-            logging debug argument
-
-    Returns:
-        bool
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-    #app_logger = setup_logging(debug)
-    try:
-        v = prediction[skip_key]
-        match skip_condition:
-            case "major":
-                return v > skip_value
-            case "minor":
-                return v < skip_value
-            case "equal":
-                return v == skip_value
-            case "":
-                return False
-    except KeyError as ke_filter_feature:
-        app_logger.error(f"ke_filter_feature:{ke_filter_feature}.")
-        return False
-    except Exception as e_filter_feature:
-        app_logger.error(f"e_filter_feature:{e_filter_feature}.")
-        return False
-
-
-def apply_transform(geometry:object, transform:list[object], debug:bool=False) -> dict:
-    """
-    Returns a GeoJSON-like mapping from a transformed geometry using an affine transformation matrix.
-
-    The coefficient matrix is provided as a list or tuple with 6 items
-    for 2D transformations. The 6 parameter matrix is::
-
-        [a, b, d, e, xoff, yoff]
-
-    which represents the augmented matrix::
-
-        [x']   / a  b  xoff \ [x]
-        [y'] = | d  e  yoff | [y]
-        [1 ]   \ 0  0     1 / [1]
-
-    or the equations for the transformed coordinates::
-
-        x' = a * x + b * y + xoff
-        y' = d * x + e * y + yoff
-
-    Args:
-        geometry: geometry value from a geojson dict
-        transform: list of float values (affine transformation matrix)
-        debug: bool, default=False
-            logging debug argument
-
-    Returns:
-        dict
-
-    """
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-
-    try:
-        from shapely.affinity import affine_transform
-        from shapely.geometry import mapping, shape
-        try:
-            geometry_transformed = affine_transform(shape(geometry), [transform.a, transform.b, transform.d, transform.e, transform.xoff, transform.yoff])
-        except AttributeError as ae:
-            app_logger.warning(f"ae:{ae}.")
-            geometry_transformed = affine_transform(shape(geometry), [transform[0], transform[1], transform[2], transform[3], transform[4], transform[5]])
-        geometry_serialized = mapping(geometry_transformed)
-        app_logger.info(f"geometry_serialized:{geometry_serialized}.")
-        return geometry_serialized
-    except ImportError as ie_apply_transform:
-        app_logger.error(f"ie_apply_transform:{ie_apply_transform}.")
-        raise ie_apply_transform
-    except Exception as e_apply_transform:
-        app_logger.error(f"e_apply_transform:{e_apply_transform}.")
-        raise e_apply_transform
-
-
-def get_perc(nan_count:int, total_count:int) -> str:
-    """
-    Return a formatted string with a percentage value representing the ratio between NaN and total number elements within a numpy array
-
-    Args:
-        nan_count: NaN value elements
-        total_count: total count of elements
-
-    Returns:
-        str
-
-    """
-    return f"{100*nan_count/total_count:.2f}"
-
-
-def json_unzip(j:dict, debug:bool=False) -> str:
-    """
-    Return uncompressed content from input dict using 'zlib' library
-
-    Args:
-        j: input dict to uncompress. key must be 'base64(zip(o))'
-        debug: logging debug argument
-
-    Returns:
-        dict: uncompressed dict
-
-    """
-    from json import JSONDecodeError
-    from zlib import error as zlib_error
-
-    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
-
-    #app_logger = setup_logging(debug)
-
-    try:
-        j = zlib.decompress(base64.b64decode(j[ZIPJSON_KEY]))
-    except KeyError as ke:
-        ke_error_msg = f"Could not decode/unzip the content because of wrong/missing dict key:{ke}."
-        raise KeyError(ke_error_msg)
-    except zlib_error as zlib_error2:
-        zlib_error2_msg = f"Could not decode/unzip the content because of:{zlib_error2}."
-        app_logger.error(zlib_error2_msg)
-        raise RuntimeError(zlib_error2_msg)
-
-    try:
-        j = json.loads(j)
-    except JSONDecodeError as json_e1:
-        msg = f"Could interpret the unzipped content because of JSONDecodeError with msg:{json_e1.msg}, pos:{json_e1.pos}, broken json:'{json_e1.doc}'"
-        app_logger.error(msg)
-        raise RuntimeError(msg)
-
-    return j
-
-
-def json_zip(j:dict) -> dict[str]:
-    """
-    Return compressed content from input dict using 'zlib' library
-
-    Args:
-        j: input dict to compress
-
-    Returns:
-        dict: compressed dict
-
-    """
-    return {
-        ZIPJSON_KEY: base64.b64encode(
-            zlib.compress(
-                json.dumps(j).encode('utf-8')
-            )
-        ).decode('ascii')
-    }
src/io/tiles_to_tiff.py DELETED
@@ -1,91 +0,0 @@
-"""Async download raster tiles"""
-from pathlib import Path
-
-import numpy as np
-
-from src import app_logger, PROJECT_ROOT_FOLDER
-from src.io.tms2geotiff import download_extent
-from src.utilities.constants import COMPLETE_URL_TILES, DEFAULT_TMS
-from src.utilities.type_hints import ts_llist2
-
-
-COOKIE_SESSION = {
-    "Accept": "*/*",
-    "Accept-Encoding": "gzip, deflate",
-    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0",
-}
-
-
-def load_affine_transformation_from_matrix(matrix_source_coeffs):
-    from affine import Affine
-
-    if len(matrix_source_coeffs) != 6:
-        raise ValueError(f"Expected 6 coefficients, found {len(matrix_source_coeffs)};"
-                         f"argument type: {type(matrix_source_coeffs)}.")
-
-    try:
-        a, d, b, e, c, f = (float(x) for x in matrix_source_coeffs)
-        center = tuple.__new__(Affine, [a, b, c, d, e, f, 0.0, 0.0, 1.0])
-        return center * Affine.translation(-0.5, -0.5)
-    except Exception as e:
-        app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
-
-
-# @timing_decorator
-def convert(bounding_box: ts_llist2, zoom: int) -> tuple:
-    """
-    Starting from a bounding box of two couples of latitude and longitude coordinate values, recognize a stratovolcano from an RGB image. The algorithm
-    create the image composing three channels as slope, DEM (Digital Elevation Model) and curvature. In more detail:
-
-    - download a series of terrain DEM (Digital Elevation Model) raster tiles enclosed within that bounding box
-    - merge all the downloaded rasters
-    - crop the merged raster
-    - process the cropped raster to extract slope and curvature (1st and 2nd degree derivative)
-    - produce three raster channels (DEM, slope and curvature rasters) to produce an RGB raster image
-    - submit the RGB image to a remote machine learning service to try to recognize a polygon representing a stratovolcano
-    - the output of the machine learning service is a json, so we need to georeferencing it
-    - finally we return a dict as response containing
-        - uploaded_file_name
-        - bucket_name
-        - prediction georeferenced geojson-like dict
-
-    Args:
-        bounding_box: float latitude/longitude bounding box
-        zoom: integer zoom value
-
-    Returns:
-        dict: uploaded_file_name (str), bucket_name (str), prediction_georef (dict), n_total_obj_prediction (str)
-
-    """
-
-    tile_source = COMPLETE_URL_TILES
-    app_logger.info(f"start_args: tile_source:{tile_source},bounding_box:{bounding_box},zoom:{zoom}.")
-
-    try:
-        import rasterio
-
-        app_logger.info(f'tile_source: {tile_source}!')
-        pt0, pt1 = bounding_box
-        app_logger.info("downloading...")
-        img, matrix = download_extent(DEFAULT_TMS, pt0[0], pt0[1], pt1[0], pt1[1], zoom)
-
-        app_logger.info(f'img: type {type(img)}, len_matrix:{len(matrix)}, matrix {matrix}.')
-        app_logger.info(f'img: size (shape if PIL) {img.size}.')
-        try:
-            np_img = np.array(img)
-            app_logger.info(f'img: shape (numpy) {np_img.shape}.')
-        except Exception as e_shape:
-            app_logger.info(f'e_shape {e_shape}.')
-            raise e_shape
-
-        return img, matrix
-    except ImportError as e_import_convert:
-        app_logger.error(f"e0:{e_import_convert}.")
-        raise e_import_convert
-
-
-if __name__ == '__main__':
-    npy_file = "prediction_masks_46.27697017893455_9.616470336914064_46.11441972281433_9.264907836914064.npy"
-    prediction_masks = np.load(Path(PROJECT_ROOT_FOLDER) / "tmp" / "try_by_steps" / "t0" / npy_file)
-
-    print("#")
src/prediction_api/predictors.py CHANGED
@@ -1,59 +1,21 @@
 # Press the green button in the gutter to run the script.
 import tempfile
 from pathlib import Path
-from typing import List

 import numpy as np
-import rasterio

 from src import app_logger, MODEL_FOLDER
-from src.io.
-from src.io.tms2geotiff import save_geotiff_gdal
+from src.io.geo_helpers import get_vectorized_raster_as_geojson
+from src.io.tms2geotiff import save_geotiff_gdal, download_extent
 from src.prediction_api.sam_onnx import SegmentAnythingONNX
-from src.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME
+from src.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_TMS


 models_dict = {"fastsam": {"instance": None}}


-def zip_arrays(arr1, arr2):
-    try:
-        arr1_list = arr1.tolist()
-        arr2_list = arr2.tolist()
-        # return {serialize(k): serialize(v) for k, v in zip(arr1_list, arr2_list)}
-        d = {}
-        for n1, n2 in zip(arr1_list, arr2_list):
-            app_logger.info(f"n1:{n1}, type {type(n1)}, n2:{n2}, type {type(n2)}.")
-            n1f = str(n1)
-            n2f = str(n2)
-            app_logger.info(f"n1:{n1}=>{n1f}, n2:{n2}=>{n2f}.")
-            d[n1f] = n2f
-        app_logger.info(f"zipped dict:{d}.")
-        return d
-    except Exception as e_zip_arrays:
-        app_logger.info(f"exception zip_arrays:{e_zip_arrays}.")
-        return {}
-
-
-def load_affine_transformation_from_matrix(matrix_source_coeffs: List):
-    from affine import Affine
-
-    if len(matrix_source_coeffs) != 6:
-        raise ValueError(f"Expected 6 coefficients, found {len(matrix_source_coeffs)}; argument type: {type(matrix_source_coeffs)}.")
-
-    try:
-        a, d, b, e, c, f = (float(x) for x in matrix_source_coeffs)
-        center = tuple.__new__(Affine, [a, b, c, d, e, f, 0.0, 0.0, 1.0])
-        return center * Affine.translation(-0.5, -0.5)
-    except Exception as e:
-        app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
-
-
 def samexporter_predict(bbox, prompt: list[dict], zoom: float, model_name: str = "fastsam") -> dict:
     try:
-        from rasterio.features import shapes
-        from geopandas import GeoDataFrame
-
         if models_dict[model_name]["instance"] is None:
             app_logger.info(f"missing instance model {model_name}, instantiating it now!")
             model_instance = SegmentAnythingONNX(
@@ -65,71 +27,47 @@ def samexporter_predict(bbox, prompt: list[dict], zoom: float, model_name: str =
         models_instance = models_dict[model_name]["instance"]

         with tempfile.TemporaryDirectory() as input_tmp_dir:
-
-
-
-            )
-            app_logger.debug(f"
+            app_logger.info(f'tile_source: {DEFAULT_TMS}!')
+            pt0, pt1 = bbox
+            app_logger.info("downloading...")
+            img, matrix = download_extent(DEFAULT_TMS, pt0[0], pt0[1], pt1[0], pt1[1], zoom)
+            app_logger.debug(f"img type {type(img)} with shape/size:{img.size}, matrix:{matrix}.")

             pt0, pt1 = bbox
             rio_output = str(Path(input_tmp_dir) / f"downloaded_rio_{pt0[0]}_{pt0[1]}_{pt1[0]}_{pt1[1]}.tif")
-            app_logger.debug(f"saving downloaded geotiff
+            app_logger.debug(f"saving downloaded image as geotiff using matrix {matrix} to {rio_output}...")
             save_geotiff_gdal(img, rio_output, matrix)
-            app_logger.info(f"saved downloaded geotiff image to {rio_output}...")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # mask_band = band != 0
-            shapes_generator = ({
-                'properties': {'raster_val': v}, 'geometry': s}
-                for i, (s, v)
-                # in enumerate(shapes(mask, mask=(band != 0), transform=rio_src.transform))
-                # use mask=None to avoid using source
-                in enumerate(shapes(mask, mask=None, transform=rio_src.transform))
-            )
-            app_logger.info(f"created shapes_generator, transform it to a polygon list...")
-            shapes_list = list(shapes_generator)
-            app_logger.info(f"created {len(shapes_list)} polygons.")
-            gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
-            app_logger.info(f"created a GeoDataFrame, export to geojson...")
-            geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
-            app_logger.info(f"created geojson...")
-
-            return {
-                "geojson": geojson,
-                "n_shapes_geojson": len(shapes_list),
-                "n_predictions": len(prediction_masks),
-                # "n_pixels_predictions": zip_arrays(mask_unique_values, mask_unique_values_count),
-            }
-    except ImportError as e:
-        app_logger.error(f"Error trying import module:{e}.")
+            app_logger.info(f"saved downloaded geotiff image to {rio_output}, preparing inference...")
+
+            mask, prediction_masks = get_raster_inference(img, prompt, models_instance, model_name)
+            n_predictions = len(prediction_masks)
+            app_logger.info(f"created {n_predictions} masks, preparing conversion to geojson...")
+            return {
+                "n_predictions": n_predictions,
+                **get_vectorized_raster_as_geojson(rio_output, mask)
+            }
+    except ImportError as e_import_module:
+        app_logger.error(f"Error trying import module:{e_import_module}.")
+
+
+def get_raster_inference(img, prompt, models_instance, model_name):
+    np_img = np.array(img)
+    app_logger.info(f"img type {type(np_img)}, prompt:{prompt}.")
+    app_logger.debug(f"onnxruntime input shape/size (shape if PIL) {np_img.size}.")
+    try:
+        app_logger.debug(f"onnxruntime input shape (NUMPY) {np_img.shape}.")
+    except Exception as e_shape:
+        app_logger.error(f"e_shape:{e_shape}.")
+    app_logger.info(f"instantiated model {model_name}, ENCODER {MODEL_ENCODER_NAME}, "
+                    f"DECODER {MODEL_DECODER_NAME} from {MODEL_FOLDER}: Creating embedding...")
+    embedding = models_instance.encode(np_img)
+    app_logger.debug(f"embedding created, running predict_masks with prompt {prompt}...")
+    inference_out = models_instance.predict_masks(embedding, prompt)
+    app_logger.info(f"Created {len(inference_out)} prediction_masks,"
+                    f"shape:{inference_out.shape}, dtype:{inference_out.dtype}.")
+    mask = np.zeros((inference_out.shape[2], inference_out.shape[3]), dtype=np.uint8)
+    for n, m in enumerate(inference_out[0, :, :, :]):
+        app_logger.debug(f"{n}th of prediction_masks shape {inference_out.shape}"
+                         f" => mask shape:{mask.shape}, {mask.dtype}.")
+        mask[m > 0.0] = 255
+    return mask, inference_out
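The mask-merging loop at the end of the new get_raster_inference collapses the model's per-mask scores into a single binary raster. A standalone sketch of just that step, using a random array in place of the real ONNX output (the shape and dtype are assumptions inferred from the code above):

    import numpy as np

    # stand-in for models_instance.predict_masks(...): (batch, n_masks, height, width) scores
    inference_out = np.random.default_rng(42).standard_normal((1, 3, 64, 64))

    # same rule as get_raster_inference: any pixel scored above 0.0 by any
    # predicted mask becomes 255 in the merged uint8 mask
    mask = np.zeros((inference_out.shape[2], inference_out.shape[3]), dtype=np.uint8)
    for n, m in enumerate(inference_out[0, :, :, :]):
        mask[m > 0.0] = 255

    print(mask.shape, mask.dtype, int(np.count_nonzero(mask)))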
src/utilities/constants.py
CHANGED

@@ -1,27 +1,7 @@
 """Project constants"""
-CHANNEL_EXAGGERATIONS_LIST = [2.5, 1.1, 2.0]
 INPUT_CRS_STRING = "EPSG:4326"
 OUTPUT_CRS_STRING = "EPSG:3857"
-# DOMAIN_URL_TILES = "elevation-tiles-prod-eu.s3.eu-central-1.amazonaws.com"
-# RELATIVE_URL_TILES = "geotiff/{z}/{x}/{y}.tif"
-# COMPLETE_URL_TILES = f"https://{DOMAIN_URL_TILES}/{RELATIVE_URL_TILES}"
 ROOT = "/tmp"
-NODATA_VALUES = -32768
-MODEL_PROJECT_NAME = "surferdtm"
-MODEL_VERSION = 4
-SKIP_CONDITIONS_LIST = [{"skip_key": "confidence", "skip_value": 0.5, "skip_condition": "major"}]
-FEATURE_SQUARE_TEMPLATE = [
-    {'type': 'Feature', 'properties': {'id': 1},
-     'geometry': {
-         'type': 'MultiPolygon',
-         'coordinates': [[]]
-     }}
-]
-GEOJSON_SQUARE_TEMPLATE = {
-    'type': 'FeatureCollection', 'name': 'etna_wgs84p',
-    'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
-    'features': FEATURE_SQUARE_TEMPLATE
-}
 CUSTOM_RESPONSE_MESSAGES = {
     200: "ok",
     422: "Missing required parameter",
@@ -29,8 +9,6 @@ CUSTOM_RESPONSE_MESSAGES = {
 }
 MODEL_ENCODER_NAME = "mobile_sam.encoder.onnx"
 MODEL_DECODER_NAME = "sam_vit_h_4b8939.decoder.onnx"
-ZOOM = 13
-SOURCE_TYPE = "Satellite"
 TILE_SIZE = 256
 EARTH_EQUATORIAL_RADIUS = 6378137.0
 DEFAULT_TMS = 'https://tile.openstreetmap.org/{z}/{x}/{y}.png'
@@ -38,4 +16,3 @@ WKT_3857 = 'PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",S
 WKT_3857 += 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],'
 WKT_3857 += 'PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],EXTENSION["PROJ4",'
 WKT_3857 += '"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"],AUTHORITY["EPSG","3857"]]'
-COMPLETE_URL_TILES = DEFAULT_TMS
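The constants kept above (TILE_SIZE, EARTH_EQUATORIAL_RADIUS, DEFAULT_TMS) are the usual ingredients of slippy-map tile math. As an illustration, the standard Web Mercator formula turns a lon/lat pair into the tile addressed by the DEFAULT_TMS template; the sample point is hypothetical, chosen near Etna to echo the removed 'etna_wgs84p' template, and the helper below is a sketch, not the project's own code:

import math

TILE_SIZE = 256
DEFAULT_TMS = 'https://tile.openstreetmap.org/{z}/{x}/{y}.png'

def lonlat_to_pixel(lon_deg: float, lat_deg: float, zoom: int) -> tuple[float, float]:
    # Standard Web Mercator pixel coordinates at the given zoom level.
    world_px = TILE_SIZE * 2 ** zoom
    x = (lon_deg + 180.0) / 360.0 * world_px
    y = (1.0 - math.asinh(math.tan(math.radians(lat_deg))) / math.pi) / 2.0 * world_px
    return x, y

x_px, y_px = lonlat_to_pixel(14.99, 37.75, zoom=13)  # hypothetical point near Etna
print(DEFAULT_TMS.format(z=13, x=int(x_px // TILE_SIZE), y=int(y_px // TILE_SIZE)))
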
src/utilities/type_hints.py
CHANGED

@@ -1,24 +1,3 @@
 """custom type hints"""
-from typing import List, Tuple
-import numpy as np
-
-ts_list_str1 = list[str]
-ts_http2 = tuple[ts_list_str1, ts_list_str1]
-ts_list_float2 = list[float, float]
-ts_llist_float2 = list[ts_list_float2, ts_list_float2]
-ts_geojson = dict[str, str, dict[str, dict[str]], list[str, dict[int], dict[str, list]]]
-ts_float64_1 = tuple[np.float64, np.float64, np.float64, np.float64, np.float64, np.float64]
-ts_float64_2 = tuple[np.float64, np.float64, np.float64, np.float64, np.float64, np.float64, np.float64]
 ts_dict_str2 = dict[str, str]
 ts_dict_str3 = dict[str, str, any]
-ts_dict_str2b = dict[str, any]
-ts_ddict1 = dict[str, dict[str, any], dict, dict, any]
-ts_ddict2 = dict[str, dict, dict[str, list]]
-ts_tuple_str2 = tuple[str, str]
-ts_tuple_arr2 = tuple[np.ndarray, np.ndarray]
-ts_tuple_flat2 = tuple[float, float]
-ts_tuple_flat4 = tuple[float, float, float, float]
-ts_list_float4 = list[float, float, float, float]
-ts_tuple_int4 = tuple[int, int, int, int]
-ts_llist2 = list[[int, int], [int, int]]
-ts_ddict3 = dict[list[dict[float | int | str]], dict[float | int]]
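Note that the two aliases this commit keeps still use subscriptions a static type checker rejects: built-in dict takes exactly two type parameters, and lowercase any is the builtin function, not typing.Any. A possible follow-up cleanup (a suggestion, not part of this commit):

from typing import Any

ts_dict_str2 = dict[str, str]
ts_dict_str3 = dict[str, Any]  # dict[...] accepts only key and value types
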
src/utilities/utilities.py
CHANGED

@@ -1,8 +1,4 @@
 """Various utilities (logger, time benchmark, args dump, numerical and stats info)"""
-import numpy as np
-
-from src import app_logger
-from src.utilities.type_hints import ts_float64_1, ts_float64_2


 def is_base64(sb):
@@ -21,22 +17,6 @@ def is_base64(sb)
     return False


-def get_system_info():
-    import multiprocessing
-    import torch.multiprocessing as mp
-    import os
-    import subprocess
-
-    app_logger.info(f"mp::cpu_count:{mp.cpu_count()}.")
-    app_logger.info(f"multiprocessing::cpu_count:{multiprocessing.cpu_count()}.")
-    app_logger.info(f"os::cpu_count:{os.cpu_count()}")
-    app_logger.info(f"os::sched_getaffinity:{len(os.sched_getaffinity(0))}")
-    lscpu_output = subprocess.run("/usr/bin/lscpu", capture_output=True)
-    app_logger.info(f"lscpu:{lscpu_output.stdout.decode('utf-8')}.")
-    free_mem_output = subprocess.run(["/usr/bin/free", "-m"], capture_output=True)
-    app_logger.info(f"free_mem_output:{free_mem_output.stdout.decode('utf-8')}.")
-
-
 def base64_decode(s):
     import base64

@@ -44,141 +24,3 @@ def base64_decode(s)
     return base64.b64decode(s, validate=True).decode("utf-8")

     return s
-
-
-def get_constants(event: dict, debug=False) -> dict:
-    """
-    Return constants we need to use from event, context and environment variables (both production and test).
-
-    Args:
-        event: request event
-        debug: logging debug argument
-
-    Returns:
-        dict: project constants object
-
-    """
-    import json
-
-    try:
-        body = event["body"]
-    except Exception as e_constants1:
-        app_logger.error(f"e_constants1:{e_constants1}.")
-        body = event
-
-    if isinstance(body, str):
-        body = json.loads(event["body"])
-
-    try:
-        debug = body["debug"]
-        app_logger.info(f"re-try get debug value:{debug}, log_level:{app_logger.level}.")
-    except KeyError:
-        app_logger.error("get_constants:: no debug key, pass...")
-    app_logger.info(f"constants debug:{debug}, log_level:{app_logger.level}, body:{body}.")
-
-    try:
-        return {
-            "bbox": body["bbox"],
-            "point": body["point"],
-            "debug": debug
-        }
-    except KeyError as e_key_constants2:
-        app_logger.error(f"e_key_constants2:{e_key_constants2}.")
-        raise KeyError(f"e_key_constants2:{e_key_constants2}.")
-    except Exception as e_constants2:
-        app_logger.error(f"e_constants2:{e_constants2}.")
-        raise e_constants2
-
-
-def get_rasters_info(rasters_list:list, names_list:list, title:str="", debug:bool=False) -> str:
-    """
-    Analyze numpy arrays' list to extract a string containing some useful information. For every raster:
-
-    - type of raster
-    - raster.dtype if that's instance of np.ndarray
-    - raster shape
-    - min of raster value, over all axis (flattening the array)
-    - max of raster value, over all axis (flattening the array)
-    - mean of raster value, over all axis (flattening the array)
-    - median of raster value, over all axis (flattening the array)
-    - standard deviation of raster value, over all axis (flattening the array)
-    - variance of raster value, over all axis (flattening the array)
-
-    Raises:
-        ValueError if raster_list and names_list have a different number of elements
-
-    Args:
-        rasters_list: list of numpy array raster to analyze
-        names_list: string list of numpy array
-        title: title of current analytic session
-        debug: logging debug argument
-
-    Returns:
-        str: the collected information
-
-    """
-    msg = f"get_rasters_info::title:{title},\n"
-    if not len(rasters_list) == len(names_list):
-        msg = "raster_list and names_list should have the same number of elements:\n"
-        msg += f"len(rasters_list):{len(rasters_list)}, len(names_list):{len(names_list)}."
-        raise ValueError(msg)
-    try:
-        for raster, name in zip(rasters_list, names_list):
-            try:
-                if isinstance(raster, np.ndarray):
-                    shape_or_len = raster.shape
-                elif isinstance(raster, list):
-                    shape_or_len = len(raster)
-                else:
-                    raise ValueError(f"wrong argument type:{raster}, variable:{raster}.")
-                zmin, zmax, zmean, zmedian, zstd, zvar = get_stats_raster(raster, debug=debug)
-                msg += "name:{}:type:{},dtype:{},shape:{},min:{},max:{},mean:{},median:{},std:{},var:{}\n".format(
-                    name, type(raster), raster.dtype if isinstance(raster, np.ndarray) else None, shape_or_len, zmin,
-                    zmax, zmean, zmedian, zstd, zvar
-                )
-            except Exception as get_rasters_types_e:
-                msg = f"get_rasters_types_e::{get_rasters_types_e}, type_raster:{type(raster)}."
-                app_logger.error(msg)
-                raise ValueError(msg)
-    except IndexError as get_rasters_types_ie:
-        app_logger.error(f"get_rasters_types::len:rasters_list:{len(rasters_list)}, len_names_list:{len(names_list)}.")
-        raise get_rasters_types_ie
-    return msg + "\n=============================\n"
-
-
-def get_stats_raster(raster: np.ndarray, get_rms:bool=False, debug:bool=False) -> ts_float64_1 or ts_float64_2:
-    """
-    Analyze a numpy arrays to extract a tuple of useful information:
-
-    - type of raster
-    - raster.dtype if that's instance of np.ndarray
-    - raster shape
-    - min of raster value, over all axis (flattening the array)
-    - max of raster value, over all axis (flattening the array)
-    - mean of raster value, over all axis (flattening the array)
-    - median of raster value, over all axis (flattening the array)
-    - standard deviation of raster value, over all axis (flattening the array)
-    - variance of raster value, over all axis (flattening the array)
-
-    Args:
-        raster: numpy array to analyze
-        get_rms: bool to get Root Mean Square Error
-        debug: logging debug argument
-
-    Returns:
-        tuple: float values (min, max, mean, median, standard deviation, variance of raster)

-    """
-    std = np.nanstd(raster)
-    if get_rms:
-        try:
-            rms = np.sqrt(np.nanmean(np.square(raster)))
-        except Exception as rms_e:
-            rms = None
-            app_logger.error(f"get_stats_raster::rms_Exception:{rms_e}.")
-        app_logger.info(f"nanmin:{type(np.nanmin(raster))}.")
-        return (np.nanmin(raster), np.nanmax(raster), np.nanmean(raster), np.nanmedian(raster), std,
-                np.nanvar(raster), rms)
-    return (np.nanmin(raster), np.nanmax(raster), np.nanmean(raster), np.nanmedian(raster), np.nanstd(raster),
-            np.nanvar(raster))
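For reference, the surviving base64_decode relies on validate=True, so malformed input raises binascii.Error instead of being silently mangled. A quick round-trip sketch (the payload is hypothetical):

import base64

payload = base64.b64encode(b'{"bbox": null}').decode("ascii")  # hypothetical request body
decoded = base64.b64decode(payload, validate=True).decode("utf-8")
assert decoded == '{"bbox": null}'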