Commit 4a17f69

style: Format repository

jsakv committed Jul 9, 2024
1 parent e1c1172 commit 4a17f69

Showing 6 changed files with 20 additions and 46 deletions.
5 changes: 1 addition & 4 deletions app/api/routes/risk.py
@@ -20,7 +20,4 @@
 async def get_pyrorisk(country: str, date: str):
     """Using the country identifier, this will compute the wildfire risk for all known subregions"""
     preds = predictor.predict(date)
-    return [
-        RegionRisk(geocode=k, score=v["score"], explainability=v["explainability"])
-        for k, v in preds.items()
-    ]
+    return [RegionRisk(geocode=k, score=v["score"], explainability=v["explainability"]) for k, v in preds.items()]
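
For context, the one-liner keeps the endpoint's behaviour: it maps the predictor's per-geocode output onto response models. A minimal sketch of the shapes involved (example values hypothetical; RegionRisk fields as shown in this diff):

    preds = {"01": {"score": 0.5, "explainability": "high temperature"}}
    risks = [RegionRisk(geocode=k, score=v["score"], explainability=v["explainability"]) for k, v in preds.items()]
    # risks[0].geocode == "01"; risks[0].score == 0.5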
6 changes: 3 additions & 3 deletions pyrorisks/platform_fwi/get_fwi_effis_score.py
@@ -6,6 +6,8 @@
 from pyro_risks.utils.s3 import S3Bucket
 
 __all__ = ["get_score"]
+
+
 def point_fwi_category(row, point_coords):
     if row["geometry"].contains(point_coords):
         return row["fwi_category"]
@@ -35,8 +37,6 @@ def get_score(lat, lon):
 
     gdf = gpd.GeoDataFrame.from_features(json_content["features"])
 
-    gdf["fwi_category_for_point"] = gdf.apply(
-        lambda row: point_fwi_category(row, point_coords), axis=1
-    )
+    gdf["fwi_category_for_point"] = gdf.apply(lambda row: point_fwi_category(row, point_coords), axis=1)
     point_fwi_score = gdf.dropna().iloc[0]["fwi_category"]
     return point_fwi_score
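
As a usage sketch: point_fwi_category returns a row's fwi_category only when that row's polygon contains the query point, so the row-wise apply leaves missing values everywhere else and dropna() isolates the matching polygon. A minimal standalone illustration (synthetic geometry, assuming the geopandas/shapely stack this module already uses):

    import geopandas as gpd
    from shapely.geometry import Point, box

    gdf = gpd.GeoDataFrame({"fwi_category": [2, 5], "geometry": [box(0, 0, 1, 1), box(1, 1, 2, 2)]})
    point_coords = Point(0.5, 0.5)
    gdf["fwi_category_for_point"] = gdf.apply(lambda row: point_fwi_category(row, point_coords), axis=1)
    # gdf.dropna().iloc[0]["fwi_category"] == 2  (only the first polygon contains the point)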
4 changes: 1 addition & 3 deletions pyrorisks/platform_fwi/main.py
@@ -32,9 +32,7 @@ def main(retrieved_date):
     fwi = FWIHelpers()
     gdf_fwi = fwi.get_fwi(effis_tiff_file_url)
     gdf_fwi = fwi.fwi_sea_remover(gdf_fwi)
-    gdf_fwi["fwi_category"] = gdf_fwi.apply(
-        lambda row: fwi.fwi_category(row["fwi_pixel_value"]), axis=1
-    )
+    gdf_fwi["fwi_category"] = gdf_fwi.apply(lambda row: fwi.fwi_category(row["fwi_pixel_value"]), axis=1)
     gdf_fwi = gdf_fwi.drop("fwi_pixel_value", axis=1)
 
     new_json_fwi = fwi.fwi_geojson_maker(gdf_fwi)
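
Since fwi.fwi_category only reads one scalar column, an equivalent and typically faster form would map the column directly instead of applying row-wise (a behaviour-preserving sketch, not part of this commit):

    gdf_fwi["fwi_category"] = gdf_fwi["fwi_pixel_value"].map(fwi.fwi_category)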
12 changes: 3 additions & 9 deletions pyrorisks/utils/fwi_helpers.py
@@ -41,15 +41,11 @@ def get_fwi(self, tiff_url: str) -> Optional[gpd.GeoDataFrame]:
             image = src.read(1)  # first band
             results = (
                 {"properties": {"fwi_pixel_value": v}, "geometry": s}
-                for i, (s, v) in enumerate(
-                    shapes(image, mask=mask, transform=data["transform"])
-                )
+                for i, (s, v) in enumerate(shapes(image, mask=mask, transform=data["transform"]))
             )
 
             geoms = list(results)
-            gpd_polygonized_raster = gpd.GeoDataFrame.from_features(
-                geoms, crs=str(data["crs"])
-            )
+            gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms, crs=str(data["crs"]))
             return gpd_polygonized_raster
 
         except Exception as e:
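
The generator in this hunk is the usual rasterio polygonisation pattern: shapes() yields one (geometry, pixel_value) pair per connected region of equal value, and from_features turns those into polygons. A minimal standalone sketch (synthetic 2x2 raster and identity transform, not this module's data):

    import numpy as np
    import geopandas as gpd
    from rasterio.features import shapes
    from rasterio.transform import Affine

    image = np.array([[0, 0], [3, 3]], dtype=np.uint8)
    results = (
        {"properties": {"fwi_pixel_value": v}, "geometry": s}
        for s, v in shapes(image, transform=Affine.identity())
    )
    gdf = gpd.GeoDataFrame.from_features(list(results))
    # two polygons: one for the 0-valued row of pixels, one for the 3-valued row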
@@ -66,9 +62,7 @@ def fwi_sea_remover(self, geodataframe: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
         Returns:
             geodataframe (geopandas.GeoDataFrame): The GeoDataFrame without the sea.
         """
-        geodataframe = geodataframe.loc[
-            (geodataframe["fwi_pixel_value"] != 0)
-        ]  # remove the sea
+        geodataframe = geodataframe.loc[(geodataframe["fwi_pixel_value"] != 0)]  # remove the sea
         return geodataframe
 
     def fwi_category(self, fwi_pixel_val: int) -> int:
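
A quick illustration of the filter above (pixel values hypothetical): the boolean mask drops every 0-valued row, which the inline comment identifies as sea.

    import pandas as pd

    df = pd.DataFrame({"fwi_pixel_value": [0, 3, 7]})
    land_only = df.loc[(df["fwi_pixel_value"] != 0)]
    # land_only["fwi_pixel_value"].tolist() == [3, 7]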
36 changes: 11 additions & 25 deletions pyrorisks/utils/s3.py
@@ -105,9 +105,7 @@ def write_json_to_s3(self, json_data: json, object_key: str) -> None:
             json_data (json): The JSON data we want to upload.
             object_key (str): The S3 key (path) where the file will be stored.
         """
-        self.bucket.put_object(
-            Key=object_key, Body=bytes(json.dumps(json_data).encode("UTF-8"))
-        )
+        self.bucket.put_object(Key=object_key, Body=bytes(json.dumps(json_data).encode("UTF-8")))
 
     def read_json_from_s3(self, object_key: str) -> None:
         """
@@ -116,9 +114,7 @@ def read_json_from_s3(self, object_key: str) -> None:
         Args:
             object_key (str): The S3 key (path) where the file is stored.
         """
-        file_content = (
-            self.bucket.Object(object_key).get()["Body"].read().decode("utf-8")
-        )
+        file_content = self.bucket.Object(object_key).get()["Body"].read().decode("utf-8")
         json_content = json.loads(file_content)
         return json_content
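
Together, these two one-liners form a write/read round trip through boto3's Bucket resource. A hedged sketch (the S3Bucket constructor arguments and the object key are hypothetical):

    s3 = S3Bucket(bucket_name="pyro-risks")  # constructor signature assumed, not shown in this diff
    s3.write_json_to_s3({"fwi": 3}, "scores/2024-07-09.json")
    assert s3.read_json_from_s3("scores/2024-07-09.json") == {"fwi": 3}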

@@ -202,9 +198,7 @@ def list_files(
         if limit != 0:
             object_filter = object_filter.limit(limit)
         for obj in object_filter:
-            if not patterns or (
-                type(patterns) == list and any([p in obj.key for p in patterns])
-            ):
+            if not patterns or (type(patterns) == list and any([p in obj.key for p in patterns])):
                 files.append(obj.key)
         return files

@@ -222,9 +216,7 @@ def get_file_metadata(self, object_key: str) -> dict:
         metadata = obj.metadata
         return metadata
 
-    def get_files_metadata(
-        self, patterns: list[str] = None, prefix: str = "", delimiter: str = ""
-    ) -> list[dict]:
+    def get_files_metadata(self, patterns: list[str] = None, prefix: str = "", delimiter: str = "") -> list[dict]:
         """
         Lists files in the S3 bucket with their size in bytes and last modified dates.
@@ -238,16 +230,12 @@
         """
        files = []
         for obj in self.bucket.objects.filter(Prefix=prefix, Delimiter=delimiter):
-            if not patterns or (
-                type(patterns) == list and any([p in obj.key for p in patterns])
-            ):
-                files.append(
-                    {
-                        "file_name": obj.key,
-                        "file_size": round(obj.size * 1.0 / (1024), 2),
-                        "file_last_modified": obj.last_modified,
-                    }
-                )
+            if not patterns or (type(patterns) == list and any([p in obj.key for p in patterns])):
+                files.append({
+                    "file_name": obj.key,
+                    "file_size": round(obj.size * 1.0 / (1024), 2),
+                    "file_last_modified": obj.last_modified,
+                })
         return files
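
The filtering condition shared by list_files and get_files_metadata accepts a key when no patterns are given, or when any pattern occurs as a substring of the key. A standalone sketch of that predicate (keys hypothetical):

    def matches(key, patterns=None):
        return not patterns or (type(patterns) == list and any([p in key for p in patterns]))

    assert matches("data/fwi.json")                       # no patterns: everything passes
    assert matches("data/fwi.json", patterns=[".json"])   # substring match
    assert not matches("data/fwi.tif", patterns=[".json"])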


@@ -277,7 +265,5 @@ def read_credentials(
     for line in lines:
         if "region" in line:
             credentials["region_name"] = line.split("=")[1].strip()
-    credentials["endpoint_url"] = (
-        "https://s3." + credentials["region_name"] + ".io.cloud.ovh.net/"
-    )
+    credentials["endpoint_url"] = "https://s3." + credentials["region_name"] + ".io.cloud.ovh.net/"
     return credentials
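
For instance, with a hypothetical region_name of "gra", the concatenation yields the OVHcloud endpoint "https://s3.gra.io.cloud.ovh.net/":

    credentials = {"region_name": "gra"}  # region value assumed for illustration
    credentials["endpoint_url"] = "https://s3." + credentials["region_name"] + ".io.cloud.ovh.net/"
    # credentials["endpoint_url"] == "https://s3.gra.io.cloud.ovh.net/"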
3 changes: 1 addition & 2 deletions test/test_headers.py
@@ -41,8 +41,7 @@ def test_headers(self):
             # Compare it
             self.assertTrue(
                 any(
-                    "".join(current_header[: min(len(option), len(current_header))])
-                    == "".join(option)
+                    "".join(current_header[: min(len(option), len(current_header))]) == "".join(option)
                     for option in self.headers
                 ),
                 msg=f"Invalid header in {source_path}",
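
The joined-and-truncated comparison checks whether a file's opening lines exactly match one of the allowed header options. A standalone sketch of the predicate (header contents hypothetical):

    headers = [["# Copyright (C) 2024, Pyronear.", "# License: Apache-2.0"]]
    current_header = ["# Copyright (C) 2024, Pyronear.", "# License: Apache-2.0", "import os"]
    ok = any(
        "".join(current_header[: min(len(option), len(current_header))]) == "".join(option)
        for option in headers
    )
    # ok is True: the file's first two lines equal the allowed option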
