Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

adding more with-statements where close() is missing #1152

Merged
merged 2 commits into from
May 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 23 additions & 23 deletions pyaerocom/colocation.py
Original file line number Diff line number Diff line change
Expand Up @@ -972,7 +972,6 @@

if not inplace:
coldata = coldata.copy()
temp = xr.open_dataset(const.ERA5_SURFTEMP_FILE)["t2m"]

arr = coldata.data

Expand All @@ -987,37 +986,38 @@
mintemps = []
maxtemps = []
ps = []
for i, (lat, lon, alt, name) in enumerate(coords):
logger.info(name, ", Lat", lat, ", Lon", lon)
p = pressure(alt)
logger.info("Alt", alt)
logger.info("P=", p / 100, "hPa")
with xr.open_dataset(const.ERA5_SURFTEMP_FILE)["t2m"] as temp:
for i, (lat, lon, alt, name) in enumerate(coords):
logger.info(name, ", Lat", lat, ", Lon", lon)
p = pressure(alt)
logger.info("Alt", alt)
logger.info("P=", p / 100, "hPa")

Check warning on line 994 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L989-L994

Added lines #L989 - L994 were not covered by tests

ps.append(p / 100)
ps.append(p / 100)

Check warning on line 996 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L996

Added line #L996 was not covered by tests

temps = temp.sel(latitude=lat, longitude=lon, method="nearest").data
temps = temp.sel(latitude=lat, longitude=lon, method="nearest").data

Check warning on line 998 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L998

Added line #L998 was not covered by tests

meantemps.append(temps.mean())
mintemps.append(temps.min())
maxtemps.append(temps.min())
meantemps.append(temps.mean())
mintemps.append(temps.min())
maxtemps.append(temps.min())

Check warning on line 1002 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1000-L1002

Added lines #L1000 - L1002 were not covered by tests

if not len(temps) == len(arr.time):
raise NotImplementedError("Check timestamps")
logger.info("Mean Temp: ", temps.mean() - t0, " C")
if not len(temps) == len(arr.time):

Check warning on line 1004 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1004

Added line #L1004 was not covered by tests
raise NotImplementedError("Check timestamps")
logger.info("Mean Temp: ", temps.mean() - t0, " C")

Check warning on line 1006 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1006

Added line #L1006 was not covered by tests

corrfacs = (p0 / p) * (temps / t0)
corrfacs = (p0 / p) * (temps / t0)

Check warning on line 1008 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1008

Added line #L1008 was not covered by tests

logger.info("Corr fac:", corrfacs.mean(), "+/-", corrfacs.std())
logger.info("Corr fac:", corrfacs.mean(), "+/-", corrfacs.std())

Check warning on line 1010 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1010

Added line #L1010 was not covered by tests

cfacs.append(corrfacs.mean())
cfacs.append(corrfacs.mean())

Check warning on line 1012 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1012

Added line #L1012 was not covered by tests

# mularr = xr.DataArray(corrfacs)
# mularr = xr.DataArray(corrfacs)

if not arr.station_name.values[i] == name:
raise Exception
elif not arr.dims[1] == "time":
raise Exception
arr[1, :, i] *= corrfacs
if not arr.station_name.values[i] == name:
raise Exception
elif not arr.dims[1] == "time":
raise Exception
arr[1, :, i] *= corrfacs

Check warning on line 1020 in pyaerocom/colocation.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/colocation.py#L1016-L1020

Added lines #L1016 - L1020 were not covered by tests

cfacs = np.asarray(cfacs)

Expand Down
7 changes: 3 additions & 4 deletions pyaerocom/extras/satellite_l2/aeolus_l2a.py
Original file line number Diff line number Diff line change
Expand Up @@ -3179,16 +3179,15 @@ def read_model_file(self, file_name, topofile=None, vars_to_keep=None):
if topofile is not None:
# read topography since that needs to be added to the ground following height of the model
self.logger.info("reading topography file {}".format(options["topofile"]))
topo_data = xr.open_dataset(options["topofile"])
topo_altitudes = np.squeeze(topo_data[self.EMEP_TOPO_FILE_VAR_NAME])
topo_data.close()
with xr.open_dataset(options["topofile"]) as topo_data:
topo_altitudes = np.squeeze(topo_data[self.EMEP_TOPO_FILE_VAR_NAME])

if not os.path.exists(file_name):
obj.logger.info(f"file does not exist: {file_name}. skipping colocation ...")
return False
# read netcdf file if it has not yet been loaded
obj.logger.info(f"reading model file {file_name}")
nc_data = xr.open_dataset(file_name)
nc_data = xr.load_dataset(file_name)
nc_data[self._LATITUDENAME] = nc_data[self.EMEP_VAR_NAME_DICT[self._LATITUDENAME]]
nc_data[self._LONGITUDENAME] = nc_data[self.EMEP_VAR_NAME_DICT[self._LONGITUDENAME]]
nc_data[self._TIME_NAME] = nc_data[self.EMEP_VAR_NAME_DICT[self._TIME_NAME]]
Expand Down
4 changes: 2 additions & 2 deletions pyaerocom/helpers_landsea_masks.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,10 +149,10 @@ def load_region_mask_xr(*regions):
for i, fil in enumerate(get_htap_mask_files(*regions)):
r = regions[i]
if i == 0:
masks = xr.open_dataset(fil)[r + "htap"]
masks = xr.load_dataset(fil)[r + "htap"]
name = r
else:
masks += xr.open_dataset(fil)[r + "htap"]
masks += xr.load_dataset(fil)[r + "htap"]
name += f"-{r}"
if masks is not None:
mask = masks.where(masks < 1, 1)
Expand Down
39 changes: 20 additions & 19 deletions pyaerocom/io/cachehandler_ungridded.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,35 +235,36 @@

delete_existing = const.RM_CACHE_OUTDATED if not force_use_outdated else False

in_handle = open(fp, "rb")
if force_use_outdated:
last_meta = pickle.load(in_handle)
assert len(last_meta) == len(self.CACHE_HEAD_KEYS)
ok = True
else:
try:
ok = self._check_pkl_head_vs_database(in_handle)
except Exception as e:
ok = False
delete_existing = True
logger.exception(
f"File error in cached data file {fp}. "
f"File will be removed and data reloaded. Error: {repr(e)}"
)
with open(fp, "rb") as in_handle:
if force_use_outdated:
last_meta = pickle.load(in_handle)
assert len(last_meta) == len(self.CACHE_HEAD_KEYS)
ok = True
else:
try:
ok = self._check_pkl_head_vs_database(in_handle)
except Exception as e:
ok = False
delete_existing = True
logger.exception(

Check warning on line 249 in pyaerocom/io/cachehandler_ungridded.py

View check run for this annotation

Codecov / codecov/patch

pyaerocom/io/cachehandler_ungridded.py#L246-L249

Added lines #L246 - L249 were not covered by tests
f"File error in cached data file {fp}. "
f"File will be removed and data reloaded. Error: {repr(e)}"
)
if ok:
# everything is okay, or forced
data = pickle.load(in_handle)

if not ok:
# TODO: Should we delete the cache file if it is outdated ???
# Delete the cache file if it is outdated, after handle is closed
logger.info(
f"Aborting reading cache file {fp}. Aerocom database "
f"or pyaerocom version has changed compared to cached version"
)
in_handle.close()
if delete_existing: # something was wrong
logger.info(f"Deleting outdated cache file: {fp}")
os.remove(fp)
return False

# everything is okay
data = pickle.load(in_handle)
if not isinstance(data, UngriddedData):
raise TypeError(
f"Unexpected data type stored in cache file, need instance of UngriddedData, "
Expand Down
125 changes: 63 additions & 62 deletions pyaerocom/io/ghost/reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,68 +339,69 @@ def read_file(self, filename, var_to_read=None, invalidate_flags=None, var_to_wr
if var_to_write is None:
var_to_write = self.var_names_data_inv[var_to_read]

ds = xr.open_dataset(filename)

if not {"station", "time"}.issubset(ds.dims): # pragma: no cover
raise AttributeError("Missing dimensions")
if not "station_name" in ds: # pragma: no cover
raise AttributeError("No variable station_name found")

stats = []

# get all station metadata values as numpy arrays, since xarray isel,
# __getitem__, __getattr__ are slow... this can probably be solved
# more elegantly
meta_glob = {}
for meta_key in self.META_KEYS:
try:
meta_glob[meta_key] = ds[meta_key].values
except KeyError: # pragma: no cover
logger.warning(f"No such metadata key in GHOST data file: {Path(filename).name}")

for meta_key, to_unit in self.CONVERT_UNITS_META.items():
from_unit = ds[meta_key].attrs["units"]

if from_unit != to_unit:
cfac = cf_units.Unit(from_unit).convert(1, to_unit)
meta_glob[meta_key] *= cfac

tvals = ds["time"].values

vardata = ds[var_to_read] # DataArray
varinfo = vardata.attrs

# ToDo: it is important that station comes first since we use numpy
# indexing below and not xarray.isel or similar, due to performance
# issues. This may need to be updated in case of profile data.
assert vardata.dims == ("station", "time")
data_np = vardata.values

# evaluate flags
invalid = self._eval_flags(vardata, invalidate_flags, ds)

for idx in ds.station.values:
stat = {}
meta = StationMetaData()
meta["ts_type"] = self.TS_TYPE
stat["time"] = tvals
stat["meta"] = meta
meta["var_info"] = {}

for meta_key, vals in meta_glob.items():
meta[meta_key] = vals[idx]

# vardata = subset[var_name]
stat[var_to_write] = data_np[idx]

meta["var_info"][var_to_write] = {}
meta["var_info"][var_to_write].update(varinfo)

# import flagdata (2D array with time and flag dimensions)
# invalid = self._eval_flags(vardata, invalidate_flags)
stat["data_flagged"] = {}
stat["data_flagged"][var_to_write] = invalid[idx]
stats.append(stat)
with xr.open_dataset(filename) as ds:
if not {"station", "time"}.issubset(ds.dims): # pragma: no cover
raise AttributeError("Missing dimensions")
if not "station_name" in ds: # pragma: no cover
raise AttributeError("No variable station_name found")

stats = []

# get all station metadata values as numpy arrays, since xarray isel,
# __getitem__, __getattr__ are slow... this can probably be solved
# more elegantly
meta_glob = {}
for meta_key in self.META_KEYS:
try:
meta_glob[meta_key] = ds[meta_key].values
except KeyError: # pragma: no cover
logger.warning(
f"No such metadata key in GHOST data file: {Path(filename).name}"
)

for meta_key, to_unit in self.CONVERT_UNITS_META.items():
from_unit = ds[meta_key].attrs["units"]

if from_unit != to_unit:
cfac = cf_units.Unit(from_unit).convert(1, to_unit)
meta_glob[meta_key] *= cfac

tvals = ds["time"].values

vardata = ds[var_to_read] # DataArray
varinfo = vardata.attrs

# ToDo: it is important that station comes first since we use numpy
# indexing below and not xarray.isel or similar, due to performance
# issues. This may need to be updated in case of profile data.
assert vardata.dims == ("station", "time")
data_np = vardata.values

# evaluate flags
invalid = self._eval_flags(vardata, invalidate_flags, ds)

for idx in ds.station.values:
stat = {}
meta = StationMetaData()
meta["ts_type"] = self.TS_TYPE
stat["time"] = tvals
stat["meta"] = meta
meta["var_info"] = {}

for meta_key, vals in meta_glob.items():
meta[meta_key] = vals[idx]

# vardata = subset[var_name]
stat[var_to_write] = data_np[idx]

meta["var_info"][var_to_write] = {}
meta["var_info"][var_to_write].update(varinfo)

# import flagdata (2D array with time and flag dimensions)
# invalid = self._eval_flags(vardata, invalidate_flags)
stat["data_flagged"] = {}
stat["data_flagged"][var_to_write] = invalid[idx]
stats.append(stat)

return stats

Expand Down
Loading