From ae6b715313b8f1b89db56f4eb12d90ddae7015e0 Mon Sep 17 00:00:00 2001
From: ssolson
Date: Tue, 21 Nov 2023 07:48:06 -0700
Subject: [PATCH 01/18] assert to exception

---
 mhkit/river/io/d3d.py | 645 ++++++++++++++++++++++--------------------
 1 file changed, 344 insertions(+), 301 deletions(-)

diff --git a/mhkit/river/io/d3d.py b/mhkit/river/io/d3d.py
index d4db2e266..3354aef01 100644
--- a/mhkit/river/io/d3d.py
+++ b/mhkit/river/io/d3d.py
@@ -10,7 +10,7 @@ def get_all_time(data):
     '''
     Returns all of the time stamps from a D3D simulation passed to the
     function as a NetCDF object (data)
-    
+
     Parameters
    ----------
    data: NetCDF4 object
@@ -24,8 +24,9 @@
       the simulation started and that the data object contains a snapshot
       of simulation conditions at that time.
    '''
-    
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be a NetCDF4 object')

    seconds_run = np.ma.getdata(data.variables['time'][:], False)

@@ -57,7 +58,7 @@ def seconds_to_index(data, seconds_run):
    '''
    The function will return the nearest 'time_index' in the data if passed
    an integer number of 'seconds_run'
-    
+
    Parameters
    ----------
    data: NetCDF4 object
@@ -103,26 +104,34 @@ def _convert_time(data, time_index=None, seconds_run=None):
       and incrementing until the simulation is complete. The 'seconds_run'
       is the seconds corresponding to the 'time_index' increments.
    '''
-    
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
-    assert time_index or seconds_run, 'input of time_index or seconds_run needed'
-    assert not(time_index and seconds_run), f'only one time_index or seconds_run'
-    assert isinstance(time_index, (int, float)) or isinstance(seconds_run, (int,
-        float)),'time_index or seconds_run input must be a int or float'
-    
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be a NetCDF4 object')
+
+    if not (time_index or seconds_run):
+        raise ValueError('Input of time_index or seconds_run needed')
+
+    if time_index and seconds_run:
+        raise ValueError(
+            'Only one of time_index or seconds_run should be provided')
+
+    if not (isinstance(time_index, (int, float)) or isinstance(seconds_run, (int, float))):
+        raise TypeError(
+            'time_index or seconds_run input must be an int or float')
+
    times = get_all_time(data)
-    
+
    if time_index:
-        QoI= times[time_index]
+        QoI = times[time_index]
    if seconds_run:
-        try:
-            idx=np.where(times == seconds_run)
-            QoI=idx[0][0]
-        except:
+        try:
+            idx = np.where(times == seconds_run)
+            QoI = idx[0][0]
+        except:
            idx = (np.abs(times - seconds_run)).argmin()
-            QoI= idx
-            warnings.warn( f'Warning: seconds_run not found. Closest time stamp'
-                +'found {times[idx]}', stacklevel= 2)
+            QoI = idx
+            warnings.warn(f'Warning: seconds_run not found. Closest time stamp '
+                          f'found {times[idx]}', stacklevel=2)

    return QoI

@@ -154,104 +163,118 @@ def get_layer_data(data, variable, layer_index=-1, time_index=-1):
      simulation has run. The waterdepth is measured from the water surface and the
      "waterlevel" is the water level difference in meters from the zero water level. 
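
    Example
    -------
    A minimal sketch of a call; the file name and variable are placeholders
    for whatever the simulation output actually contains:

    data = netCDF4.Dataset('FlowFM_map.nc')
    layer_data = d3d.get_layer_data(data, 'ucx', layer_index=-1,
        time_index=-1)
    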
''' - - assert isinstance(time_index, int), 'time_index must be an int' - assert isinstance(layer_index, int), 'layer_index must be an int' - assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object' - assert variable in data.variables.keys(), 'variable not recognized' + + if not isinstance(time_index, int): + raise TypeError('time_index must be an int') + + if not isinstance(layer_index, int): + raise TypeError('layer_index must be an int') + + if not isinstance(data, netCDF4._netCDF4.Dataset): + raise TypeError('data must be NetCDF4 object') + + if variable not in data.variables.keys(): + raise ValueError('variable not recognized') coords = str(data.variables[variable].coordinates).split() - var=data.variables[variable][:] - max_time_index= data['time'].shape[0]-1 # to account for zero index - assert abs(time_index) <= max_time_index, (f'time_index must be less than' - +'the absolute value of the max time index {max_time_index}') - - x=np.ma.getdata(data.variables[coords[0]][:], False) - y=np.ma.getdata(data.variables[coords[1]][:], False) - - - if type(var[0][0]) == np.ma.core.MaskedArray: - max_layer= len(var[0][0]) - - assert abs(layer_index) <= max_layer,( f'layer_index must be less than' - +'the max layer {max_layer}') - v= np.ma.getdata(var[time_index,:,layer_index], False) - dimensions= 3 - - else: - assert type(var[0][0])== np.float64, 'data not recognized' - dimensions= 2 - v= np.ma.getdata(var[time_index,:], False) - - #waterdepth + var = data.variables[variable][:] + max_time_index = data['time'].shape[0]-1 # to account for zero index + if abs(time_index) > max_time_index: + raise ValueError( + f'time_index must be less than the absolute value of the max time index {max_time_index}') + + x = np.ma.getdata(data.variables[coords[0]][:], False) + y = np.ma.getdata(data.variables[coords[1]][:], False) + + if type(var[0][0]) == np.ma.core.MaskedArray: + max_layer = len(var[0][0]) + + if abs(layer_index) > max_layer: + raise ValueError( + f'layer_index must be less than the max layer {max_layer}') + + v = np.ma.getdata(var[time_index, :, layer_index], False) + dimensions = 3 + + else: + if type(var[0][0]) != np.float64: + raise TypeError('data not recognized') + dimensions = 2 + v = np.ma.getdata(var[time_index, :], False) + + # waterdepth if "mesh2d" in variable: - cords_to_layers= {'mesh2d_face_x mesh2d_face_y': {'name':'mesh2d_nLayers', - 'coords':data.variables['mesh2d_layer_sigma'][:]}, - 'mesh2d_edge_x mesh2d_edge_y': {'name':'mesh2d_nInterfaces', - 'coords':data.variables['mesh2d_interface_sigma'][:]}} - bottom_depth=np.ma.getdata(data.variables['mesh2d_waterdepth'][time_index, :], False) - waterlevel= np.ma.getdata(data.variables['mesh2d_s1'][time_index, :], False) + cords_to_layers = {'mesh2d_face_x mesh2d_face_y': {'name': 'mesh2d_nLayers', + 'coords': data.variables['mesh2d_layer_sigma'][:]}, + 'mesh2d_edge_x mesh2d_edge_y': {'name': 'mesh2d_nInterfaces', + 'coords': data.variables['mesh2d_interface_sigma'][:]}} + bottom_depth = np.ma.getdata( + data.variables['mesh2d_waterdepth'][time_index, :], False) + waterlevel = np.ma.getdata( + data.variables['mesh2d_s1'][time_index, :], False) coords = str(data.variables['waterdepth'].coordinates).split() - + else: - cords_to_layers= {'FlowElem_xcc FlowElem_ycc':{'name':'laydim', - 'coords':data.variables['LayCoord_cc'][:]}, - 'FlowLink_xu FlowLink_yu': {'name':'wdim', - 'coords':data.variables['LayCoord_w'][:]}} - bottom_depth=np.ma.getdata(data.variables['waterdepth'][time_index, :], False) - waterlevel= 
np.ma.getdata(data.variables['s1'][time_index, :], False) + cords_to_layers = {'FlowElem_xcc FlowElem_ycc': {'name': 'laydim', + 'coords': data.variables['LayCoord_cc'][:]}, + 'FlowLink_xu FlowLink_yu': {'name': 'wdim', + 'coords': data.variables['LayCoord_w'][:]}} + bottom_depth = np.ma.getdata( + data.variables['waterdepth'][time_index, :], False) + waterlevel = np.ma.getdata(data.variables['s1'][time_index, :], False) coords = str(data.variables['waterdepth'].coordinates).split() - - layer_dim = str(data.variables[variable].coordinates) - - cord_sys= cords_to_layers[layer_dim]['coords'] - layer_percentages= np.ma.getdata(cord_sys, False) #accumulative + + layer_dim = str(data.variables[variable].coordinates) + + cord_sys = cords_to_layers[layer_dim]['coords'] + layer_percentages = np.ma.getdata(cord_sys, False) # accumulative if layer_dim == 'FlowLink_xu FlowLink_yu': - #interpolate - x_laydim=np.ma.getdata(data.variables[coords[0]][:], False) - y_laydim=np.ma.getdata(data.variables[coords[1]][:], False) - points_laydim = np.array([ [x, y] for x, y in zip(x_laydim, y_laydim)]) - + # interpolate + x_laydim = np.ma.getdata(data.variables[coords[0]][:], False) + y_laydim = np.ma.getdata(data.variables[coords[1]][:], False) + points_laydim = np.array([[x, y] for x, y in zip(x_laydim, y_laydim)]) + coords_request = str(data.variables[variable].coordinates).split() - x_wdim=np.ma.getdata(data.variables[coords_request[0]][:], False) - y_wdim=np.ma.getdata(data.variables[coords_request[1]][:], False) - points_wdim=np.array([ [x, y] for x, y in zip(x_wdim, y_wdim)]) - + x_wdim = np.ma.getdata(data.variables[coords_request[0]][:], False) + y_wdim = np.ma.getdata(data.variables[coords_request[1]][:], False) + points_wdim = np.array([[x, y] for x, y in zip(x_wdim, y_wdim)]) + bottom_depth_wdim = interp.griddata(points_laydim, bottom_depth, points_wdim) - water_level_wdim= interp.griddata(points_laydim, waterlevel, - points_wdim) - - idx_bd= np.where(np.isnan(bottom_depth_wdim)) - - for i in idx_bd: - bottom_depth_wdim[i]= interp.griddata(points_laydim, bottom_depth, - points_wdim[i], method='nearest') - water_level_wdim[i]= interp.griddata(points_laydim, waterlevel, - points_wdim[i], method='nearest') - - - waterdepth=[] - - if dimensions== 2: - if layer_dim == 'FlowLink_xu FlowLink_yu': + water_level_wdim = interp.griddata(points_laydim, waterlevel, + points_wdim) + + idx_bd = np.where(np.isnan(bottom_depth_wdim)) + + for i in idx_bd: + bottom_depth_wdim[i] = interp.griddata(points_laydim, bottom_depth, + points_wdim[i], method='nearest') + water_level_wdim[i] = interp.griddata(points_laydim, waterlevel, + points_wdim[i], method='nearest') + + waterdepth = [] + + if dimensions == 2: + if layer_dim == 'FlowLink_xu FlowLink_yu': z = [bottom_depth_wdim] - waterlevel=water_level_wdim + waterlevel = water_level_wdim else: z = [bottom_depth] else: - if layer_dim == 'FlowLink_xu FlowLink_yu': + if layer_dim == 'FlowLink_xu FlowLink_yu': z = [bottom_depth_wdim*layer_percentages[layer_index]] - waterlevel=water_level_wdim + waterlevel = water_level_wdim else: z = [bottom_depth*layer_percentages[layer_index]] - waterdepth=np.append(waterdepth, z) + waterdepth = np.append(waterdepth, z) - time= np.ma.getdata(data.variables['time'][time_index], False)*np.ones(len(x)) + time = np.ma.getdata( + data.variables['time'][time_index], False)*np.ones(len(x)) - layer= np.array([ [x_i, y_i, d_i, w_i, v_i, t_i] for x_i, y_i, d_i, w_i, v_i, t_i in - zip(x, y, waterdepth, waterlevel, v, time)]) - layer_data = 
pd.DataFrame(layer, columns=['x', 'y', 'waterdepth','waterlevel', 'v', 'time']) + layer = np.array([[x_i, y_i, d_i, w_i, v_i, t_i] for x_i, y_i, d_i, w_i, v_i, t_i in + zip(x, y, waterdepth, waterlevel, v, time)]) + layer_data = pd.DataFrame( + layer, columns=['x', 'y', 'waterdepth', 'waterlevel', 'v', 'time']) return layer_data @@ -262,7 +285,7 @@ def create_points(x, y, waterdepth): In any order the three inputs can consist of 3 points, 2 points and 1 array, or 1 point and 2 arrays. The final output DataFrame will be the unique combinations of the 3 inputs. - + Parameters ---------- x: float, array or int @@ -276,17 +299,17 @@ def create_points(x, y, waterdepth): ------- points: DateFrame DataFrame with columns x, y and waterdepth points. - + Example ------- If the inputs are 2 arrays: and [3,4,5] and 1 point [6], the output will contain 6 array combinations of the 3 inputs as shown. - + x=np.array([1,2]) y=np.array([3,4,5]) waterdepth= 6 d3d.create_points(x,y,waterdepth) - + x y waterdepth 0 1.0 3.0 6.0 1 2.0 3.0 6.0 @@ -295,86 +318,88 @@ def create_points(x, y, waterdepth): 4 1.0 5.0 6.0 5 2.0 5.0 6.0 ''' - - assert isinstance(x, (int, float, np.ndarray)), ('x must be a int, float' - +' or array') - assert isinstance(y, (int, float, np.ndarray)), ('y must be a int, float' - +' or array') - assert isinstance(waterdepth, (int, float, np.ndarray)), ('waterdepth must be a int, float' - +' or array') - - directions = {0:{'name': 'x', - 'values': x}, - 1:{'name': 'y', - 'values': y}, - 2:{'name': 'waterdepth', - 'values': waterdepth}} + + if not isinstance(x, (int, float, np.ndarray)): + raise TypeError('x must be an int, float, or array') + + if not isinstance(y, (int, float, np.ndarray)): + raise TypeError('y must be an int, float, or array') + + if not isinstance(waterdepth, (int, float, np.ndarray)): + raise TypeError('waterdepth must be an int, float, or array') + + directions = {0: {'name': 'x', + 'values': x}, + 1: {'name': 'y', + 'values': y}, + 2: {'name': 'waterdepth', + 'values': waterdepth}} for i in directions: try: - N=len(directions[i]['values']) + N = len(directions[i]['values']) except: - directions[i]['values'] = np.array([directions[i]['values']]) - N=len(directions[i]['values']) - if N == 1 : - directions[i]['type']= 'point' - elif N > 1 : - directions[i]['type']= 'array' + directions[i]['values'] = np.array([directions[i]['values']]) + N = len(directions[i]['values']) + if N == 1: + directions[i]['type'] = 'point' + elif N > 1: + directions[i]['type'] = 'array' else: raise Exception(f'length of direction {directions[i]["name"]} was' - +'neagative or zero') - - # Check how many times point is in "types" - types= [directions[i]['type'] for i in directions] + + 'neagative or zero') + + # Check how many times point is in "types" + types = [directions[i]['type'] for i in directions] N_points = types.count('point') if N_points >= 2: - lens = np.array([len(directions[d]['values']) for d in directions]) + lens = np.array([len(directions[d]['values']) for d in directions]) max_len_idx = lens.argmax() - not_max_idxs= [i for i in directions.keys()] - + not_max_idxs = [i for i in directions.keys()] + del not_max_idxs[max_len_idx] - - for not_max in not_max_idxs: - N= len(directions[max_len_idx]['values']) - vals =np.ones(N)*directions[not_max]['values'] + + for not_max in not_max_idxs: + N = len(directions[max_len_idx]['values']) + vals = np.ones(N)*directions[not_max]['values'] directions[not_max]['values'] = np.array(vals) - + x_new = directions[0]['values'] y_new = 
directions[1]['values'] depth_new = directions[2]['values'] - - request= np.array([ [x_i, y_i, depth_i] for x_i, y_i, depth_i in zip(x_new, - y_new, depth_new)]) - points= pd.DataFrame(request, columns=[ 'x', 'y', 'waterdepth']) - - elif N_points == 1: + + request = np.array([[x_i, y_i, depth_i] for x_i, y_i, depth_i in zip(x_new, + y_new, depth_new)]) + points = pd.DataFrame(request, columns=['x', 'y', 'waterdepth']) + + elif N_points == 1: # treat as plane - #find index of point + # find index of point idx_point = types.index('point') - max_idxs= [i for i in directions.keys()] + max_idxs = [i for i in directions.keys()] print(max_idxs) del max_idxs[idx_point] - #find vectors + # find vectors XX, YY = np.meshgrid(directions[max_idxs[0]]['values'], - directions[max_idxs[1]]['values'] ) - N_X=np.shape(XX)[1] - N_Y=np.shape(YY)[0] - ZZ= np.ones((N_Y,N_X))*directions[idx_point]['values'] - - request= np.array([ [x_i, y_i, z_i] for x_i, y_i, z_i in zip(XX.ravel(), - YY.ravel() , ZZ.ravel())]) - columns=[ directions[max_idxs[0]]['name'], - directions[max_idxs[1]]['name'], directions[idx_point]['name']] - - points= pd.DataFrame(request, columns=columns) - else: - raise Exception('Can provide at most two arrays') - - return points - - -def variable_interpolation(data, variables, points='cells', edges= 'none'): + directions[max_idxs[1]]['values']) + N_X = np.shape(XX)[1] + N_Y = np.shape(YY)[0] + ZZ = np.ones((N_Y, N_X))*directions[idx_point]['values'] + + request = np.array([[x_i, y_i, z_i] for x_i, y_i, z_i in zip(XX.ravel(), + YY.ravel(), ZZ.ravel())]) + columns = [directions[max_idxs[0]]['name'], + directions[max_idxs[1]]['name'], directions[idx_point]['name']] + + points = pd.DataFrame(request, columns=columns) + else: + raise ValueError('Can provide at most two arrays') + + return points + + +def variable_interpolation(data, variables, points='cells', edges='none'): ''' Interpolate multiple variables from the Delft3D onto the same points. @@ -395,53 +420,57 @@ def variable_interpolation(data, variables, points='cells', edges= 'none'): edges: sting: 'nearest' If edges is set to 'nearest' the code will fill in nan values with nearest interpolation. Otherwise only linear interpolarion will be used. - + Returns ------- transformed_data: DataFrame Variables on specified grid points saved under the input variable names and the x, y, and waterdepth coordinates of those points. 
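
    Example
    -------
    A minimal sketch; the file name is a placeholder, and the variables can
    be any names reported by data.variables.keys():

    data = netCDF4.Dataset('FlowFM_map.nc')
    transformed_data = d3d.variable_interpolation(data, ['ucx', 'turkin1'],
        points='cells', edges='nearest')
    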
''' - - assert isinstance(points, (str, pd.DataFrame)),('points must be a string ' - +'or DataFrame') - if isinstance ( points, str): - assert any([points == 'cells', points=='faces']), ('points must be' - +' cells or faces') - assert type(data)== netCDF4._netCDF4.Dataset, 'data must be nerCDF4 object' + + if not isinstance(points, (str, pd.DataFrame)): + raise TypeError('points must be a string or DataFrame') + + if isinstance(points, str): + if not any([points == 'cells', points == 'faces']): + raise ValueError('points must be cells or faces') + + if not isinstance(data, netCDF4._netCDF4.Dataset): + raise TypeError('data must be netCDF4 object') data_raw = {} for var in variables: - var_data_df = get_all_data_points(data, var,time_index=-1) - var_data_df=var_data_df.loc[:,~var_data_df.T.duplicated(keep='first')] - data_raw[var] = var_data_df - if type(points) == pd.DataFrame: + var_data_df = get_all_data_points(data, var, time_index=-1) + var_data_df = var_data_df.loc[:, ~ + var_data_df.T.duplicated(keep='first')] + data_raw[var] = var_data_df + if type(points) == pd.DataFrame: print('points provided') - elif points=='faces': - points = data_raw['ucx'][['x','y','waterdepth']] - elif points=='cells': - points = data_raw['turkin1'][['x','y','waterdepth']] - - transformed_data= points.copy(deep=True) - - for var in variables : - transformed_data[var] = interp.griddata(data_raw[var][['x','y','waterdepth']], - data_raw[var][var], points[['x','y','waterdepth']]) - if edges == 'nearest' : - idx= np.where(np.isnan(transformed_data[var])) - + elif points == 'faces': + points = data_raw['ucx'][['x', 'y', 'waterdepth']] + elif points == 'cells': + points = data_raw['turkin1'][['x', 'y', 'waterdepth']] + + transformed_data = points.copy(deep=True) + + for var in variables: + transformed_data[var] = interp.griddata(data_raw[var][['x', 'y', 'waterdepth']], + data_raw[var][var], points[['x', 'y', 'waterdepth']]) + if edges == 'nearest': + idx = np.where(np.isnan(transformed_data[var])) + if len(idx[0]): - for i in idx[0]: - transformed_data[var][i]= (interp - .griddata(data_raw[var][['x','y','waterdepth']], - data_raw[var][var], - [points['x'][i],points['y'][i], - points['waterdepth'][i]], method='nearest')) - + for i in idx[0]: + transformed_data[var][i] = (interp + .griddata(data_raw[var][['x', 'y', 'waterdepth']], + data_raw[var][var], + [points['x'][i], points['y'][i], + points['waterdepth'][i]], method='nearest')) + return transformed_data -def get_all_data_points(data, variable, time_index=-1): +def get_all_data_points(data, variable, time_index=-1): ''' Get data points for a passed variable for all layers at a specified time from the Delft3D NetCDF4 object by iterating over the `get_layer_data` function. @@ -457,75 +486,80 @@ def get_all_data_points(data, variable, time_index=-1): time_index: int An integer to pull the time step from the dataset. Default is last time step, found with the input -1. - + Returns ------- all_data: DataFrame Dataframe with columns x, y, waterdepth, waterlevel, variable, and time. The waterdepth is measured from the water surface and the "waterlevel" is the water level diffrence in meters from the zero water level. 
- - ''' - - assert isinstance(time_index, int), 'time_index must be a int' - assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object' - assert variable in data.variables.keys(), 'variable not recognized' + + ''' + + if not isinstance(time_index, int): + raise TypeError('time_index must be an int') + + if not isinstance(data, netCDF4._netCDF4.Dataset): + raise TypeError('data must be NetCDF4 object') + + if variable not in data.variables.keys(): + raise ValueError('variable not recognized') max_time_index = len(data.variables[variable][:]) - assert abs(time_index) <= max_time_index, (f'time_index must be less than' - +'the max time index {max_time_index}') + if abs(time_index) > max_time_index: + raise ValueError( + f'time_index must be less than the max time index {max_time_index}') if "mesh2d" in variable: - cords_to_layers= {'mesh2d_face_x mesh2d_face_y': {'name':'mesh2d_nLayers', - 'coords':data.variables['mesh2d_layer_sigma'][:]}, - 'mesh2d_edge_x mesh2d_edge_y': {'name':'mesh2d_nInterfaces', - 'coords':data.variables['mesh2d_interface_sigma'][:]}} + cords_to_layers = {'mesh2d_face_x mesh2d_face_y': {'name': 'mesh2d_nLayers', + 'coords': data.variables['mesh2d_layer_sigma'][:]}, + 'mesh2d_edge_x mesh2d_edge_y': {'name': 'mesh2d_nInterfaces', + 'coords': data.variables['mesh2d_interface_sigma'][:]}} else: - cords_to_layers= {'FlowElem_xcc FlowElem_ycc':{'name':'laydim', - 'coords':data.variables['LayCoord_cc'][:]}, - 'FlowLink_xu FlowLink_yu': {'name':'wdim', - 'coords':data.variables['LayCoord_w'][:]}} - - layer_dim = str(data.variables[variable].coordinates) - - try: - cord_sys= cords_to_layers[layer_dim]['coords'] - except: + cords_to_layers = {'FlowElem_xcc FlowElem_ycc': {'name': 'laydim', + 'coords': data.variables['LayCoord_cc'][:]}, + 'FlowLink_xu FlowLink_yu': {'name': 'wdim', + 'coords': data.variables['LayCoord_w'][:]}} + + layer_dim = str(data.variables[variable].coordinates) + + try: + cord_sys = cords_to_layers[layer_dim]['coords'] + except: raise Exception('Coordinates not recognized.') - else: - layer_percentages= np.ma.getdata(cord_sys, False) - - x_all=[] - y_all=[] - depth_all=[] - water_level_all=[] - v_all=[] - time_all=[] - + else: + layer_percentages = np.ma.getdata(cord_sys, False) + + x_all = [] + y_all = [] + depth_all = [] + water_level_all = [] + v_all = [] + time_all = [] + layers = range(len(layer_percentages)) for layer in layers: - layer_data= get_layer_data(data, variable, layer, time_index) - - x_all=np.append(x_all, layer_data.x) - y_all=np.append(y_all, layer_data.y) - depth_all=np.append(depth_all, layer_data.waterdepth) - water_level_all=np.append(water_level_all, layer_data.waterlevel) - v_all=np.append(v_all, layer_data.v) - time_all= np.append(time_all, layer_data.time) - - known_points = np.array([ [x, y, waterdepth, waterlevel, v, time] - for x, y, waterdepth, waterlevel, v, time in zip(x_all, y_all, - depth_all, water_level_all, v_all, time_all)]) - - all_data= pd.DataFrame(known_points, columns=['x','y','waterdepth', 'waterlevel' - ,f'{variable}', 'time']) + layer_data = get_layer_data(data, variable, layer, time_index) - return all_data + x_all = np.append(x_all, layer_data.x) + y_all = np.append(y_all, layer_data.y) + depth_all = np.append(depth_all, layer_data.waterdepth) + water_level_all = np.append(water_level_all, layer_data.waterlevel) + v_all = np.append(v_all, layer_data.v) + time_all = np.append(time_all, layer_data.time) + known_points = np.array([[x, y, waterdepth, waterlevel, v, time] + for x, y, waterdepth, 
waterlevel, v, time in zip(x_all, y_all, + depth_all, water_level_all, v_all, time_all)]) + all_data = pd.DataFrame(known_points, columns=[ + 'x', 'y', 'waterdepth', 'waterlevel', f'{variable}', 'time']) -def turbulent_intensity(data, points='cells', time_index= -1, - intermediate_values = False ): + return all_data + + +def turbulent_intensity(data, points='cells', time_index=-1, + intermediate_values=False): ''' Calculate the turbulent intensity percentage for a given data set for the specified points. Assumes variable names: ucx, ucy, ucz and turkin1. @@ -548,7 +582,7 @@ def turbulent_intensity(data, points='cells', time_index= -1, If false the function will return position and turbulent intensity values. If true the function will return position(x,y,z) and values need to calculate turbulent intensity (ucx, uxy, uxz and turkin1) in a Dataframe. Default False. - + Returns ------- TI_data : Dataframe @@ -565,64 +599,73 @@ def turbulent_intensity(data, points='cells', time_index= -1, ucy- velocity in the y direction ucz- velocity in the vertical direction ''' - - assert isinstance(points, (str, pd.DataFrame)),('points must a string or' - +' DataFrame') - if isinstance ( points, str): - assert any([points == 'cells', points=='faces']), ('points must be cells' - +' or faces') - assert isinstance(time_index, int), 'time_index must be a int' - max_time_index= data['time'].shape[0]-1 # to account for zero index - assert abs(time_index) <= max_time_index, (f'time_index must be less than' - +'the absolute value of the max time index {max_time_index}') - assert type(data)== netCDF4._netCDF4.Dataset, 'data must be nerCDF4 object' - assert 'turkin1' in data.variables.keys(), ('Varaiable turkin1 not' - +' present in Data') - assert 'ucx' in data.variables.keys(),'Varaiable ucx 1 not present in Data' - assert 'ucy' in data.variables.keys(),'Varaiable ucy 1 not present in Data' - assert 'ucz' in data.variables.keys(),'Varaiable ucz 1 not present in Data' - - TI_vars= ['turkin1', 'ucx', 'ucy', 'ucz'] + + if not isinstance(points, (str, pd.DataFrame)): + raise TypeError('points must be a string or DataFrame') + + if isinstance(points, str): + if not (points == 'cells' or points == 'faces'): + raise ValueError('points must be cells or faces') + + if not isinstance(time_index, int): + raise TypeError('time_index must be an int') + + max_time_index = data['time'].shape[0] - 1 # to account for zero index + if abs(time_index) > max_time_index: + raise ValueError( + f'time_index must be less than the absolute value of the max time index {max_time_index}') + + if not isinstance(data, netCDF4._netCDF4.Dataset): + raise TypeError('data must be netCDF4 object') + + for variable in ['turkin1', 'ucx', 'ucy', 'ucz']: + if variable not in data.variables.keys(): + raise ValueError(f'Variable {variable} not present in Data') + + TI_vars = ['turkin1', 'ucx', 'ucy', 'ucz'] TI_data_raw = {} for var in TI_vars: - var_data_df = get_all_data_points(data, var ,time_index) - TI_data_raw[var] = var_data_df - if type(points) == pd.DataFrame: + var_data_df = get_all_data_points(data, var, time_index) + TI_data_raw[var] = var_data_df + if type(points) == pd.DataFrame: print('points provided') - elif points=='faces': - points = TI_data_raw['turkin1'].drop(['waterlevel','turkin1'],axis=1) - elif points=='cells': - points = TI_data_raw['ucx'].drop(['waterlevel','ucx'],axis=1) - + elif points == 'faces': + points = TI_data_raw['turkin1'].drop(['waterlevel', 'turkin1'], axis=1) + elif points == 'cells': + points = 
TI_data_raw['ucx'].drop(['waterlevel', 'ucx'], axis=1) + TI_data = points.copy(deep=True) - for var in TI_vars: - TI_data[var] = interp.griddata(TI_data_raw[var][['x','y','waterdepth']], - TI_data_raw[var][var], points[['x','y','waterdepth']]) - idx= np.where(np.isnan(TI_data[var])) - + for var in TI_vars: + TI_data[var] = interp.griddata(TI_data_raw[var][['x', 'y', 'waterdepth']], + TI_data_raw[var][var], points[['x', 'y', 'waterdepth']]) + idx = np.where(np.isnan(TI_data[var])) + if len(idx[0]): - for i in idx[0]: - TI_data[var][i]= interp.griddata(TI_data_raw[var][['x','y','waterdepth']], - TI_data_raw[var][var], - [points['x'][i],points['y'][i], points['waterdepth'][i]], - method='nearest') - - u_mag=unorm(np.array(TI_data['ucx']),np.array(TI_data['ucy']), - np.array(TI_data['ucz'])) - - neg_index=np.where( TI_data['turkin1']<0) - zero_bool= np.isclose( TI_data['turkin1'][ TI_data['turkin1']<0].array, - np.zeros(len( TI_data['turkin1'][TI_data['turkin1']<0].array)), - atol=1.0e-4) - zero_ind= neg_index[0][zero_bool] - non_zero_ind= neg_index[0][~zero_bool] - TI_data.loc[zero_ind,'turkin1']=np.zeros(len(zero_ind)) - TI_data.loc[non_zero_ind,'turkin1']=[np.nan]*len(non_zero_ind) - - TI_data['turbulent_intensity']= np.sqrt(2/3*TI_data['turkin1'])/u_mag * 100 #% - + for i in idx[0]: + TI_data[var][i] = interp.griddata(TI_data_raw[var][['x', 'y', 'waterdepth']], + TI_data_raw[var][var], + [points['x'][i], points['y'] + [i], points['waterdepth'][i]], + method='nearest') + + u_mag = unorm(np.array(TI_data['ucx']), np.array(TI_data['ucy']), + np.array(TI_data['ucz'])) + + neg_index = np.where(TI_data['turkin1'] < 0) + zero_bool = np.isclose(TI_data['turkin1'][TI_data['turkin1'] < 0].array, + np.zeros( + len(TI_data['turkin1'][TI_data['turkin1'] < 0].array)), + atol=1.0e-4) + zero_ind = neg_index[0][zero_bool] + non_zero_ind = neg_index[0][~zero_bool] + TI_data.loc[zero_ind, 'turkin1'] = np.zeros(len(zero_ind)) + TI_data.loc[non_zero_ind, 'turkin1'] = [np.nan]*len(non_zero_ind) + + TI_data['turbulent_intensity'] = np.sqrt( + 2/3*TI_data['turkin1'])/u_mag * 100 # % + if intermediate_values == False: - TI_data= TI_data.drop(TI_vars, axis = 1) - + TI_data = TI_data.drop(TI_vars, axis=1) + return TI_data From 7614867df05d0ae3b04c4b04898ce10f33e6baaf Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 21 Nov 2023 09:08:09 -0700 Subject: [PATCH 02/18] Revert "Ensure interpolation values are within range when sampling contours (#278)" This reverts commit 2387a257b218709b6e82052d9d2fc864db5d4509. 
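
Note that with the range validation reverted, out-of-range sample periods
once again surface as SciPy's generic interpolation error rather than the
explicit "All t_samples must be within the range of t_contour." message.
A rough sketch of the restored behavior, with an illustrative contour:

    import numpy as np
    from mhkit.wave import contours

    t_contour = np.array([10.0, 15.0, 20.0])
    hs_contour = np.array([8.57, 9.28, 8.70])
    # 25 s lies outside t_contour, so scipy.interpolate.interp1d raises
    # its own out-of-range ValueError inside samples_contour.
    contours.samples_contour(np.array([25.0]), t_contour, hs_contour)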
--- mhkit/tests/wave/test_contours.py | 50 +++++++++++-------------------- mhkit/wave/contours.py | 43 +++++++------------------- 2 files changed, 27 insertions(+), 66 deletions(-) diff --git a/mhkit/tests/wave/test_contours.py b/mhkit/tests/wave/test_contours.py index a555fec28..7b870325b 100644 --- a/mhkit/tests/wave/test_contours.py +++ b/mhkit/tests/wave/test_contours.py @@ -1,15 +1,24 @@ from os.path import abspath, dirname, join, isfile, normpath, relpath -import unittest -import pickle -import json -import os - +from pandas.testing import assert_frame_equal from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO import pandas as pd import numpy as np - -import mhkit.wave as wave +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os testdir = dirname(abspath(__file__)) @@ -45,10 +54,6 @@ def setUpClass(self): self.wdrt_dt = 3600 self.wdrt_period = 50 - # `samples_contour`Example data - self.hs_contour = np.array([8.56637939, 9.27612515, 8.70427774]) - self.te_contour = np.array([10, 15, 20]) - @classmethod def tearDownClass(self): pass @@ -235,28 +240,7 @@ def test_kde_copulas(self): self.wdrt_copulas['bivariate_KDE_log_x2'])] self.assertTrue(all(close)) - def test_samples_contours_type_validation(self): - with self.assertRaises(TypeError): - wave.contours.samples_contour( - 'not an array', self.te_contour, self.hs_contour) - with self.assertRaises(TypeError): - wave.contours.samples_contour( - self.te_contour, 'not an array', self.hs_contour) - with self.assertRaises(TypeError): - wave.contours.samples_contour( - self.te_contour, self.hs_contour, 'not an array') - - def test_samples_contours_length_mismatch(self): - with self.assertRaises(ValueError): - wave.contours.samples_contour( - self.te_contour, self.hs_contour, np.array([1, 2])) - - def test_samples_contours_range_validation(self): - with self.assertRaises(ValueError): - wave.contours.samples_contour( - np.array([5, 25]), self.te_contour, self.hs_contour) - - def test_samples_contours_correct_interpolation(self): + def test_samples_contours(self): te_samples = np.array([10, 15, 20]) hs_samples_0 = np.array([8.56637939, 9.27612515, 8.70427774]) hs_contour = np.array(self.wdrt_copulas["gaussian_x1"]) diff --git a/mhkit/wave/contours.py b/mhkit/wave/contours.py index c4695e85f..3007448b8 100644 --- a/mhkit/wave/contours.py +++ b/mhkit/wave/contours.py @@ -1792,51 +1792,28 @@ def samples_contour(t_samples, t_contour, hs_contour): raise TypeError(f't_contour must be of type np.ndarray. Got: {type(t_contour)}') if not isinstance(hs_contour, np.ndarray): raise TypeError(f'hs_contour must be of type np.ndarray. 
Got: {type(hs_contour)}') - if len(t_contour) != len(hs_contour): - raise ValueError( - "t_contour and hs_contour must be of the same length.") - if np.any(t_samples < np.min(t_contour)) or np.any(t_samples > np.max(t_contour)): - raise ValueError( - "All t_samples must be within the range of t_contour.") - - - # Find minimum and maximum energy period values + # finds minimum and maximum energy period values amin = np.argmin(t_contour) amax = np.argmax(t_contour) aamin = np.min([amin, amax]) aamax = np.max([amin, amax]) - - # Separate points along the contour into upper & lower half + # finds points along the contour w1 = hs_contour[aamin:aamax] w2 = np.concatenate((hs_contour[aamax:], hs_contour[:aamin])) - - # Get samples min and max - t_min, t_max = np.min(t_samples), np.max(t_samples) - - # Choose the half of the contour with the largest wave height - if np.max(w1) > np.max(w2): - # Check if the max or min Tp values are within the contour half - include_aamax = t_max >= t_contour[aamax] or t_min <= t_contour[aamin] - # Set the x and y values for interpolation - x1 = t_contour[aamin:aamax + int(include_aamax)] - y1 = hs_contour[aamin:aamax + int(include_aamax)] + if (np.max(w1) > np.max(w2)): + x1 = t_contour[aamin:aamax] + y1 = hs_contour[aamin:aamax] else: - # Check if the max or min Tp values are within the contour half - include_aamin = t_max >= t_contour[aamin] or t_min <= t_contour[aamax] - # Set the x and y values for interpolation - x1 = np.concatenate( - (t_contour[aamax:], t_contour[:aamin + int(include_aamin)])) - y1 = np.concatenate( - (hs_contour[aamax:], hs_contour[:aamin + int(include_aamin)])) - - # Sort data based on the max and min Tp values + x1 = np.concatenate((t_contour[aamax:], t_contour[:aamin])) + y1 = np.concatenate((hs_contour[aamax:], hs_contour[:aamin])) + # sorts data based on the max and min energy period values ms = np.argsort(x1) x = x1[ms] y = y1[ms] - # Interpolation function + # interpolates the sorted data si = interp.interp1d(x, y) - # Interpolate Tp samples values to get Hs values + # finds the wave height based on the user specified energy period values hs_samples = si(t_samples) return hs_samples From f1f8a242c37c9bf3bb3aa1b601c64cd875690542 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:28:44 -0600 Subject: [PATCH 03/18] checkout v4 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9fa11f189..a4c124f9e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -30,7 +30,7 @@ jobs: should-run-hindcast: ${{ steps.hindcast-logic.outputs.should-run-hindcast }} steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check for changes in wave/io/hindcast id: changes From 9bf60f8d760440d2186a870af9c6de0b71f7dfb2 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:29:21 -0600 Subject: [PATCH 04/18] path filters v3 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a4c124f9e..b16b43bdb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -34,7 +34,7 @@ jobs: - name: Check for changes in wave/io/hindcast id: changes - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: filters: | wave_io_hindcast: From 7c334e6bf869f886721ac1c1d71d9ba9c5b459e3 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:30:18 -0600 
Subject: [PATCH 05/18] checkout v4 --- .github/workflows/main.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b16b43bdb..d63aa4182 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -55,7 +55,7 @@ jobs: PYTHON_VER: 3.9 steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Conda uses: s-weigand/setup-conda@v1 @@ -112,7 +112,7 @@ jobs: PYTHON_VER: ${{ matrix.python-version }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Conda uses: s-weigand/setup-conda@v1 @@ -176,7 +176,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Set up Git repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download data from artifact uses: actions/download-artifact@v3 @@ -214,7 +214,7 @@ jobs: python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Conda uses: s-weigand/setup-conda@v1 From 93d0b4d3221ac9b64abc7ffbb8e9dcba804fc695 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:35:51 -0600 Subject: [PATCH 06/18] hindcast wave and wind tests as 2 steps --- .github/workflows/main.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d63aa4182..cc9128f44 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -85,12 +85,16 @@ jobs: pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py - - name: Prepare Hindcast data + - name: Prepare Wave Hindcast data if: (needs.check-changes.outputs.should-run-hindcast == 'true') run: | - # pytest tests/test_specific_file.py::TestClass::test_function source activate TEST pytest mhkit/tests/wave/io/hindcast/test_hindcast.py + + - name: Prepare Wave Hindcast data + if: (needs.check-changes.outputs.should-run-hindcast == 'true') + run: | + source activate TEST pytest mhkit/tests/wave/io/hindcast/test_wind_toolkit.py - name: Upload data as artifact From f1299cfd79b5580030d528efcfb3ed02c53b6563 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:36:20 -0600 Subject: [PATCH 07/18] correct name to wind --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cc9128f44..a21c5535e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -91,7 +91,7 @@ jobs: source activate TEST pytest mhkit/tests/wave/io/hindcast/test_hindcast.py - - name: Prepare Wave Hindcast data + - name: Prepare Wind Hindcast data if: (needs.check-changes.outputs.should-run-hindcast == 'true') run: | source activate TEST From 60aa4a84a314f14dc5ed2bee278f6915ecbbe30e Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:38:09 -0600 Subject: [PATCH 08/18] small change to force hindcast tests to run --- mhkit/wave/io/hindcast/hindcast.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mhkit/wave/io/hindcast/hindcast.py b/mhkit/wave/io/hindcast/hindcast.py index 126902bfb..5922edbc7 100644 --- a/mhkit/wave/io/hindcast/hindcast.py +++ b/mhkit/wave/io/hindcast/hindcast.py @@ -105,7 +105,7 @@ def request_wpto_point_data( Note: To access the WPTO hindcast data, you will need to configure h5pyd for data access on HSDS. Please see the WPTO_hindcast_example - notebook for more information. + notebook for setup instructions. 
Parameters ---------- From 0924d85dc0fe021be4e69c961f2ec3d095c3c7bb Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 10:47:21 -0600 Subject: [PATCH 09/18] fix hindcast change path --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a21c5535e..b006a9156 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,7 +38,7 @@ jobs: with: filters: | wave_io_hindcast: - - 'mhkit/tests/wave/io/hindcast/**' + - 'mhkit/wave/io/hindcast/**' - id: hindcast-logic run: | From b46b720acdadbc9ffe2ab1c1ff13e6aabfc64259 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 19:32:15 -0600 Subject: [PATCH 10/18] create 3 seperate cache jobs to adhere to 6 hour job limit --- .github/workflows/main.yml | 84 +++++++++++++++++++++++++++++++++----- 1 file changed, 74 insertions(+), 10 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b006a9156..45902356a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -48,7 +48,7 @@ jobs: echo "should-run-hindcast=false" >> "$GITHUB_OUTPUT" fi - prepare-cache: + prepare-nonhindcast-cache: needs: [check-changes] runs-on: ubuntu-latest env: @@ -68,7 +68,7 @@ jobs: run: | conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config + export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" pip install -e . --no-deps --force-reinstall - name: Install dependencies @@ -78,34 +78,92 @@ jobs: python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - - name: Prepare data + - name: Prepare non-hindcast API data run: | source activate TEST pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py + - name: Upload data as artifact + uses: actions/upload-artifact@v3 + with: + name: data + path: ~/.cache/mhkit + + prepare-wave-hindcast-cache: + needs: [check-changes] + runs-on: ubuntu-latest + env: + PYTHON_VER: 3.9 + if: (needs.check-changes.outputs.should-run-hindcast == 'true') + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Conda + uses: s-weigand/setup-conda@v1 + with: + activate-conda: false + conda-channels: conda-forge + + - name: Python setup + shell: bash -l {0} + run: | + conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority + source activate TEST + export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + pip install -e . 
--no-deps --force-reinstall + - name: Prepare Wave Hindcast data - if: (needs.check-changes.outputs.should-run-hindcast == 'true') run: | source activate TEST pytest mhkit/tests/wave/io/hindcast/test_hindcast.py + - name: Upload Wave Hindcast data as artifact + uses: actions/upload-artifact@v3 + with: + name: wave-hindcast-data + path: ~/.cache/mhkit + + prepare-wind-hindcast-cache: + needs: [check-changes] + runs-on: ubuntu-latest + env: + PYTHON_VER: 3.9 + if: (needs.check-changes.outputs.should-run-hindcast == 'true') + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Conda + uses: s-weigand/setup-conda@v1 + with: + activate-conda: false + conda-channels: conda-forge + + - name: Python setup + shell: bash -l {0} + run: | + conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority + source activate TEST + export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + pip install -e . --no-deps --force-reinstall + - name: Prepare Wind Hindcast data - if: (needs.check-changes.outputs.should-run-hindcast == 'true') run: | source activate TEST pytest mhkit/tests/wave/io/hindcast/test_wind_toolkit.py - - name: Upload data as artifact + - name: Upload Wind Hindcast data as artifact uses: actions/upload-artifact@v3 with: - name: data + name: wind-hindcast-data path: ~/.cache/mhkit conda-build: name: conda-${{ matrix.os }}/${{ matrix.python-version }} - needs: [set-os, prepare-cache] + needs: [set-os, prepare-nonhindcast-cache] runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -163,7 +221,7 @@ jobs: pip-build: name: pip-${{ matrix.os }}/${{ matrix.python-version }} - needs: [set-os, prepare-cache] + needs: [set-os, prepare-nonhindcast-cache] runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -206,7 +264,13 @@ jobs: hindcast-calls: name: hindcast-${{ matrix.os }}/${{ matrix.python-version }} - needs: [check-changes, prepare-cache, set-os] + needs: + [ + check-changes, + prepare-wave-hindcast-cache, + prepare-wind-hindcast-cache, + set-os, + ] if: (needs.check-changes.outputs.should-run-hindcast == 'true') runs-on: ${{ matrix.os }} From bd143530979fe0aae641ff802468befbc6684858 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 19:38:37 -0600 Subject: [PATCH 11/18] upload and consolidate wave and wind data --- .github/workflows/main.yml | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 45902356a..ab2d1c075 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -298,11 +298,24 @@ jobs: export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config pip install -e . 
--no-deps --force-reinstall - - name: Download data from artifact + - name: Download Wave Hindcast data from artifact uses: actions/download-artifact@v3 with: - name: data - path: ~/.cache/mhkit + name: wave-hindcast-data + path: ~/.cache/mhkit/wave-hindcast + + - name: Download Wind Hindcast data from artifact + uses: actions/download-artifact@v3 + with: + name: wind-hindcast-data + path: ~/.cache/mhkit/wind-hindcast + + - name: Consolidate hindcast data + run: | + mkdir -p ~/.cache/mhkit/hindcast + mv ~/.cache/mhkit/wave-hindcast/* ~/.cache/mhkit/hindcast/ + mv ~/.cache/mhkit/wind-hindcast/* ~/.cache/mhkit/hindcast/ + shell: bash - name: Install MHKiT and run pytest shell: bash -l {0} From da821c9b40584d2b1f349b93ba7fc3d048d0db17 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 19:46:07 -0600 Subject: [PATCH 12/18] add install dependencies --- .github/workflows/main.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ab2d1c075..fdf47da72 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -115,6 +115,13 @@ jobs: export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" pip install -e . --no-deps --force-reinstall + - name: Install dependencies + shell: bash -l {0} + run: | + source activate TEST + python -m pip install --upgrade pip wheel + pip install coverage pytest coveralls . + - name: Prepare Wave Hindcast data run: | source activate TEST @@ -150,6 +157,13 @@ jobs: export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" pip install -e . --no-deps --force-reinstall + - name: Install dependencies + shell: bash -l {0} + run: | + source activate TEST + python -m pip install --upgrade pip wheel + pip install coverage pytest coveralls . 
+ - name: Prepare Wind Hindcast data run: | source activate TEST From 5850af04686b9cf4813a5bb4e55d1a3d9bb94b51 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 25 Apr 2024 19:48:53 -0600 Subject: [PATCH 13/18] non hindcast data does not need check-change job --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fdf47da72..32bda3018 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -49,7 +49,6 @@ jobs: fi prepare-nonhindcast-cache: - needs: [check-changes] runs-on: ubuntu-latest env: PYTHON_VER: 3.9 From cf4289caa8b485fc7fcf14c566c8d0ba7b6bd4b3 Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 26 Apr 2024 07:15:37 -0600 Subject: [PATCH 14/18] use hindcast folder inside cache for consolidation --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 32bda3018..6a6272190 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -326,8 +326,8 @@ jobs: - name: Consolidate hindcast data run: | mkdir -p ~/.cache/mhkit/hindcast - mv ~/.cache/mhkit/wave-hindcast/* ~/.cache/mhkit/hindcast/ - mv ~/.cache/mhkit/wind-hindcast/* ~/.cache/mhkit/hindcast/ + mv ~/.cache/mhkit/wave-hindcast/hindcast/* ~/.cache/mhkit/hindcast/ + mv ~/.cache/mhkit/wind-hindcast/hindcast/* ~/.cache/mhkit/hindcast/ shell: bash - name: Install MHKiT and run pytest From b0a5295d8b7a2e033d35ea71aa5bc8bb8779a4fa Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 26 Apr 2024 07:43:53 -0600 Subject: [PATCH 15/18] run on changes to the test as well --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a6272190..e24ea14fa 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -39,6 +39,7 @@ jobs: filters: | wave_io_hindcast: - 'mhkit/wave/io/hindcast/**' + - 'mhkit/tests/wave/io/hindcast/**' - id: hindcast-logic run: | From 936233b99924ec58fae618fc8a5188fb9a2908fb Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 26 Apr 2024 09:35:12 -0600 Subject: [PATCH 16/18] artifact v4 --- .github/workflows/main.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e24ea14fa..a3da3a96b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -86,7 +86,7 @@ jobs: pytest mhkit/tests/wave/io/test_cdip.py - name: Upload data as artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: data path: ~/.cache/mhkit @@ -128,7 +128,7 @@ jobs: pytest mhkit/tests/wave/io/hindcast/test_hindcast.py - name: Upload Wave Hindcast data as artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wave-hindcast-data path: ~/.cache/mhkit @@ -170,7 +170,7 @@ jobs: pytest mhkit/tests/wave/io/hindcast/test_wind_toolkit.py - name: Upload Wind Hindcast data as artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wind-hindcast-data path: ~/.cache/mhkit @@ -212,7 +212,7 @@ jobs: pip install coverage pytest coveralls . 
- name: Download data from artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: data path: ~/.cache/mhkit @@ -255,7 +255,7 @@ jobs: uses: actions/checkout@v4 - name: Download data from artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: data path: ~/.cache/mhkit @@ -313,13 +313,13 @@ jobs: pip install -e . --no-deps --force-reinstall - name: Download Wave Hindcast data from artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: wave-hindcast-data path: ~/.cache/mhkit/wave-hindcast - name: Download Wind Hindcast data from artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: wind-hindcast-data path: ~/.cache/mhkit/wind-hindcast From 411a385aedaf52e27a4305f3e81dc66075c99c61 Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 26 Apr 2024 09:36:29 -0600 Subject: [PATCH 17/18] miniconda v3 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a3da3a96b..62317a953 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -244,7 +244,7 @@ jobs: python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - - uses: conda-incubator/setup-miniconda@v2 + - uses: conda-incubator/setup-miniconda@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v3 From bb1bcea2d610786698a111aa6017fad7632a276c Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 26 Apr 2024 09:37:41 -0600 Subject: [PATCH 18/18] actions/setup-python@v5 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 62317a953..8ad4e4428 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -247,7 +247,7 @@ jobs: - uses: conda-incubator/setup-miniconda@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }}
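

For local debugging, a rough Python equivalent of the workflow's hindcast
cache consolidation step (paths mirror the YAML above; illustrative only):

    import shutil
    from pathlib import Path

    cache = Path.home() / '.cache' / 'mhkit'
    (cache / 'hindcast').mkdir(parents=True, exist_ok=True)
    # Merge both downloaded artifacts into the single hindcast
    # directory, mirroring the workflow's mv commands.
    for src in ('wave-hindcast', 'wind-hindcast'):
        for item in (cache / src / 'hindcast').glob('*'):
            shutil.move(str(item), str(cache / 'hindcast' / item.name))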