diff --git a/eyeseg/io_utils/input_pipe.py b/eyeseg/io_utils/input_pipe.py index 1b97db4..53d905e 100644 --- a/eyeseg/io_utils/input_pipe.py +++ b/eyeseg/io_utils/input_pipe.py @@ -18,6 +18,7 @@ def _parse_function(input_proto): other_features = { "volume": tf.io.FixedLenFeature([], tf.string), "bscan": tf.io.FixedLenFeature([], tf.string), + "layer_positions": tf.io.FixedLenFeature([], tf.string), "image": tf.io.FixedLenFeature([], tf.string), "group": tf.io.FixedLenFeature([], tf.string), } @@ -32,7 +33,12 @@ def _parse_function(input_proto): data = tf.io.parse_single_example(input_proto, image_feature_description) image = tf.io.parse_tensor(data["image"], tf.uint8) - image = tf.reshape(image, input_shape + (1,)) + image = tf.reshape(image, input_shape + (1,), name="reshape_1") + + layer_positions = tf.io.parse_tensor(data["layer_positions"], tf.float32) + layer_positions = tf.reshape( + layer_positions, input_shape + (len(mapping),), name="reshape_1.5" + ) # Sort mapping for guaranteed order layerout = tf.stack( @@ -42,13 +48,16 @@ def _parse_function(input_proto): ], axis=-1, ) - layerout = tf.reshape(layerout, (input_shape[1], len(mapping))) + layerout = tf.reshape( + layerout, (input_shape[1], len(mapping)), name="reshape_2" + ) volume = data["volume"] bscan = data["bscan"] group = data["group"] return { + "layer_positions": layer_positions, "image": image, "layerout": layerout, "Volume": volume, @@ -80,7 +89,11 @@ def _augment(in_data): lambda: image, ) - return {"image": image, "layerout": layerout} + return { + "image": image, + "layerout": layerout, + "layer_positions": in_data["layer_positions"], + } return _augment @@ -97,14 +110,41 @@ def _normalize(in_data): image = tf.cast(image, tf.float32) image = image - tf.math.reduce_mean(image) image = image / tf.math.reduce_std(image) - return {**in_data, **{"image": image, "layerout": layerout}} + return { + **in_data, + **{ + "image": image, + "layerout": layerout, + "layer_positions": in_data["layer_positions"], + }, + } @tf.function def _prepare_train(in_data): - image, layerout = in_data["image"], in_data["layerout"] + image, layerout, layer_positions = ( + in_data["image"], + in_data["layerout"], + in_data["layer_positions"], + ) + + return image, {"layer_output": layerout, "columnwise_softmax": layer_positions} + + +@tf.function +def _prepare_test(in_data): + volume, bscan, group, image, layerout = ( + in_data["Volume"], + in_data["Bscan"], + in_data["Group"], + in_data["image"], + in_data["layerout"], + ) return image, { "layer_output": layerout, + "Volume": volume, + "Bscan": bscan, + "Group": group, } @@ -119,6 +159,7 @@ def _prepare_train(in_data): ) return image, { "layer_output": layerout, + "columnwise_softmax": in_data["layer_positions"], } return _prepare_train @@ -228,13 +269,15 @@ def _transform(in_data): tf.linalg.inv(combined_matrix) ), interpolation="bilinear", + output_shape=input_shape, ) - # combined_matrix = tf.linalg.inv(combined_matrix) # Warp 1D data x_vals = ( tf.tile( - tf.reshape(tf.range(0, width, dtype=tf.float32), (1, width)), + tf.reshape( + tf.range(0, width, dtype=tf.float32), (1, width), name="reshape_3" + ), [num_classes, 1], ) + 0.5 @@ -326,13 +369,21 @@ def get_split( parsed_data.shuffle( 14000, seed, reshuffle_each_iteration=True ) # .map(_augment) - .map(_transform) + # .map(_transform) .map(_normalize) .batch(batch_size) .map(_prepare_train) .repeat(epochs) .prefetch(tf.data.experimental.AUTOTUNE) ) + elif split == "test": + dataset = ( + parsed_data.map(_normalize) + .batch(batch_size) + 
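# test split: no shuffling or augmentation; _prepare_test forwards the Volume/Bscan/Group
+                # identifiers together with the layer targets so each prediction can be matched to its B-scan
+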
.map(_prepare_test) + .repeat(epochs) + .prefetch(tf.data.experimental.AUTOTUNE) + ) else: dataset = ( parsed_data.map(_normalize) diff --git a/eyeseg/io_utils/losses.py b/eyeseg/io_utils/losses.py index 79d1b18..3ac577a 100644 --- a/eyeseg/io_utils/losses.py +++ b/eyeseg/io_utils/losses.py @@ -1,6 +1,41 @@ import tensorflow as tf +def layer_ce(true, pred): + # true : batch, columns, channels + + # pred: batch, rows, columns, channels + all = tf.math.log(tf.reduce_sum(true * pred, axis=1)) + mask = tf.logical_not(tf.math.is_inf(all)) + all_clean = tf.ragged.boolean_mask(all, mask) + + return tf.reduce_sum(-tf.reduce_sum(all_clean, axis=1), axis=-1) + + ctrue = tf.cast(tf.round(true), dtype=tf.int32) + + cols = tf.range(1024, dtype=tf.int32) + layers = tf.range(9, dtype=tf.int32) + + batch_losses = [] + for batch in tf.range(1, dtype=tf.int32): + layer_losses = [] + for layer in layers: + layer_results = [] + for c in cols: + row = ctrue[batch, c, layer] + # if True: #not tf.math.is_nan(true[batch, c, layer]): + value = pred[batch, row, c, layer] + layer_results.append(value) + layer_losses.append( + -tf.reduce_sum( + tf.math.log(tf.cast(tf.stack(layer_results), tf.float32)) + ) + ) + batch_losses.append(tf.reduce_mean(layer_losses)) + + return tf.stack(batch_losses) + + class MovingMeanFocalSSE(tf.keras.losses.Loss): # initialize instance attributes def __init__(self, window_size, curv_weight=0): diff --git a/eyeseg/io_utils/test.ipynb b/eyeseg/io_utils/test.ipynb new file mode 100644 index 0000000..de56ea2 --- /dev/null +++ b/eyeseg/io_utils/test.ipynb @@ -0,0 +1,136 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 58, + "metadata": { + "collapsed": true + }, + "outputs": [ + { + "data": { + "text/plain": "" + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import tensorflow as tf\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " #heights = tf.cast(tf.math.round(true), tf.int32)\n", + "\n", + " #return tf.reduce_mean(-tf.reduce_sum(tf.gather(pred, indices, )\n", + "\n", + "layer_ce(true, pred)" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "true = tf.tile(tf.expand_dims(tf.tile(tf.expand_dims(tf.range(10), 1), multiples=[1, 5]), 0), [2, 1,1])\n", + "pred = tf.tile(tf.expand_dims(tf.range(10), 1), [1,10])\n", + "pred = tf.tile(tf.expand_dims(tf.tile(tf.expand_dims(pred, -1) , [1,1,5]), 0), [2,1,1,1]) / 10" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 42, + "outputs": [], + "source": [ + "x_indices = tf.reshape(tf.range(true.shape[1]), (1,10,1))\n", + "x_indices = tf.tile(x_indices, (2,1,5))\n", + "\n", + "indices = tf.stack([true, x_indices], axis=1)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 43, + "outputs": [ + { + "data": { + "text/plain": "" + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "indices" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 47, + "outputs": [ + { + "data": { + "text/plain": "" + }, + "execution_count": 47, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tf.gather_nd(pred, [[[[0,5,5,0]]]])" + ], + "metadata": { + "collapsed": false, + "pycharm": { + 
"name": "#%%\n" + } + } + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/eyeseg/io_utils/utils.py b/eyeseg/io_utils/utils.py index 7ac3734..0ce7133 100644 --- a/eyeseg/io_utils/utils.py +++ b/eyeseg/io_utils/utils.py @@ -44,7 +44,7 @@ def preprocess_split(volume_paths, savepath, split, excluded=None): # Load volume data = ep.Oct.from_duke_mat(p) # Compute center of annotation - bm_annotation = (~np.isnan(data.layers["BM"])).astype(int) + bm_annotation = (~np.isnan(data.analyse["BM"])).astype(int) height_center, width_center = [ int(c) for c in ndimage.measurements.center_of_mass(bm_annotation) ] @@ -61,13 +61,13 @@ def preprocess_split(volume_paths, savepath, split, excluded=None): image = bscan.scan[:, width_center - 256 : width_center + 256].astype( np.uint8 ) - bm = bscan.layers["BM"][width_center - 256 : width_center + 256].astype( - np.float32 - ) - rpe = bscan.layers["RPE"][ + bm = bscan.analyse["BM"][ + width_center - 256 : width_center + 256 + ].astype(np.float32) + rpe = bscan.analyse["RPE"][ width_center - 256 : width_center + 256 ].astype(np.float32) - ilm = bscan.layers["ILM"][ + ilm = bscan.analyse["ILM"][ width_center - 256 : width_center + 256 ].astype(np.float32) diff --git a/eyeseg/models/feature_refinement_net.py b/eyeseg/models/feature_refinement_net.py index bf55d30..68f089e 100644 --- a/eyeseg/models/feature_refinement_net.py +++ b/eyeseg/models/feature_refinement_net.py @@ -51,11 +51,7 @@ def clip_norm_func(input): # Receptive field 727 dilations = [ - [ - 3, - 9, - 3, - ], + [3, 9, 3], [9, 27, 9], [27, 243, 27], ] @@ -124,14 +120,30 @@ def clip_norm_func(input): outputs = layers.Multiply()([output_features, attention_map]) else: outputs = x - output = get_output( - outputs, - num_classes, - input_shape, - guaranteed_order=guaranteed_order, - soft=soft_layerhead, - ) - output = layers.Reshape((input_shape[1], num_classes), name="layer_output")(output) - model = Model(inputs=[inputs], outputs=[output]) + if soft_layerhead: + outputs, col_softmax = get_output( + outputs, + num_classes, + input_shape, + guaranteed_order=guaranteed_order, + soft=soft_layerhead, + ) + output = layers.Reshape((input_shape[1], num_classes), name="layer_output")( + outputs + ) + model = Model(inputs=[inputs], outputs=[output, col_softmax]) + else: + outputs, _ = get_output( + outputs, + num_classes, + input_shape, + guaranteed_order=guaranteed_order, + soft=soft_layerhead, + ) + output = layers.Reshape((input_shape[1], num_classes), name="layer_output")( + outputs + ) + model = Model(inputs=[inputs], outputs=[output]) + return model diff --git a/eyeseg/models/parts.py b/eyeseg/models/parts.py index 48549c6..b3a7e7f 100644 --- a/eyeseg/models/parts.py +++ b/eyeseg/models/parts.py @@ -6,7 +6,7 @@ import tensorflow as tf -class FRNLayer(keras.layers.Layer): +class FRNLayer(layers.Layer): def __init__(self): super(FRNLayer, self).__init__() @@ -27,7 +27,7 @@ def call(self, x): return x * self.gamma + self.beta -class TLU(keras.layers.Layer): +class TLU(layers.Layer): def __init__(self): super(TLU, self).__init__() @@ -42,7 +42,7 @@ def call(self, x): return K.maximum(x, self.tau) -class TSwish(keras.layers.Layer): 
+class TSwish(layers.Layer): def __init__(self): super(TSwish, self).__init__() @@ -164,7 +164,9 @@ def get_output(input, num_classes, input_shape, guaranteed_order=True, soft=Fals num_classes, (1, 1), kernel_initializer="he_uniform" )(input) - col_softmax = activations.softmax(output_top_to_bottom, axis=1) + col_softmax = layers.Softmax(axis=1, name="columnwise_softmax")( + output_top_to_bottom + ) col_softargmax = tf.reduce_sum( col_softmax @@ -182,7 +184,13 @@ def get_output(input, num_classes, input_shape, guaranteed_order=True, soft=Fals col_softargmax[..., i] - output_list_top_to_bottom[0] ), ) - output_top_to_bottom = tf.stack(output_list_top_to_bottom, axis=-1) + output_top_to_bottom = tf.stack( + output_list_top_to_bottom, axis=-1, name="layer_output" + ) + + # Return the layer heights for L1 loss and the column-wise softmax for CE Loss as in He et al + return output_top_to_bottom, col_softmax + else: output_top_to_bottom = layers.Conv2D( num_classes, (1, 1), activation="relu", kernel_initializer="he_uniform" @@ -200,6 +208,8 @@ def get_output(input, num_classes, input_shape, guaranteed_order=True, soft=Fals output_top_to_bottom = layers.AveragePooling2D((input_shape[0], 1))( output_top_to_bottom ) - output_top_to_bottom = tf.math.multiply(output_top_to_bottom, input_shape[0]) + output_top_to_bottom = tf.math.multiply( + output_top_to_bottom, input_shape[0], name="layer_output" + ) - return output_top_to_bottom + return output_top_to_bottom, False diff --git a/eyeseg/scripts/commands/layers.py b/eyeseg/scripts/commands/analyse.py similarity index 67% rename from eyeseg/scripts/commands/layers.py rename to eyeseg/scripts/commands/analyse.py index 0fc2b61..8469cee 100644 --- a/eyeseg/scripts/commands/layers.py +++ b/eyeseg/scripts/commands/analyse.py @@ -3,6 +3,8 @@ from importlib import resources +import skimage.transform + from eyeseg.models import weights as weights_resources from eyeseg.models.utils import load_model from eyeseg.scripts.utils import find_volumes @@ -21,9 +23,16 @@ default=False, help="Whether to overwrite existing layers. Default is --no-overwrite.", ) +@click.option( + "--drusen_threshold", + "-t", + type=click.INT, + default=2, + help="Minimum height for drusen to be included", +) @click.argument("model_id", type=click.STRING, default="2c41ukad") @click.pass_context -def layers(ctx: click.Context, model_id, overwrite): +def analyse(ctx: click.Context, model_id, overwrite, drusen_threshold): """Predict OCT layers \b @@ -69,27 +78,47 @@ def layers(ctx: click.Context, model_id, overwrite): # Predict layers data = get_layers(data, model_id) # Save predicted layers - output_dir = output_path / path.relative_to(input_path).parent / path.name + output_dir = output_path / path.relative_to(input_path).parent output_dir.mkdir(parents=True, exist_ok=True) - with open(output_dir / ("layers.pkl"), "wb") as myfile: - pickle.dump( - {name: data.data for name, data in data.layers.items()}, myfile + + if "RPE" in data.layers and "BM" in data.layers: + drusen = ep.drusen( + data.layers["RPE"], + data.layers["BM"], + data.shape, + minimum_height=drusen_threshold, ) + data.add_voxel_annotation(drusen, name="drusen") + + data.save(output_dir / (path.name + ".eye")) - click.echo("\nPredicted OCT layers are saved. You can now use the 'drusen' command") + click.echo( + "\nComputed layers and drusen are saved. 
You can now use the 'quantify', 'plot-enface' and 'plot-bscans' commands" + ) def get_layers(data, model_id): - layer_model, model_config = load_model(model_id, (512, data[0].shape[1], 1)) + if data.meta["scale_x"] < 0.009: + factor = 2 + else: + factor = 1 + + width = data[0].shape[1] + layer_model, model_config = load_model(model_id, (512, width // factor, 1)) results = [] for bscan in tqdm(data, desc=f"Predict '{data.meta['visit_date']}': "): - img = preprocess_standard(bscan.data, bscan.shape[1]) + img = skimage.transform.rescale(bscan.data, (1, 1 / factor)) + img = preprocess_standard(img, width // factor) prediction = layer_model.predict(img)[0] results.append(prediction) results = np.flip(np.stack(results, axis=0), axis=0) for index, name in model_config["layer_mapping"].items(): - height_map = results[..., index] + if factor != 1: + height_map = skimage.transform.rescale(results[..., index], (1, factor)) + # height_map = np.interp(np.arange(width), np.arange(width//factor) * factor, results[..., index]) + else: + height_map = results[..., index] data.add_layer_annotation(height_map, name=name) return data diff --git a/eyeseg/scripts/commands/drusen.py b/eyeseg/scripts/commands/drusen.py deleted file mode 100644 index b9572c7..0000000 --- a/eyeseg/scripts/commands/drusen.py +++ /dev/null @@ -1,94 +0,0 @@ -import click -from pathlib import Path -import logging - -import eyepy as ep -from tqdm import tqdm -import pickle -import numpy as np -import pandas as pd - -from eyeseg.scripts.utils import find_volumes - -logger = logging.getLogger("eyeseg.drusen") - - -@click.command() -@click.option( - "--drusen_threshold", - "-t", - type=click.INT, - default=2, - help="Minimum height for drusen to be included", -) -@click.option( - "--overwrite/--no-overwrite", - default=False, - help="Whether to overwrite existing drusen. 
Default is --no-overwrite.", -) -@click.pass_context -def drusen(ctx: click.Context, drusen_threshold, overwrite): - """Compute drusen from BM and RPE layer segmentation - - \f - :param drusen_threshold: - :return: - """ - input_path = ctx.obj["input_path"] - output_path = ctx.obj["output_path"] - - volumes = find_volumes(input_path) - - # Check for which volumes drusen need to be predicted - if overwrite is False and output_path.is_dir(): - # Remove path from volumes if layers are found in the output location - precomputed_drusen = [ - p.name for p in output_path.iterdir() if (p / "drusen.pkl").exists() - ] - for datatype in volumes.keys(): - volumes[datatype] = [ - v for v in volumes[datatype] if v.name not in precomputed_drusen - ] - - data_readers = {"vol": ep.import_heyex_vol, "xml": ep.import_heyex_xml} - # Read data - no_layers_volumes = [] - results = [] - for datatype, volumes in volumes.items(): - for path in tqdm(volumes): - # Load data - data = data_readers[datatype](path) - # Read layers - output_dir = output_path / path.relative_to(input_path).parent / path.name - layers_filepath = output_dir / "layers.pkl" - try: - with open(layers_filepath, "rb") as myfile: - layers = pickle.load(myfile) - except FileNotFoundError: - logger.warning(f"No layers.pkl found for {path.name}") - no_layers_volumes.append(path) - continue - - for name, layer in layers.items(): - data.add_layer_annotation(layer, name=name) - - # Compute drusen - drusen = ep.drusen( - data.layers["RPE"], - data.layers["BM"], - data.shape, - minimum_height=drusen_threshold, - ) - output_dir = output_path / path.relative_to(input_path).parent / path.name - drusen_filepath = output_dir / "drusen.pkl" - with open(drusen_filepath, "wb") as myfile: - pickle.dump(drusen, myfile) - - if len(no_layers_volumes) > 0: - click.echo( - f"No retinal layers found for {len(no_layers_volumes)} volumes. To predict layers run the 'layers' command." - ) - else: - click.echo( - "\nComputed drusen are saved. 
You can now use the 'quantify', 'plot-enface' and 'plot-bscans' commands" - ) diff --git a/eyeseg/scripts/commands/evaluate.py b/eyeseg/scripts/commands/evaluate.py index 9cbdef7..31be861 100644 --- a/eyeseg/scripts/commands/evaluate.py +++ b/eyeseg/scripts/commands/evaluate.py @@ -1,8 +1,9 @@ -import os import click import logging import yaml -import wandb +import numpy as np +import pickle +import matplotlib.pyplot as plt from eyeseg.models.feature_refinement_net import model from eyeseg.io_utils.losses import MovingMeanFocalSSE @@ -27,12 +28,16 @@ help="Shape of the data.", ) @click.option("-b", "--batch_size", type=int, help="Batch size used during training") +@click.option( + "-p", "--plot", type=bool, default=True, help="Plot test dataset with predictions" +) @click.pass_context def evaluate( ctx: click.Context, run_path, input_shape, batch_size, + plot, ): """Evaluate a model on a test dataset""" input_path = ctx.obj["input_path"] @@ -61,7 +66,7 @@ def evaluate( input_path, config["layer_mapping"], input_shape, - batch_size, + 1, 1, "test", ) @@ -84,5 +89,32 @@ def evaluate( sample_weight_mode="temporal", ) - results = my_model.evaluate(test_data, batch_size=batch_size, return_dict=True) - print(results) + results = {} + for image, data in test_data: + prediction = my_model.predict(image) + layerout = data["layer_output"] + volume, bscan, group = ( + bytes.decode(data["Volume"].numpy()[0]), + bytes.decode(data["Bscan"].numpy()[0]), + bytes.decode(data["Group"].numpy()[0]), + ) + + results[(volume, bscan, group)] = np.abs(prediction - layerout) + + (output_path / "results").mkdir(parents=True, exist_ok=True) + if plot: + plt.imshow(image[0, ..., 0], cmap="gray") + for layer in range(9): + plt.plot(image.shape[1] - prediction[0, ..., layer]) + plt.savefig( + output_path + / "results" + / f"{np.mean(results[(volume, bscan, group)]):.2f}_{volume}_{bscan}.jpeg" + ) + plt.close() + + with open(output_path / "results" / "test_results.pkl", "wb") as myfile: + pickle.dump(results, myfile) + + # results = my_model.evaluate(test_data, batch_size=batch_size, return_dict=True) + # print(results) diff --git a/eyeseg/scripts/commands/plot_bscans.py b/eyeseg/scripts/commands/plot_bscans.py index 9e3c815..c41cc2d 100644 --- a/eyeseg/scripts/commands/plot_bscans.py +++ b/eyeseg/scripts/commands/plot_bscans.py @@ -25,16 +25,8 @@ default=[], help="Layers predictions to overlay on the B-scan", ) -@click.option( - "--volumes", - "-v", - type=click.STRING, - multiple=True, - default=[], - help="Volumes to plot B-scans for. If not specified B-scans are plotted for all volumes.", -) @click.pass_context -def plot_bscans(ctx: click.Context, drusen, layers, volumes): +def plot_bscans(ctx: click.Context, drusen, layers): """Plot B-scans \f @@ -43,79 +35,30 @@ def plot_bscans(ctx: click.Context, drusen, layers, volumes): input_path = ctx.obj["input_path"] output_path = ctx.obj["output_path"] - available_volumes = find_volumes(input_path) - if len(volumes) != 0: - # Select volume path if any folder in the volumes path is in the folders specified by volumes - new_volumes = { - k: [ - v - for v in volums - if not set(v.relative_to(input_path).parts).isdisjoint(set(volumes)) - ] - for k, volums in available_volumes.items() - } - click.echo( - f"You specified {len(volumes)} volumes. {sum([len(v) for v in new_volumes.values()])} volumes were found." 
- ) + volumes = [p for p in (input_path / "processed").iterdir() if p.suffix == ".eye"] + if len(volumes) == 0: + click.echo("\nNo volumes found.") + sys.exit() - if sum([len(v) for v in new_volumes.values()]) == 0: - sys.exit() - volumes = new_volumes + if drusen: + areas = ["drusen"] else: - volumes = available_volumes - - data_readers = {"vol": ep.import_heyex_vol, "xml": ep.import_heyex_xml} - # Read data - for datatype, volumes in volumes.items(): - for path in tqdm(volumes): - # Load data - data = data_readers[datatype](path) - # Load layers and drusen - output_dir = output_path / path.relative_to(input_path).parent / path.name - layers_filepath = output_dir / "layers.pkl" - drusen_filepath = output_dir / "drusen.pkl" - - try: - with open(layers_filepath, "rb") as myfile: - layer_data = pickle.load(myfile) - except FileNotFoundError: - logger.warning(f"No layers.pkl found for {path.name}") - continue - - try: - with open(drusen_filepath, "rb") as myfile: - drusen_data = pickle.load(myfile) - except FileNotFoundError: - logger.warning(f"No drusen.pkl found for {path.stem}") - continue - - for name, layer in layer_data.items(): - data.add_layer_annotation(layer, name=name) - data.add_voxel_annotation(drusen_data, name="drusen") - - if "iRPE" in layers: - irpe = ep.quantification._drusen.ideal_rpe( - data.layers["RPE"].data, data.layers["BM"].data, data.shape - ) - data.add_layer_annotation(irpe, name="iRPE") + ares = [] - save_path = ( - output_path - / "plots" - / "bscans" - / path.relative_to(input_path).parent - / path.stem + for path in tqdm(volumes): + # Load data + data = ep.EyeVolume.load(path) + save_path = output_path / "plots" / "bscans" / path.stem + save_path.mkdir(parents=True, exist_ok=True) + for bscan in tqdm(data): + bscan.plot(areas=areas, layers=layers) + plt.axis("off") + plt.savefig( + save_path / f"{bscan.index}.jpeg", + bbox_inches="tight", + pad_inches=0, + dpi=200, ) - save_path.mkdir(parents=True, exist_ok=True) - for bscan in tqdm(data): - bscan.plot(areas=["drusen"], layers=layers) - plt.axis("off") - plt.savefig( - save_path / f"{bscan.index}.jpeg", - bbox_inches="tight", - pad_inches=0, - dpi=200, - ) - plt.close() + plt.close() click.echo("\nB-scan plots are saved.") diff --git a/eyeseg/scripts/commands/plot_data.py b/eyeseg/scripts/commands/plot_data.py index d275552..331def8 100644 --- a/eyeseg/scripts/commands/plot_data.py +++ b/eyeseg/scripts/commands/plot_data.py @@ -13,6 +13,7 @@ @click.option( "-c", "--model-config", + default="./config.yaml", type=click.Path(exists=True), help="Path to to model configuration as yaml file. 
If not provided a new file is generated from the provided arguments.", ) diff --git a/eyeseg/scripts/commands/plot_enface.py b/eyeseg/scripts/commands/plot_enface.py index f1b551d..9df40be 100644 --- a/eyeseg/scripts/commands/plot_enface.py +++ b/eyeseg/scripts/commands/plot_enface.py @@ -37,54 +37,23 @@ def plot_enface(ctx: click.Context, drusen, bscan_area, bscan_positions): input_path = ctx.obj["input_path"] output_path = ctx.obj["output_path"] - volumes = find_volumes(input_path) - - data_readers = {"vol": ep.import_heyex_vol, "xml": ep.import_heyex_xml} - # Read data - for datatype, volumes in volumes.items(): - for path in tqdm(volumes): - # Load data - data = data_readers[datatype](path) - # Load layers and drusen - output_dir = output_path / path.relative_to(input_path).parent / path.name - layers_filepath = output_dir / "layers.pkl" - drusen_filepath = output_dir / "drusen.pkl" - - try: - with open(layers_filepath, "rb") as myfile: - layer_data = pickle.load(myfile) - except FileNotFoundError: - logger.warning(f"No layers.pkl found for {path.name}") - continue - - try: - with open(drusen_filepath, "rb") as myfile: - drusen_data = pickle.load(myfile) - except FileNotFoundError: - logger.warning(f"No drusen.pkl found for {path.stem}") - continue - - for name, layer in layer_data.items(): - data.add_layer_annotation(layer, name=name) - data.add_voxel_annotation(drusen_data, name="drusen") - - save_path = ( - output_path - / "plots" - / "enface" - / path.relative_to(input_path).parent - / path.stem - ) - save_path.mkdir(parents=True, exist_ok=True) - - if not bscan_positions: - bscan_positions = None - data.plot( - projections=["drusen"], - bscan_region=bscan_area, - bscan_positions=bscan_positions, - ) - plt.savefig(save_path / f"{path.stem}.jpeg", bbox_inches="tight", dpi=200) - plt.close() + volumes = [p for p in (input_path / "processed").iterdir() if p.suffix == ".eye"] + + for path in tqdm(volumes): + # Load data + data = ep.EyeVolume.load(path) + + save_path = output_path / "plots" / "enface" + save_path.mkdir(parents=True, exist_ok=True) + + if not bscan_positions: + bscan_positions = None + data.plot( + projections=["drusen"], + bscan_region=bscan_area, + bscan_positions=bscan_positions, + ) + plt.savefig(save_path / f"{path.stem}.jpeg", bbox_inches="tight", dpi=200) + plt.close() click.echo("\nDrusen enface plots are saved.") diff --git a/eyeseg/scripts/commands/quantify.py b/eyeseg/scripts/commands/quantify.py index 9cdfbc2..0ab3f46 100644 --- a/eyeseg/scripts/commands/quantify.py +++ b/eyeseg/scripts/commands/quantify.py @@ -1,17 +1,10 @@ import click -from pathlib import Path import logging import eyepy as ep from tqdm import tqdm -import pickle -import numpy as np import pandas as pd -from eyeseg.scripts.utils import find_volumes - -# from eyeseg.grids import grid - logger = logging.getLogger("eyeseg.quantify") @@ -53,32 +46,23 @@ def quantify(ctx: click.Context, radii, sectors, offsets): input_path = ctx.obj["input_path"] output_path = ctx.obj["output_path"] - volumes = find_volumes(input_path) + volumes = [p for p in (input_path / "processed").iterdir() if p.suffix == ".eye"] - data_readers = {"vol": ep.import_heyex_vol, "xml": ep.import_heyex_xml} # Read data - no_drusen_volumes = [] results = [] - for datatype, volumes in volumes.items(): - for path in tqdm(volumes): - # Load data - data = data_readers[datatype](path) - # Read layers - output_dir = output_path / path.relative_to(input_path).parent / path.name - layers_filepath = output_dir / "layers.pkl" - 
drusen_filepath = output_dir / "drusen.pkl" - try: - with open(drusen_filepath, "rb") as myfile: - drusen = pickle.load(myfile) - except FileNotFoundError: - no_drusen_volumes.append(path) - continue + for path in tqdm(volumes): + # Load data + data = ep.EyeVolume.load(path) - data.add_voxel_annotation( - drusen, name="drusen", radii=radii, n_sectors=sectors, offsets=offsets - ) - results.append(data.volume_maps["drusen"].quantification) + vm = data.volume_maps["drusen"] + vm.radii = radii + vm.n_sectors = sectors + vm.offsets = offsets + + quant = vm.quantification + quant["Visit"] = path.stem + results.append(vm.quantification) # Save quantification results as csv if len(results) > 0: @@ -88,8 +72,3 @@ def quantify(ctx: click.Context, radii, sectors, offsets): csv.to_csv(output_path / f"drusen_results.csv") click.echo(f"Drusen quantification saved for {len(csv)} volumes.") - - if len(no_drusen_volumes) > 0: - click.echo( - f"No drusen found for {len(no_drusen_volumes)} volumes. To compute drusen run the 'drusen' command after having predicted layers with the 'layers' command." - ) diff --git a/eyeseg/scripts/commands/train.py b/eyeseg/scripts/commands/train.py index bc83969..ca85067 100644 --- a/eyeseg/scripts/commands/train.py +++ b/eyeseg/scripts/commands/train.py @@ -14,7 +14,7 @@ _normalize, _prepare_train, ) -from eyeseg.io_utils.losses import MovingMeanFocalSSE +from eyeseg.io_utils.losses import MovingMeanFocalSSE, layer_ce from eyeseg.io_utils.input_pipe import get_split, count_samples from eyeseg.io_utils.utils import get_metrics @@ -266,13 +266,17 @@ def get_metric(*args, **kwargs): window_size=config["training"]["boosting_window_size"], curv_weight=config["training"]["curv_weight"], ) + if config["parameters"]["soft_layerhead"]: + losses = {"layer_output": loss_fn, "columnwise_softmax": layer_ce} + else: + losses = {"layer_output": loss_fn} metrics["layer_output"].append(internal_metric(loss_fn, "ema", name="EMA")) my_model.compile( optimizer=tf.keras.optimizers.Adam( learning_rate=config["training"]["lr"], clipnorm=1.0, clipvalue=0.5 ), - loss={"layer_output": loss_fn}, + loss=losses, metrics=metrics, sample_weight_mode="temporal", ) diff --git a/eyeseg/scripts/main.py b/eyeseg/scripts/main.py index 9e3cb7d..9668821 100644 --- a/eyeseg/scripts/main.py +++ b/eyeseg/scripts/main.py @@ -2,8 +2,7 @@ import os from pathlib import Path from eyeseg.scripts.commands.check import check -from eyeseg.scripts.commands.layers import layers -from eyeseg.scripts.commands.drusen import drusen +from eyeseg.scripts.commands.analyse import analyse from eyeseg.scripts.commands.quantify import quantify from eyeseg.scripts.commands.plot_enface import plot_enface from eyeseg.scripts.commands.plot_bscans import plot_bscans @@ -102,8 +101,7 @@ def main(ctx, input_path, output_path, log_level, gpu): main.add_command(check) -main.add_command(layers) -main.add_command(drusen) +main.add_command(analyse) main.add_command(quantify) main.add_command(plot_enface) main.add_command(plot_bscans) diff --git a/eyeseg/scripts/utils.py b/eyeseg/scripts/utils.py index edfffc4..36577d7 100644 --- a/eyeseg/scripts/utils.py +++ b/eyeseg/scripts/utils.py @@ -2,16 +2,23 @@ def find_volumes(data_path): if data_path.is_dir(): vol_volumes = data_path.glob("**/*.vol") xml_volumes = data_path.glob("**/*.xml") + eye_volumes = data_path.glob("**/*.eye") # We do not support multiple XML exports in the same folder. 
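# The parent folder of each XML export is therefore treated as the volume path below.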
xml_volumes = [v.parent for v in xml_volumes] elif data_path.is_file(): if ".vol" == data_path.suffix: vol_volumes = [data_path] xml_volumes = [] + eye_volumes = [] if ".xml" == data_path.suffix: xml_volumes = [data_path] vol_volumes = [] + eye_volumes = [] + if ".eye" == data_path.suffix: + eye_volumes = [data_path] + xml_volumes = [] + vol_volumes = [] else: raise ValueError("Data not found") - return {"vol": set(vol_volumes), "xml": set(xml_volumes)} + return {"vol": set(vol_volumes), "xml": set(xml_volumes), "eye": set(eye_volumes)} diff --git a/poetry.lock b/poetry.lock index 2db7fe3..70a38ca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,6 +6,14 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "argcomplete" version = "1.12.3" @@ -17,6 +25,51 @@ python-versions = "*" [package.extras] test = ["coverage", "flake8", "pexpect", "wheel"] +[[package]] +name = "argon2-cffi" +version = "21.3.0" +description = "The secure Argon2 password hashing algorithm." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] +docs = ["sphinx", "sphinx-notfound-page", "furo"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["pytest", "cogapp", "pre-commit", "wheel"] +tests = ["pytest"] + +[[package]] +name = "asttokens" +version = "2.0.8" +description = "Annotate AST trees with source code positions" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid (<=2.5.3)", "pytest"] + [[package]] name = "astunparse" version = "1.6.3" @@ -40,7 +93,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -50,6 +103,45 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "beautifulsoup4" +version = "4.11.1" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bleach" +version = "5.0.1" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] +dev = ["build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "Sphinx (==4.3.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)", "black (==22.3.0)", "mypy (==0.961)"] + [[package]] name = "cachetools" version = "5.2.0" @@ -66,6 +158,17 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.3.1" @@ -140,6 +243,14 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "debugpy" +version = "1.6.3" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "decli" version = "0.5.2" @@ -156,6 +267,14 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "distlib" version = "0.3.4" @@ -183,6 +302,22 @@ python-versions = "*" [package.dependencies] six = ">=1.4.0" +[[package]] +name = "entrypoints" +version = "0.4" +description = "Discover and load entry points from installed packages." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "executing" +version = "1.0.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "eyepie" version = "0.6.7" @@ -197,6 +332,17 @@ itk = ">=5.2.1,<6.0.0" matplotlib = ">=3.5.1,<4.0.0" scikit-image = ">=0.19.1,<0.20.0" +[[package]] +name = "fastjsonschema" +version = "2.16.1" +description = "Fastest Python implementation of JSON schema" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "filelock" version = "3.7.1" @@ -418,6 +564,21 @@ docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.9.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + [[package]] name = "iniconfig" version = "1.1.1" @@ -426,6 +587,91 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = 
"ipykernel" +version = "6.15.2" +description = "IPython Kernel for Jupyter" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +debugpy = ">=1.0" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=17" +tornado = ">=6.1" +traitlets = ">=5.1.0" + +[package.extras] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-cov", "pytest-timeout", "pytest (>=6.0)"] + +[[package]] +name = "ipython" +version = "8.4.0" +description = "IPython: Productive Interactive Computing" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "Sphinx (>=1.3)", "ipykernel", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.19)", "pandas", "trio"] +black = ["black"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "trio"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "ipywidgets" +version = "8.0.2" +description = "Jupyter interactive widgets" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipykernel = ">=4.5.1" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0,<4.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0,<5.0" + +[package.extras] +test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + [[package]] name = "itk" version = "5.2.1.post1" @@ -509,11 +755,26 @@ python-versions = "*" [package.dependencies] itk-filtering = "5.2.1.post1" +[[package]] +name = "jedi" +version = "0.18.1" +description = "An autocompletion tool for Python that can be used for text editors." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] + [[package]] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -523,6 +784,110 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsonschema" +version = "4.15.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" + +[[package]] +name = "jupyter-client" +version = "7.3.5" +description = "Jupyter protocol implementation and client libraries" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +entrypoints = "*" +jupyter-core = ">=4.9.2" +nest-asyncio = ">=1.5.4" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = "*" + +[package.extras] +doc = ["ipykernel", "myst-parser", "sphinx-rtd-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-console" +version = "6.4.4" +description = "Jupyter terminal console" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipykernel = "*" +ipython = "*" +jupyter-client = ">=7.0.0" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" + +[package.extras] +test = ["pexpect"] + +[[package]] +name = "jupyter-core" +version = "4.11.1" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = "*" + +[package.extras] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jupyterlab-widgets" +version = "3.0.3" +description = "Jupyter interactive widgets for JupyterLab" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "keras" version = "2.9.0" @@ -564,6 +929,20 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "lxml" +version = "4.9.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.7)"] + [[package]] name = "markdown" version = "3.3.7" @@ -582,7 +961,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -605,6 +984,104 @@ pyparsing = ">=2.2.1" python-dateutil = ">=2.7" setuptools_scm = ">=4" +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mistune" +version = "2.0.4" +description = "A sane Markdown parser with useful plugins and renderers" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "nbclient" +version = "0.6.7" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "main" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +jupyter-client = ">=6.1.5" +nbformat = ">=5.0" +nest-asyncio = "*" +traitlets = ">=5.2.2" + +[package.extras] +sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython", "ipywidgets", "mypy", "nbconvert", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.0.0" +description = "Converting Jupyter Notebooks" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "*" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +lxml = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.0" + +[package.extras] +all = ["ipykernel", "ipython", "ipywidgets (>=7)", "nbsphinx (>=0.2.12)", "pre-commit", "pyppeteer (>=1,<1.1)", "pyqtwebengine (>=5.15)", "pytest", "pytest-cov", "pytest-dependency", "sphinx-rtd-theme", "sphinx (==5.0.2)", "tornado (>=6.1)"] +docs = ["ipython", "nbsphinx (>=0.2.12)", "sphinx-rtd-theme", "sphinx (==5.0.2)"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency"] +webpdf = ["pyppeteer (>=1,<1.1)"] + +[[package]] +name = "nbformat" +version = "5.4.0" +description = "The Jupyter Notebook format" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +test = ["check-manifest", "testpath", "pytest", "pre-commit"] + +[[package]] +name = "nest-asyncio" +version = "1.5.5" +description = "Patch asyncio to allow nested event loops" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "networkx" version = "2.8.4" @@ 
-628,6 +1105,36 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "notebook" +version = "6.4.12" +description = "A web-based notebook environment for interactive computing" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=5.3.4" +jupyter-core = ">=4.6.1" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +json-logging = ["json-logging"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] + [[package]] name = "numpy" version = "1.22.4" @@ -696,6 +1203,26 @@ pytz = ">=2020.1" [package.extras] test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + [[package]] name = "pathtools" version = "0.1.2" @@ -704,6 +1231,25 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "pillow" version = "9.1.1" @@ -716,6 +1262,14 @@ python-versions = ">=3.7" docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "platformdirs" version = "2.5.2" @@ -756,6 +1310,17 @@ pyyaml = ">=5.1" toml = "*" virtualenv = ">=20.0.8" +[[package]] +name = "prometheus-client" +version = "0.14.1" +description = "Python client for the Prometheus monitoring system." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +twisted = ["twisted"] + [[package]] name = "promise" version = "2.3" @@ -774,7 +1339,7 @@ test = ["pytest (>=2.7.3)", "pytest-cov", "coveralls", "futures", "pytest-benchm name = "prompt-toolkit" version = "3.0.29" description = "Library for building powerful interactive command lines in Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6.2" @@ -800,11 +1365,30 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +tests = ["pytest"] + [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -827,6 +1411,25 @@ python-versions = "*" [package.dependencies] pyasn1 = ">=0.4.6,<0.5.0" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyparsing" version = "3.0.9" @@ -838,6 +1441,14 @@ python-versions = ">=3.6.8" [package.extras] diagrams = ["railroad-diagrams", "jinja2"] +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "pytest" version = "6.2.5" @@ -889,6 +1500,22 @@ python-versions = ">=3.7" [package.dependencies] numpy = ">=1.17.3" +[[package]] +name = "pywin32" +version = "304" +description = "Python for Window Extensions" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pywinpty" +version = "2.0.7" +description = "Pseudo terminal support for Windows from Python." 
+category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "pyyaml" version = "6.0" @@ -897,6 +1524,54 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "pyzmq" +version = "23.2.1" +description = "Python bindings for 0MQ" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} +py = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "qtconsole" +version = "5.3.2" +description = "Jupyter Qt console" +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +ipykernel = ">=4.1" +ipython-genutils = "*" +jupyter-client = ">=4.1" +jupyter-core = "*" +pygments = "*" +pyzmq = ">=17.1" +qtpy = ">=2.0.1" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "2.2.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest-qt", "pytest-cov (>=3.0.0)", "pytest (>=6,!=7.0.0,!=7.0.1)"] + [[package]] name = "questionary" version = "1.10.0" @@ -990,6 +1665,19 @@ python-versions = ">=3.8,<3.11" [package.dependencies] numpy = ">=1.17.3,<1.25.0" +[[package]] +name = "send2trash" +version = "1.8.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." +category = "main" +optional = false +python-versions = "*" + +[package.extras] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] +win32 = ["pywin32"] + [[package]] name = "sentry-sdk" version = "1.5.12" @@ -1071,6 +1759,30 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "stack-data" +version = "0.5.0" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = "*" +executing = "*" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "typeguard", "pytest"] + [[package]] name = "tensorboard" version = "2.9.1" @@ -1208,6 +1920,22 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "terminado" +version = "0.15.0" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] + [[package]] name = "tifffile" version = "2022.5.4" @@ -1222,6 +1950,21 @@ numpy = ">=1.19.2" [package.extras] all = ["imagecodecs (>=2021.11.20)", "matplotlib (>=3.3)", "lxml"] +[[package]] +name = "tinycss2" +version = "1.1.1" +description = "A tiny CSS parser" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] + [[package]] name = "toml" version = "0.10.2" @@ -1246,6 +1989,14 @@ category = "dev" optional = false python-versions = ">=3.6,<4.0" +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "main" +optional = false +python-versions = ">= 3.7" + [[package]] name = "tqdm" version = "4.64.0" @@ -1263,6 +2014,17 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "traitlets" +version = "5.3.0" +description = "" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pre-commit", "pytest"] + [[package]] name = "typeguard" version = "2.13.3" @@ -1351,7 +2113,15 @@ sweeps = ["sweeps (>=0.1.0)"] name = "wcwidth" version = "0.2.5" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" optional = false python-versions = "*" @@ -1366,6 +2136,14 @@ python-versions = ">=3.7" [package.extras] watchdog = ["watchdog"] +[[package]] +name = "widgetsnbextension" +version = "4.0.3" +description = "Jupyter interactive widgets for Jupyter Notebook" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "wrapt" version = "1.14.1" @@ -1389,17 +2167,49 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.8,<3.11" -content-hash = "82724c57bf779b674c6337071c4f0dc8036ac47f5018c26f30042800ab346cf5" +content-hash = "f5a3946b918fea863bf7f901f5fda2ffefb50cb59f05aab1b2261b89411c2cf3" [metadata.files] absl-py = [ {file = "absl-py-1.1.0.tar.gz", hash = "sha256:3aa39f898329c2156ff525dfa69ce709e42d77aab18bf4917719d6f260aa6a08"}, {file = "absl_py-1.1.0-py3-none-any.whl", hash = "sha256:db97287655e30336938f8058d2c81ed2be6af1d9b6ebbcd8df1080a6c7fcd24e"}, ] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] argcomplete = [ {file = "argcomplete-1.12.3-py2.py3-none-any.whl", hash = "sha256:291f0beca7fd49ce285d2f10e4c1c77e9460cf823eef2de54df0c0fec88b0d81"}, {file = "argcomplete-1.12.3.tar.gz", hash = "sha256:2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445"}, ] +argon2-cffi = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = 
"sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = 
"argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +asttokens = [] astunparse = [ {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, @@ -1412,6 +2222,15 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +bleach = [] cachetools = [ {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, @@ -1420,6 +2239,7 @@ certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] +cffi = [] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, @@ -1448,6 +2268,7 @@ cycler = [ {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, ] +debugpy = [] decli = [ {file = "decli-0.5.2-py3-none-any.whl", hash = "sha256:d3207bc02d0169bf6ed74ccca09ce62edca0eb25b0ebf8bf4ae3fb8333e15ca0"}, {file = "decli-0.5.2.tar.gz", hash = "sha256:f2cde55034a75c819c630c7655a844c612f2598c42c21299160465df6ad463ad"}, @@ -1456,6 +2277,10 @@ decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", 
hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, @@ -1488,10 +2313,16 @@ docker-pycreds = [ {file = "docker-pycreds-0.4.0.tar.gz", hash = "sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4"}, {file = "docker_pycreds-0.4.0-py2.py3-none-any.whl", hash = "sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49"}, ] +entrypoints = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] +executing = [] eyepie = [ {file = "eyepie-0.6.7-py3-none-any.whl", hash = "sha256:baf651bc86c617eb1b07cd1add14110e2dec9f793f68065926a36688cca392da"}, {file = "eyepie-0.6.7.tar.gz", hash = "sha256:bcc89b5cd50f87c4363d4bf652ae67e390b682e3aac0f56ad13bb1c7dfea8c6d"}, ] +fastjsonschema = [] filelock = [ {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, @@ -1647,10 +2478,18 @@ importlib-metadata = [ {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, ] +importlib-resources = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +ipykernel = [] +ipython = [] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +ipywidgets = [] itk = [ {file = "itk-5.2.1.post1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d18723ca6791fc5d9c7498e03d73929df56acffd9290ed8f61a24f25a138951e"}, {file = "itk-5.2.1.post1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fddd62554da37254eb8de4cd2660a8d9e601af88df50e7017202ed269c20a584"}, @@ -1784,10 +2623,25 @@ itk-segmentation = [ {file = "itk_segmentation-5.2.1.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e7d4953bd4e2b2d36539944c8bef33e50266398249e8af3c4a5b31a291f72b"}, {file = "itk_segmentation-5.2.1.post1-cp39-cp39-win_amd64.whl", hash = "sha256:136b995ee4f65096792c8be41c696d1dd384364f89cc84a0d8fb003f12ab6b9e"}, ] +jedi = [ + {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, + {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, +] jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] +jsonschema = [] +jupyter = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = 
"sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] +jupyter-client = [] +jupyter-console = [] +jupyter-core = [] +jupyterlab-pygments = [] +jupyterlab-widgets = [] keras = [ {file = "keras-2.9.0-py2.py3-none-any.whl", hash = "sha256:55911256f89cfc9343c9fbe4b61ec45a2d33d89729cbe1ab9dcacf8b07b8b6ab"}, ] @@ -1849,6 +2703,7 @@ libclang = [ {file = "libclang-14.0.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:58b9679868b2d6b5172ded26026c2f71306c4cabd6d15b93b597446fd677eb98"}, {file = "libclang-14.0.1-py2.py3-none-win_amd64.whl", hash = "sha256:1a4f0d5959c801c975950926cffb9b45521c890d7c4b730d8a1f688d75b25de9"}, ] +lxml = [] markdown = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, @@ -1932,6 +2787,15 @@ matplotlib = [ {file = "matplotlib-3.5.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:24173c23d1bcbaed5bf47b8785d27933a1ac26a5d772200a0f3e0e38f471b001"}, {file = "matplotlib-3.5.2.tar.gz", hash = "sha256:48cf850ce14fa18067f2d9e0d646763681948487a8080ec0af2686468b4607a2"}, ] +matplotlib-inline = [] +mistune = [] +nbclient = [] +nbconvert = [] +nbformat = [] +nest-asyncio = [ + {file = "nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, + {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, +] networkx = [ {file = "networkx-2.8.4-py3-none-any.whl", hash = "sha256:6933b9b3174a0bdf03c911bb4a1ee43a86ce3edeb813e37e1d4c553b3f4a2c4f"}, {file = "networkx-2.8.4.tar.gz", hash = "sha256:5e53f027c0d567cf1f884dbb283224df525644e43afd1145d64c9d88a3584762"}, @@ -1940,6 +2804,7 @@ nodeenv = [ {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] +notebook = [] numpy = [ {file = "numpy-1.22.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3"}, {file = "numpy-1.22.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887"}, @@ -1999,9 +2864,25 @@ pandas = [ {file = "pandas-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:09d8be7dd9e1c4c98224c4dfe8abd60d145d934e9fc1f5f411266308ae683e6a"}, {file = "pandas-1.4.2.tar.gz", hash = "sha256:92bc1fc585f1463ca827b45535957815b7deb218c549b7c18402c322c7549a12"}, ] +pandocfilters = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] pathtools = [ {file = "pathtools-0.1.2.tar.gz", hash = "sha256:7c35c5421a39bb82e58018febd90e3b6e5db34c5443aaaf742b3f33d4655f1c0"}, ] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", 
hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] pillow = [ {file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"}, {file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"}, @@ -2042,6 +2923,7 @@ pillow = [ {file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"}, {file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"}, ] +pkgutil-resolve-name = [] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, @@ -2054,6 +2936,10 @@ pre-commit = [ {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, ] +prometheus-client = [ + {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, + {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, +] promise = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] @@ -2123,6 +3009,14 @@ psutil = [ {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, ] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2157,10 +3051,38 @@ pyasn1-modules = [ {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = 
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pygments = [] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = 
"pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, @@ -2205,6 +3127,8 @@ pywavelets = [ {file = "PyWavelets-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:a354979e2ee8cd71a8952ded381f3d9f981692b73c6842bcc6c9f64047e0a5be"}, {file = "PyWavelets-1.3.0.tar.gz", hash = "sha256:cbaa9d62052d9daf8da765fc8e7c30c38ea2b8e9e1c18841913dfb4aec671ee5"}, ] +pywin32 = [] +pywinpty = [] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -2240,6 +3164,9 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +pyzmq = [] +qtconsole = [] +qtpy = [] questionary = [ {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"}, {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"}, @@ -2309,6 +3236,10 @@ scipy = [ {file = "scipy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:9dd4012ac599a1e7eb63c114d1eee1bcfc6dc75a29b589ff0ad0bb3d9412034f"}, {file = "scipy-1.8.1.tar.gz", hash = "sha256:9e3fb1b0e896f14a85aa9a28d5f755daaeeb54c897b746df7a55ccb02b340f33"}, ] +send2trash = [ + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, +] sentry-sdk = [ {file = "sentry-sdk-1.5.12.tar.gz", hash = "sha256:259535ba66933eacf85ab46524188c84dcb4c39f40348455ce15e2c0aca68863"}, {file = "sentry_sdk-1.5.12-py2.py3-none-any.whl", hash = "sha256:778b53f0a6c83b1ee43d3b7886318ba86d975e686cb2c7906ccc35b334360be1"}, @@ -2402,6 +3333,8 @@ smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +soupsieve = [] +stack-data = [] tensorboard = [ {file = "tensorboard-2.9.1-py3-none-any.whl", hash = "sha256:baa727f791776f9e5841d347127720ceed4bbd59c36b40604b95fb2ae6029276"}, ] @@ -2471,10 +3404,15 @@ tensorflow-probability = [ termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, ] +terminado = [] tifffile = [ {file = "tifffile-2022.5.4-py3-none-any.whl", hash = "sha256:52b4c02040d00c1811e26c0f6abd41e77e2d57559b3657ff3e873955f74f5c57"}, {file = "tifffile-2022.5.4.tar.gz", hash = "sha256:b03147a15862b7c1d90d47435197f149bef7a52c25ad67cf1f9b465faa71b8d2"}, ] +tinycss2 = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = 
"sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2487,10 +3425,12 @@ tomlkit = [ {file = "tomlkit-0.11.0-py3-none-any.whl", hash = "sha256:0f4050db66fd445b885778900ce4dd9aea8c90c4721141fde0d6ade893820ef1"}, {file = "tomlkit-0.11.0.tar.gz", hash = "sha256:71ceb10c0eefd8b8f11fe34e8a51ad07812cb1dc3de23247425fbc9ddc47b9dd"}, ] +tornado = [] tqdm = [ {file = "tqdm-4.64.0-py2.py3-none-any.whl", hash = "sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"}, {file = "tqdm-4.64.0.tar.gz", hash = "sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d"}, ] +traitlets = [] typeguard = [ {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, @@ -2515,10 +3455,15 @@ wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] werkzeug = [ {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] +widgetsnbextension = [] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, diff --git a/pyproject.toml b/pyproject.toml index 68a1bb7..1e9e1bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ tensorflow-addons = "^0.17.1" wandb = "^0.12.18" pandas = "^1.4.2" tensorflow-probability = "^0.17.0" +jupyter = "^1.0.0" [tool.poetry.dev-dependencies] pytest = "^6.2.5"