Compare commits


1 Commit

| Author | SHA1 | Message | Date |
|:---|:---|:---|:---|
| | b7049f5b47 | Parallelize dataset transformations | 2021-06-24 19:30:46 +02:00 |
24 changed files with 80466 additions and 161258 deletions


@@ -2,15 +2,10 @@
 locimend is a tool that corrects DNA sequencing errors using Deep Learning.
-The goal is to provide a correct DNA sequence, when a sequence containing errors is provided.
-It provides both a command-line program and a REST API.
 ## Technologies
 - Tensorflow
 - Biopython
-- FastAPI
 ## Installation
@@ -42,70 +37,13 @@ cd locimend
 nix-shell
 ```
-5. Install the dependencies via poetry:
-```bash
-poetry install
-```
 After running these commands, you will find yourself in a shell that
 contains all the needed dependencies.
 ## Usage
-### Training the model
-The following command creates the dataset, trains the Deep Learning model and shows the accuracy and AUC:
+The following command creates the dataset, trains the Deep Learning model and shows the accuracy:
 ```bash
-poetry run python locimend/main.py train <data file> <label file>
+poetry run python src/model.py
 ```
-- <data file>: FASTQ file containing the sequences with errors
-- <label file>: FASTQ file containing the sequences without errors
-Both files must contain the canonical and read simulated sequences in the same positions (same row).
-A dataset is provided to train the model; to proceed, execute the following command:
-```bash
-poetry run python locimend/main.py train data/curesim-HVR.fastq data/HVR.fastq
-```
-### Inference
-A trained model is provided, which can be used to infer the correct sequences. There are two ways to interact with it:
-- Command-line execution
-- REST API
-#### Command-line
-The following command will infer the correct sequence and print it:
-```bash
-poetry run python locimend/main.py infer "<DNA sequence>"
-```
-#### REST API
-It is also possible to serve the model via a REST API; to start the web server, run the following command:
-```bash
-poetry run api
-```
-The API can be accessed at http://localhost:8000, with either a GET or POST request:
-| Request | Endpoint | Payload |
-|:----:|:-----:|:-----:|
-| GET | / | Sequence as a path parameter (in the URL) |
-| POST | / | JSON |
-For a POST request the JSON must have the following structure:
-```json
-{"sequence": "<DNA sequence>"}
-```
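For reference, a minimal client sketch for the REST API described above (removed in this commit) could look like the following, assuming the server started by `poetry run api` is listening on http://localhost:8000 and that the `requests` package is available:

```python
# Minimal sketch of a client for the (removed) REST API, assuming the
# server from `poetry run api` is listening on http://localhost:8000.
from requests import get, post

# GET: pass the sequence as a path parameter
response = get("http://localhost:8000/ACGTACGT")
print(response.json())  # {"sequence": "<corrected sequence>"}

# POST: send the sequence as a JSON payload
response = post("http://localhost:8000/", json={"sequence": "ACGTACGT"})
print(response.json())
```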

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,5 +0,0 @@
{ sources ? import ./nix/sources.nix, pkgs ? import sources.nixpkgs { } }:
with pkgs;
poetry2nix.mkPoetryApplication { projectDir = ./.; }


@@ -1,14 +0,0 @@
{ sources ? import ./nix/sources.nix, pkgs ? import sources.nixpkgs { } }:
with pkgs;
let locimend = callPackage ./default.nix { };
in {
  docker = dockerTools.streamLayeredImage {
    name = "locimend";
    contents = [ locimend ];
    config.Cmd = [ "api" ];
  };
}

docs/locimend.ipynb Normal file

@@ -0,0 +1,388 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "locimend.ipynb",
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "code",
"metadata": {
"id": "sRYtN362elcw"
},
"source": [
"# Constants\n",
"BASES = \"ACGT\"\n",
"TRAIN_DATASET = \"data/train_data.tfrecords\"\n",
"TEST_DATASET = \"data/test_data.tfrecords\"\n",
"EVAL_DATASET = \"data/eval_data.tfrecords\"\n",
"EPOCHS = 1000\n",
"BATCH_SIZE = 256\n",
"LEARNING_RATE = 0.004\n",
"L2 = 0.001\n",
"LOG_DIR = \"logs\""
],
"execution_count": 4,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "mjwJOPSbvA0Y",
"outputId": "bb7fce1c-5758-4da8-e8a1-acd5275f979e"
},
"source": [
"!mkdir logs\n",
"!mkdir data\n",
"!curl -fL https://git.coolneng.duckdns.org/coolneng/locimend/raw/branch/master/data/HVR.fastq -o data/HVR.fastq\n",
"!curl -fL https://git.coolneng.duckdns.org/coolneng/locimend/raw/branch/master/data/curesim-HVR.fastq -o data/curesim-HVR.fastq"
],
"execution_count": 3,
"outputs": [
{
"output_type": "stream",
"text": [
"mkdir: cannot create directory logs: File exists\n",
" % Total % Received % Xferd Average Speed Time Time Time Current\n",
" Dload Upload Total Spent Left Speed\n",
"100 1074k 100 1074k 0 0 804k 0 0:00:01 0:00:01 --:--:-- 804k\n",
" % Total % Received % Xferd Average Speed Time Time Time Current\n",
" Dload Upload Total Spent Left Speed\n",
"100 1484k 100 1484k 0 0 321k 0 0:00:04 0:00:04 --:--:-- 321k\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "-uWm7bS7fkRE",
"outputId": "347d17aa-752e-425c-e727-71df2a32da67"
},
"source": [
"!pip install biopython"
],
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"text": [
"Collecting biopython\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/5a/42/de1ed545df624180b84c613e5e4de4848f72989ce5846a74af6baa0737b9/biopython-1.79-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (2.3MB)\n",
"\u001b[K |████████████████████████████████| 2.3MB 5.2MB/s \n",
"\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from biopython) (1.19.5)\n",
"Installing collected packages: biopython\n",
"Successfully installed biopython-1.79\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "CKFwG1_afwFU"
},
"source": [
"from typing import List, Tuple\n",
"\n",
"from Bio.motifs import create\n",
"from Bio.SeqIO import parse\n",
"from numpy.random import random\n",
"from tensorflow import Tensor, int64, stack, cast, int32\n",
"from tensorflow.sparse import to_dense\n",
"from tensorflow.data import TFRecordDataset\n",
"from tensorflow.io import (\n",
" FixedLenFeature,\n",
" TFRecordWriter,\n",
" VarLenFeature,\n",
" parse_single_example,\n",
")\n",
"from tensorflow.train import Example, Feature, Features, Int64List\n",
"\n",
"\n",
"\n",
"def generate_example(sequence, label, base_counts) -> bytes:\n",
" \"\"\"\n",
" Create a binary-string for each sequence containing the sequence and the bases' counts\n",
" \"\"\"\n",
" schema = {\n",
" \"A_counts\": Feature(int64_list=Int64List(value=[sum(base_counts[\"A\"])])),\n",
" \"C_counts\": Feature(int64_list=Int64List(value=[sum(base_counts[\"C\"])])),\n",
" \"G_counts\": Feature(int64_list=Int64List(value=[sum(base_counts[\"G\"])])),\n",
" \"T_counts\": Feature(int64_list=Int64List(value=[sum(base_counts[\"T\"])])),\n",
" \"sequence\": Feature(int64_list=Int64List(value=encode_sequence(sequence))),\n",
" \"label\": Feature(int64_list=Int64List(value=encode_sequence(label))),\n",
" }\n",
" example = Example(features=Features(feature=schema))\n",
" return example.SerializeToString()\n",
"\n",
"\n",
"def encode_sequence(sequence) -> List[int]:\n",
" \"\"\"\n",
" Encode the DNA sequence using the indices of the BASES constant\n",
" \"\"\"\n",
" encoded_sequence = [BASES.index(element) for element in sequence]\n",
" return encoded_sequence\n",
"\n",
"\n",
"def read_fastq(data_file, label_file) -> List[bytes]:\n",
" \"\"\"\n",
" Parses a data and a label FASTQ files and generates a List of serialized Examples\n",
" \"\"\"\n",
" examples = []\n",
" with open(data_file) as data, open(label_file) as labels:\n",
" for element, label in zip(parse(data, \"fastq\"), parse(labels, \"fastq\")):\n",
" motifs = create([element.seq])\n",
" example = generate_example(\n",
" sequence=str(element.seq),\n",
" label=str(label.seq),\n",
" base_counts=motifs.counts,\n",
" )\n",
" examples.append(example)\n",
" return examples\n",
"\n",
"\n",
"def create_dataset(\n",
" data_file, label_file, train_eval_test_split=[0.8, 0.1, 0.1]\n",
") -> None:\n",
" \"\"\"\n",
" Create a training, evaluation and test dataset with a 80/10/30 split respectively\n",
" \"\"\"\n",
" data = read_fastq(data_file, label_file)\n",
" with TFRecordWriter(TRAIN_DATASET) as training, TFRecordWriter(\n",
" TEST_DATASET\n",
" ) as test, TFRecordWriter(EVAL_DATASET) as evaluation:\n",
" for element in data:\n",
" if random() < train_eval_test_split[0]:\n",
" training.write(element)\n",
" elif random() < train_eval_test_split[0] + train_eval_test_split[1]:\n",
" evaluation.write(element)\n",
" else:\n",
" test.write(element)\n",
"\n",
"\n",
"def transform_features(parsed_features) -> List[Tensor]:\n",
" \"\"\"\n",
" Cast and transform the parsed features of an Example into a list of Tensors\n",
" \"\"\"\n",
" sparse_features = [\"sequence\", \"label\"]\n",
" for feature in sparse_features:\n",
" parsed_features[feature] = cast(parsed_features[feature], int32)\n",
" parsed_features[feature] = to_dense(parsed_features[feature])\n",
" for base in BASES:\n",
" parsed_features[f\"{base}_counts\"] = cast(\n",
" parsed_features[f\"{base}_counts\"], int32\n",
" )\n",
" features = list(parsed_features.values())[:-1]\n",
" return features\n",
"\n",
"\n",
"def process_input(byte_string) -> Tuple[Tensor, Tensor]:\n",
" \"\"\"\n",
" Parse a byte-string into an Example object\n",
" \"\"\"\n",
" schema = {\n",
" \"A_counts\": FixedLenFeature(shape=[1], dtype=int64),\n",
" \"C_counts\": FixedLenFeature(shape=[1], dtype=int64),\n",
" \"G_counts\": FixedLenFeature(shape=[1], dtype=int64),\n",
" \"T_counts\": FixedLenFeature(shape=[1], dtype=int64),\n",
" \"sequence\": VarLenFeature(dtype=int64),\n",
" \"label\": VarLenFeature(dtype=int64),\n",
" }\n",
" parsed_features = parse_single_example(byte_string, features=schema)\n",
" features = transform_features(parsed_features)\n",
" return stack(features, axis=-1), parsed_features[\"label\"]\n",
"\n",
"\n",
"def read_dataset(filepath) -> TFRecordDataset:\n",
" \"\"\"\n",
" Read TFRecords files and generate a dataset\n",
" \"\"\"\n",
" data_input = TFRecordDataset(filenames=filepath)\n",
" dataset = data_input.map(map_func=process_input)\n",
" shuffled_dataset = dataset.shuffle(buffer_size=10000, seed=42)\n",
" batched_dataset = shuffled_dataset.batch(batch_size=BATCH_SIZE).repeat(count=EPOCHS)\n",
" return batched_dataset\n",
"\n",
"\n",
"def dataset_creation(\n",
" data_file, label_file\n",
") -> Tuple[TFRecordDataset, TFRecordDataset, TFRecordDataset]:\n",
" \"\"\"\n",
" Generate the TFRecord files and split them into training, validation and test data\n",
" \"\"\"\n",
" create_dataset(data_file, label_file)\n",
" train_data = read_dataset(TRAIN_DATASET)\n",
" eval_data = read_dataset(EVAL_DATASET)\n",
" test_data = read_dataset(TEST_DATASET)\n",
" return train_data, eval_data, test_data\n",
"\n"
],
"execution_count": 6,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "UXAAAVolf7GA"
},
"source": [
"from random import seed\n",
"\n",
"from tensorflow.keras import Model, Sequential, layers\n",
"from tensorflow.keras.callbacks import TensorBoard\n",
"from tensorflow.keras.losses import sparse_categorical_crossentropy\n",
"from tensorflow.keras.optimizers import Adam\n",
"from tensorflow.keras.regularizers import l2\n",
"from tensorflow.random import set_seed\n",
"\n",
"\n",
"def build_model() -> Model:\n",
" \"\"\"\n",
" Build the CNN model\n",
" \"\"\"\n",
" model = Sequential()\n",
" model.add(\n",
" layers.Conv1D(\n",
" filters=16,\n",
" kernel_size=5,\n",
" activation=\"relu\",\n",
" kernel_regularizer=l2(L2),\n",
" )\n",
" )\n",
" model.add(layers.MaxPool1D(pool_size=3, strides=1))\n",
" model.add(\n",
" layers.Conv1D(\n",
" filters=16,\n",
" kernel_size=3,\n",
" activation=\"relu\",\n",
" kernel_regularizer=l2(L2),\n",
" )\n",
" )\n",
" model.add(layers.MaxPool1D(pool_size=3, strides=1))\n",
" model.add(layers.Flatten())\n",
" model.add(\n",
" layers.Dense(\n",
" units=16,\n",
" activation=\"relu\",\n",
" kernel_regularizer=l2(L2),\n",
" )\n",
" )\n",
" model.add(layers.Dropout(rate=0.3))\n",
" model.add(\n",
" layers.Dense(\n",
" units=16,\n",
" activation=\"relu\",\n",
" kernel_regularizer=l2(L2),\n",
" )\n",
" )\n",
" model.add(layers.Dropout(rate=0.3))\n",
" # FIXME Change output size\n",
" model.add(layers.Dense(units=len(BASES), activation=\"softmax\"))\n",
" model.compile(\n",
" optimizer=Adam(LEARNING_RATE),\n",
" loss=sparse_categorical_crossentropy,\n",
" metrics=[\"accuracy\"],\n",
" )\n",
" return model\n",
"\n",
"\n",
"def show_metrics(model, eval_dataset, test_dataset) -> None:\n",
" \"\"\"\n",
" Show the model metrics\n",
" \"\"\"\n",
" eval_metrics = model.evaluate(eval_dataset, verbose=0)\n",
" test_metrics = model.evaluate(test_dataset, verbose=0)\n",
" print(f\"Final eval metrics - loss: {eval_metrics[0]} - accuracy: {eval_metrics[1]}\")\n",
" print(f\"Final test metrics - loss: {test_metrics[0]} - accuracy: {test_metrics[1]}\")\n",
"\n",
"\n",
"def run(data_file, label_file, seed_value=42) -> None:\n",
" \"\"\"\n",
" Create a dataset, a model and runs training and evaluation on it\n",
" \"\"\"\n",
" seed(seed_value)\n",
" set_seed(seed_value)\n",
" train_data, eval_data, test_data = dataset_creation(data_file, label_file)\n",
" tensorboard = TensorBoard(log_dir=LOG_DIR, histogram_freq=1, profile_batch=0)\n",
" model = build_model()\n",
" print(\"Training the model\")\n",
" model.fit(\n",
" train_data,\n",
" epochs=EPOCHS,\n",
" validation_data=eval_data,\n",
" callbacks=[tensorboard],\n",
" verbose=0,\n",
" )\n",
" print(\"Training complete. Obtaining final metrics...\")\n",
" show_metrics(model, eval_data, test_data)"
],
"execution_count": 7,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
},
"id": "V8BuUmpIgDqc",
"outputId": "e7f697dc-a459-4e4e-ed98-406b44e120fc"
},
"source": [
"run(data_file=\"data/curesim-HVR.fastq\", label_file=\"data/HVR.fastq\")"
],
"execution_count": 8,
"outputs": [
{
"output_type": "stream",
"text": [
"Training the model\n"
],
"name": "stdout"
},
{
"output_type": "error",
"ename": "TypeError",
"evalue": "ignored",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-8-4b0e5d1da156>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata_file\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"data/curesim-HVR.fastq\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel_file\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"data/HVR.fastq\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m<ipython-input-7-d8ed7ecffd74>\u001b[0m in \u001b[0;36mrun\u001b[0;34m(data_file, label_file, seed_value)\u001b[0m\n\u001b[1;32m 84\u001b[0m \u001b[0mvalidation_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0meval_data\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 85\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mtensorboard\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m \u001b[0mverbose\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 87\u001b[0m )\n\u001b[1;32m 88\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Training complete. Obtaining final metrics...\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m 1181\u001b[0m _r=1):\n\u001b[1;32m 1182\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1183\u001b[0;31m \u001b[0mtmp_logs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1184\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1185\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 887\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 888\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mOptionalXlaContext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_jit_compile\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 889\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 890\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 891\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 931\u001b[0m \u001b[0;31m# This is the first call of __call__, so we have to initialize.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 932\u001b[0m \u001b[0minitializers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 933\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_initialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0madd_initializers_to\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitializers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 934\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 935\u001b[0m \u001b[0;31m# At this point we know that the initialization is complete (or less\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m_initialize\u001b[0;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[1;32m 762\u001b[0m self._concrete_stateful_fn = (\n\u001b[1;32m 763\u001b[0m self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\n\u001b[0;32m--> 764\u001b[0;31m *args, **kwds))\n\u001b[0m\u001b[1;32m 765\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 766\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0minvalid_creator_scope\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0munused_args\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0munused_kwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_get_concrete_function_internal_garbage_collected\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 3048\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3049\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3050\u001b[0;31m \u001b[0mgraph_function\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_maybe_define_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3051\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3052\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_maybe_define_function\u001b[0;34m(self, args, kwargs)\u001b[0m\n\u001b[1;32m 3442\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3443\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmissed\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcall_context_key\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3444\u001b[0;31m \u001b[0mgraph_function\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_graph_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3445\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprimary\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcache_key\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3446\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_create_graph_function\u001b[0;34m(self, args, kwargs, override_flat_arg_shapes)\u001b[0m\n\u001b[1;32m 3287\u001b[0m \u001b[0marg_names\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0marg_names\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3288\u001b[0m \u001b[0moverride_flat_arg_shapes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0moverride_flat_arg_shapes\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3289\u001b[0;31m capture_by_value=self._capture_by_value),\n\u001b[0m\u001b[1;32m 3290\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_function_attributes\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3291\u001b[0m \u001b[0mfunction_spec\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfunction_spec\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py\u001b[0m in \u001b[0;36mfunc_graph_from_py_func\u001b[0;34m(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\u001b[0m\n\u001b[1;32m 997\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moriginal_func\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_decorator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munwrap\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpython_func\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 998\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 999\u001b[0;31m \u001b[0mfunc_outputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpython_func\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mfunc_args\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mfunc_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1000\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1001\u001b[0m \u001b[0;31m# invariant: `func_outputs` contains only Tensors, CompositeTensors,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36mwrapped_fn\u001b[0;34m(*args, **kwds)\u001b[0m\n\u001b[1;32m 670\u001b[0m \u001b[0;31m# the function a weak reference to itself to avoid a reference cycle.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 671\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mOptionalXlaContext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcompile_with_xla\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 672\u001b[0;31m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__wrapped__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 673\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mout\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 674\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py\u001b[0m in \u001b[0;36mwrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 984\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# pylint:disable=broad-except\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 985\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"ag_error_metadata\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 986\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mag_error_metadata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_exception\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 987\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 988\u001b[0m \u001b[0;32mraise\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mTypeError\u001b[0m: in user code:\n\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:855 train_function *\n return step_function(self, iterator)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:845 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:1285 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica\n return fn(*args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:838 run_step **\n outputs = model.train_step(data)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:795 train_step\n y_pred = self(x, training=True)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1030 __call__\n outputs = call_fn(inputs, *args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/sequential.py:394 call\n outputs = layer(inputs, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1030 __call__\n outputs = call_fn(inputs, *args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/layers/convolutional.py:249 call\n outputs = self._convolution_op(inputs, self.kernel)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper\n return target(*args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/nn_ops.py:1019 convolution_v2\n name=name)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/nn_ops.py:1149 convolution_internal\n name=name)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper\n return target(*args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/deprecation.py:602 new_func\n return func(*args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/deprecation.py:602 new_func\n return func(*args, **kwargs)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/nn_ops.py:1892 conv1d\n name=name)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/gen_nn_ops.py:973 conv2d\n data_format=data_format, dilations=dilations, name=name)\n /usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/op_def_library.py:558 _apply_op_helper\n inferred_from[input_arg.type_attr]))\n\n TypeError: Input 'filter' of 'Conv2D' Op has type float32 that does not match type int32 of argument 'input'.\n"
]
}
]
}
]
}
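The notebook's final cell fails with `TypeError: Input 'filter' of 'Conv2D' Op has type float32 that does not match type int32 of argument 'input'`: `transform_features` casts every feature to `int32`, and the stacked integer tensor then reaches `Conv1D`, whose kernels are `float32`. A minimal sketch of one possible fix (an assumption on my part, not part of this commit) is to cast the stacked features to `float32` before they enter the model:

```python
# Hypothetical fix for the TypeError above (not part of this commit):
# Conv1D kernels are float32, so the stacked integer features must be
# cast to float32 before they reach the first convolution.
from tensorflow import cast, float32, int32, ones, stack

# Stand-in for the int32 feature tensors produced by transform_features
features = [ones(shape=(80,), dtype=int32) for _ in range(5)]

# Cast the stacked feature tensor to the dtype the convolution expects
model_input = cast(stack(features, axis=-1), float32)
print(model_input.dtype)  # float32
```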

flake.lock generated

@@ -1,41 +0,0 @@
{
  "nodes": {
    "flake-utils": {
      "locked": {
        "lastModified": 1631561581,
        "narHash": "sha256-3VQMV5zvxaVLvqqUrNz3iJelLw30mIVSfZmAaauM3dA=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "7e5bf3925f6fbdfaf50a2a7ca0be2879c4261d19",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1634044603,
        "narHash": "sha256-JX9/U/ci9Gw1fhWjEB3HfzDK8bAbcfQcTO6fEJmgFfo=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "15847b4b4fc260fb400880aa3cbee65a65f252c5",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}


@@ -1,11 +0,0 @@
{
  description =
    "locimend is a tool that corrects DNA sequencing errors using Deep Learning";

  inputs.flake-utils.url = "github:numtide/flake-utils";

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let pkgs = nixpkgs.legacyPackages.${system};
      in { devShell = import ./shell.nix { inherit pkgs; }; });
}


@@ -1,27 +0,0 @@
from fastapi import FastAPI
from pydantic import BaseModel
from uvicorn import run

from locimend.model import infer_sequence

app = FastAPI()


class Input(BaseModel):
    sequence: str


@app.get("/{sequence}")
async def get_sequence_path(sequence: str):
    correct_sequence = await infer_sequence(sequence)
    return {"sequence": correct_sequence}


@app.post("/")
async def get_sequence_body(sequence: Input):
    correct_sequence = await infer_sequence(sequence.sequence)
    return {"sequence": correct_sequence}


def main():
    run(app, host="0.0.0.0")


@@ -1,24 +0,0 @@
class Hyperparameters:
    def __init__(
        self,
        data_file,
        label_file,
        train_dataset="data/train_data.tfrecords",
        test_dataset="data/test_data.tfrecords",
        eval_dataset="data/eval_data.tfrecords",
        epochs=100,
        batch_size=64,
        learning_rate=0.004,
        l2_rate=0.001,
        max_length=80,
    ):
        self.data_file = data_file
        self.label_file = label_file
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.test_dataset = test_dataset
        self.epochs = epochs
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.l2_rate = l2_rate
        self.max_length = max_length
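For illustration, this class is a plain container of training settings; a hypothetical call overriding a couple of defaults might look like this, using the dataset paths from the README:

```python
# Hypothetical usage: keep the dataset paths from the README and shorten
# the run for a quick experiment. All other settings keep their defaults.
hyperparams = Hyperparameters(
    data_file="data/curesim-HVR.fastq",
    label_file="data/HVR.fastq",
    epochs=10,
    batch_size=128,
)
print(hyperparams.learning_rate)  # 0.004 (default)
```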


@@ -1,40 +0,0 @@
from asyncio import run
from argparse import ArgumentParser, Namespace
from time import time

from locimend.model import infer_sequence, train_model


def parse_arguments() -> Namespace:
    parser = ArgumentParser()
    subparsers = parser.add_subparsers(dest="task")
    parser_train = subparsers.add_parser("train")
    parser_infer = subparsers.add_parser("infer")
    parser_train.add_argument(
        "data_file", help="FASTQ file containing the sequences with errors"
    )
    parser_train.add_argument(
        "label_file", help="FASTQ file containing the sequences without errors"
    )
    parser_infer.add_argument("sequence", help="DNA sequence with errors")
    return parser.parse_args()


async def execute_task(args):
    if args.task == "train":
        start_time = time()
        train_model(data_file=args.data_file, label_file=args.label_file)
        end_time = time()
        print(f"Training time: {end_time - start_time}")
    else:
        prediction = await infer_sequence(sequence=args.sequence)
        print(f"Error-corrected sequence: {prediction}")


def main() -> None:
    args = parse_arguments()
    run(execute_task(args))


if __name__ == "__main__":
    main()


@@ -1,89 +0,0 @@
from random import seed

from numpy import argmax
from tensorflow import one_hot
from tensorflow.keras import Model, Sequential
from tensorflow.keras.layers import Dense, Dropout, Input, Masking
from tensorflow.keras.losses import categorical_crossentropy
from tensorflow.keras.models import load_model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l2
from tensorflow.random import set_seed

from locimend.hyperparameters import Hyperparameters
from locimend.preprocessing import (
    BASES,
    dataset_creation,
    decode_sequence,
    encode_sequence,
)


def build_model(hyperparams) -> Model:
    """
    Build the CNN model
    """
    model = Sequential(
        [
            Input(shape=(hyperparams.max_length, len(BASES))),
            Masking(mask_value=-1),
            Dense(
                units=256, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
            ),
            Dropout(rate=0.3),
            Dense(
                units=128, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
            ),
            Dropout(rate=0.3),
            Dense(
                units=64, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
            ),
            Dropout(rate=0.3),
            Dense(units=len(BASES), activation="softmax"),
        ]
    )
    model.compile(
        optimizer=Adam(hyperparams.learning_rate),
        loss=categorical_crossentropy,
        metrics=["accuracy", "AUC"],
    )
    return model


def show_metrics(model, eval_dataset, test_dataset) -> None:
    """
    Show the model metrics
    """
    eval_metrics = model.evaluate(eval_dataset, verbose=0)
    test_metrics = model.evaluate(test_dataset, verbose=0)
    print(f"Eval metrics {eval_metrics}")
    print(f"Test metrics {test_metrics}")


def train_model(data_file, label_file, seed_value=42) -> None:
    """
    Create a dataset and a model, and run training and evaluation on it
    """
    seed(seed_value)
    set_seed(seed_value)
    hyperparams = Hyperparameters(data_file=data_file, label_file=label_file)
    train_data, eval_data, test_data = dataset_creation(hyperparams)
    model = build_model(hyperparams)
    print("Training the model")
    model.fit(train_data, epochs=hyperparams.epochs, validation_data=eval_data)
    print("Training complete. Obtaining the model's metrics...")
    show_metrics(model, eval_data, test_data)
    model.save("trained_model")


async def infer_sequence(sequence) -> str:
    """
    Predict the correct sequence, using the trained model
    """
    model = load_model("trained_model")
    encoded_sequence = encode_sequence(sequence)
    one_hot_encoded_sequence = one_hot(encoded_sequence, depth=len(BASES))
    prediction = model.predict(one_hot_encoded_sequence)
    encoded_prediction = argmax(prediction, axis=1)
    final_prediction = decode_sequence(encoded_prediction)
    return final_prediction
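Since `infer_sequence` is a coroutine, calling it outside the FastAPI handlers or the CLI requires an event loop. A minimal sketch of driving this (now removed) inference path directly, assuming a `trained_model` directory saved by `train_model` exists and the package is still importable:

```python
# Minimal sketch: drive the removed inference path directly, assuming
# train_model() has already saved a model to the "trained_model" directory.
from asyncio import run

from locimend.model import infer_sequence

corrected = run(infer_sequence("ACGTTGCA"))
print(f"Error-corrected sequence: {corrected}")
```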


@@ -17,10 +17,10 @@
"homepage": "", "homepage": "",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "f930ea227cecaed1f1bdb047fef54fe4f0721c8c", "rev": "0d337eb6b77c8911cd02ed92e63fcc2a8949b404",
"sha256": "04khbc44lppzz0m2g56zr7vafv4fvnb7rfbz7c03dqw6k99svj1c", "sha256": "1xm6ss7j3zscpiczz3kxjad3jd1qvy5zpm35kqri6p9mp4jzna1x",
"type": "tarball", "type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/f930ea227cecaed1f1bdb047fef54fe4f0721c8c.tar.gz", "url": "https://github.com/NixOS/nixpkgs/archive/0d337eb6b77c8911cd02ed92e63fcc2a8949b404.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
} }
} }

poetry.lock generated

@@ -1,929 +0,0 @@
[[package]]
name = "absl-py"
version = "0.14.1"
description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
six = "*"
[[package]]
name = "asgiref"
version = "3.4.1"
description = "ASGI specs, helper code, and adapters"
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]
[[package]]
name = "astunparse"
version = "1.6.3"
description = "An AST unparser for Python"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
six = ">=1.6.1,<2.0"
[[package]]
name = "biopython"
version = "1.79"
description = "Freely available tools for computational molecular biology."
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
numpy = "*"
[[package]]
name = "cachetools"
version = "4.2.4"
description = "Extensible memoizing collections and decorators"
category = "main"
optional = false
python-versions = "~=3.5"
[[package]]
name = "certifi"
version = "2021.10.8"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "charset-normalizer"
version = "2.0.7"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.5.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "clang"
version = "5.0"
description = "libclang python bindings"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "click"
version = "8.0.3"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "fastapi"
version = "0.66.1"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
starlette = "0.14.2"
[package.extras]
all = ["requests (>=2.24.0,<3.0.0)", "aiofiles (>=0.5.0,<0.6.0)", "jinja2 (>=2.11.2,<3.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<2.0.0)", "pyyaml (>=5.3.1,<6.0.0)", "graphene (>=2.1.8,<3.0.0)", "ujson (>=4.0.1,<5.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.14.0)", "async_exit_stack (>=1.0.1,<2.0.0)", "async_generator (>=1.10,<2.0.0)"]
dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.14.0)", "graphene (>=2.1.8,<3.0.0)"]
doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=7.1.9,<8.0.0)", "markdown-include (>=0.6.0,<0.7.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.2.0)", "typer-cli (>=0.0.12,<0.0.13)", "pyyaml (>=5.3.1,<6.0.0)"]
test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<3.0.0)", "pytest-asyncio (>=0.14.0,<0.15.0)", "mypy (==0.812)", "flake8 (>=3.8.3,<4.0.0)", "black (==20.8b1)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.15.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.4.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.4.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,<5.0.0)", "async_exit_stack (>=1.0.1,<2.0.0)", "async_generator (>=1.10,<2.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "aiofiles (>=0.5.0,<0.6.0)", "flask (>=1.1.2,<2.0.0)"]
[[package]]
name = "flatbuffers"
version = "1.12"
description = "The FlatBuffers serialization format for Python"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "gast"
version = "0.4.0"
description = "Python AST that abstracts the underlying Python version"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "google-auth"
version = "2.3.0"
description = "Google Authentication Library"
category = "main"
optional = false
python-versions = ">= 3.6"
[package.dependencies]
cachetools = ">=2.0.0,<5.0"
pyasn1-modules = ">=0.2.1"
rsa = ">=3.1.4,<5"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"]
pyopenssl = ["pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
[[package]]
name = "google-auth-oauthlib"
version = "0.4.6"
description = "Google Authentication Library"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
google-auth = ">=1.0.0"
requests-oauthlib = ">=0.7.0"
[package.extras]
tool = ["click (>=6.0.0)"]
[[package]]
name = "google-pasta"
version = "0.2.0"
description = "pasta is an AST-based Python refactoring library"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
six = "*"
[[package]]
name = "grpcio"
version = "1.41.0"
description = "HTTP/2-based RPC framework"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
six = ">=1.5.2"
[package.extras]
protobuf = ["grpcio-tools (>=1.41.0)"]
[[package]]
name = "h11"
version = "0.12.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "h5py"
version = "3.1.0"
description = "Read and write HDF5 files from Python"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
numpy = {version = ">=1.19.3", markers = "python_version >= \"3.9\""}
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "isort"
version = "5.9.3"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "keras"
version = "2.6.0"
description = "TensorFlow Keras."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "keras-preprocessing"
version = "1.1.2"
description = "Easy data preprocessing and data augmentation for deep learning models"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
numpy = ">=1.9.1"
six = ">=1.9.0"
[package.extras]
image = ["scipy (>=0.14)", "Pillow (>=5.2.0)"]
pep8 = ["flake8"]
tests = ["pandas", "pillow", "tensorflow", "keras", "pytest", "pytest-xdist", "pytest-cov"]
[[package]]
name = "markdown"
version = "3.3.4"
description = "Python implementation of Markdown."
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
testing = ["coverage", "pyyaml"]
[[package]]
name = "numpy"
version = "1.19.5"
description = "NumPy is the fundamental package for array computing with Python."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "oauthlib"
version = "3.1.1"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
rsa = ["cryptography (>=3.0.0,<4)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0,<4)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "opt-einsum"
version = "3.3.0"
description = "Optimizing numpys einsum function"
category = "main"
optional = false
python-versions = ">=3.5"
[package.dependencies]
numpy = ">=1.7"
[package.extras]
docs = ["sphinx (==1.2.3)", "sphinxcontrib-napoleon", "sphinx-rtd-theme", "numpydoc"]
tests = ["pytest", "pytest-cov", "pytest-pep8"]
[[package]]
name = "protobuf"
version = "3.18.1"
description = "Protocol Buffers"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "pyasn1"
version = "0.4.8"
description = "ASN.1 types and codecs"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "pyasn1-modules"
version = "0.2.8"
description = "A collection of ASN.1-based protocols modules."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
pyasn1 = ">=0.4.6,<0.5.0"
[[package]]
name = "pydantic"
version = "1.8.2"
description = "Data validation and settings management using python 3.6 type hinting"
category = "main"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
typing-extensions = ">=3.7.4.3"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pyflakes"
version = "2.4.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "requests"
version = "2.26.0"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
name = "requests-oauthlib"
version = "1.3.0"
description = "OAuthlib authentication support for Requests."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
oauthlib = ">=3.0.0"
requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "rsa"
version = "4.7.2"
description = "Pure-Python RSA implementation"
category = "main"
optional = false
python-versions = ">=3.5, <4"
[package.dependencies]
pyasn1 = ">=0.1.3"
[[package]]
name = "six"
version = "1.15.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "starlette"
version = "0.14.2"
description = "The little ASGI library that shines."
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
full = ["aiofiles", "graphene", "itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"]
[[package]]
name = "tensorboard"
version = "2.7.0"
description = "TensorBoard lets you watch Tensors Flow"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
absl-py = ">=0.4"
google-auth = ">=1.6.3,<3"
google-auth-oauthlib = ">=0.4.1,<0.5"
grpcio = ">=1.24.3"
markdown = ">=2.6.8"
numpy = ">=1.12.0"
protobuf = ">=3.6.0"
requests = ">=2.21.0,<3"
tensorboard-data-server = ">=0.6.0,<0.7.0"
tensorboard-plugin-wit = ">=1.6.0"
werkzeug = ">=0.11.15"
[[package]]
name = "tensorboard-data-server"
version = "0.6.1"
description = "Fast data loading for TensorBoard"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "tensorboard-plugin-wit"
version = "1.8.0"
description = "What-If Tool TensorBoard plugin."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "tensorflow"
version = "2.6.0"
description = "TensorFlow is an open source machine learning framework for everyone."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
absl-py = ">=0.10,<1.0"
astunparse = ">=1.6.3,<1.7.0"
clang = ">=5.0,<6.0"
flatbuffers = ">=1.12.0,<1.13.0"
gast = "0.4.0"
google-pasta = ">=0.2,<1.0"
grpcio = ">=1.37.0,<2.0"
h5py = ">=3.1.0,<3.2.0"
keras = ">=2.6,<3.0"
keras-preprocessing = ">=1.1.2,<1.2.0"
numpy = ">=1.19.2,<1.20.0"
opt-einsum = ">=3.3.0,<3.4.0"
protobuf = ">=3.9.2"
six = ">=1.15.0,<1.16.0"
tensorboard = ">=2.6,<3.0"
tensorflow-estimator = ">=2.6,<3.0"
termcolor = ">=1.1.0,<1.2.0"
typing-extensions = ">=3.7.4,<3.8.0"
wrapt = ">=1.12.1,<1.13.0"
[[package]]
name = "tensorflow-estimator"
version = "2.6.0"
description = "TensorFlow Estimator."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "termcolor"
version = "1.1.0"
description = "ANSII Color formatting for output in terminal."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "typing-extensions"
version = "3.7.4.3"
description = "Backported and Experimental Type Hints for Python 3.5+"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "urllib3"
version = "1.26.7"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "uvicorn"
version = "0.14.0"
description = "The lightning-fast ASGI server."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
asgiref = ">=3.3.4"
click = ">=7"
h11 = ">=0.8"
[package.extras]
standard = ["websockets (>=9.1)", "httptools (>=0.2.0,<0.3.0)", "watchgod (>=0.6)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"]
[[package]]
name = "werkzeug"
version = "2.0.2"
description = "The comprehensive WSGI web application library."
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
watchdog = ["watchdog"]
[[package]]
name = "wrapt"
version = "1.12.1"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
python-versions = "*"
[metadata]
lock-version = "1.1"
python-versions = "3.9.*"
content-hash = "a87a2e774ea01273c9b218f32a57ea87af82546663f7f977fefe2456f9a1f8c3"
[metadata.files]
absl-py = [
{file = "absl-py-0.14.1.tar.gz", hash = "sha256:eb0383bd431c0d7b2320179904cab00120a10977e3c9671d99efbbed17efb55a"},
{file = "absl_py-0.14.1-py3-none-any.whl", hash = "sha256:565a2c1be855e466e697e1be6b9876c2435dda926954d1de4abf0d592561ece8"},
]
asgiref = [
{file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"},
{file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"},
]
astunparse = [
{file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"},
{file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"},
]
biopython = [
{file = "biopython-1.79-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:72a1477cf1701964c7224e506a54fd65d1cc5228da200b634a17992230aa1cbd"},
{file = "biopython-1.79-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:365569543ea58dd07ef205ec351c23b6c1a3200d5d321eb28ceaecd55eb5955e"},
{file = "biopython-1.79-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4be31815226052d86d4c2f6a103c40504e34bba3e25cc1b1d687a3203c42fb6e"},
{file = "biopython-1.79-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ceab668be9cbdcddef55ad459f87acd0316ae4a00d32251fea4cf665f5062fda"},
{file = "biopython-1.79-cp36-cp36m-win32.whl", hash = "sha256:83bfea8a19f9352c47b13965c4b73853e7aeef3c5aed8489895b0679e32c621b"},
{file = "biopython-1.79-cp36-cp36m-win_amd64.whl", hash = "sha256:98deacc30b8654cfcdcf707d93fa4e3c8717bbda07c3f9f828cf84753d4a1e4d"},
{file = "biopython-1.79-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:884a2b99ac7820cb84f70089769a512e3238ee60438b8c934ed519613dc570ce"},
{file = "biopython-1.79-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51eb467a60c38820ad1e6c3a7d4cb10535606f559646e824cc65c96091d91ff7"},
{file = "biopython-1.79-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03ee5c72b3cc3f0675a8c22ce1c45fe99a32a60db18df059df479ae6cf619708"},
{file = "biopython-1.79-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9580978803b582e0612b71673cab289e6bf261a865009cfb9501d65bc726a76e"},
{file = "biopython-1.79-cp37-cp37m-win32.whl", hash = "sha256:5ae69c5e09769390643aa0f8064517665df6fb99c37433821d6664584d0ecb8c"},
{file = "biopython-1.79-cp37-cp37m-win_amd64.whl", hash = "sha256:f0a7e1d94a318f74974345fd0987ec389b16988ec484e67218e900b116b932a8"},
{file = "biopython-1.79-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aa23a83a220486af6193760d079b36543fe00afcfbd18280ca2fd0b2c1c8dd6d"},
{file = "biopython-1.79-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3d4eec2e348c3d97a7fde80ee0f2b8ebeed849d2bd64a616833a9be03b93c8"},
{file = "biopython-1.79-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:947b793e804c59ea45ae46945a57612ad1789ca87af4af0d6a62dcecf3a6246a"},
{file = "biopython-1.79-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d9f6ce961e0c380e2a5435f64c96421dbcebeab6a1b41506bd81251feb733c08"},
{file = "biopython-1.79-cp38-cp38-win32.whl", hash = "sha256:155c5b95857bca7ebd607210cb9d8ea459bb0b86b3ca37ea44ec47c26ede7e9a"},
{file = "biopython-1.79-cp38-cp38-win_amd64.whl", hash = "sha256:2dbb4388c75b5dfca8ce729e791f465c9c878dbd7ba2ab9a1f9854609d2b5426"},
{file = "biopython-1.79-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:76988ed3d7383d566db1d7fc69c9cf136c6275813fb749fc6753c340f81f1a8f"},
{file = "biopython-1.79-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e921571b51514a6d35944242d6fef6427c3998acf58940fe1f209ac8a92a6e87"},
{file = "biopython-1.79-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf634a56f449a4123e48e538d661948e5ac29fb452acd2962b8cb834b472a9d7"},
{file = "biopython-1.79-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ab93d5749b375be3682866b3a606aa2ebd3e6d868079793925bf4fbb0987cf1f"},
{file = "biopython-1.79-cp39-cp39-win32.whl", hash = "sha256:8f33dafd3c7254fff5e1684b965e45a7c08d9b8e1bf51562b0a521ff9a6f5ea0"},
{file = "biopython-1.79-cp39-cp39-win_amd64.whl", hash = "sha256:b3ab26f26a1956ef26303386510d84e917e31fcbbc94918c336da0163ef628df"},
{file = "biopython-1.79.tar.gz", hash = "sha256:edb07eac99d3b8abd7ba56ff4bedec9263f76dfc3c3f450e7d2e2bcdecf8559b"},
]
cachetools = [
{file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"},
{file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"},
]
certifi = [
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
]
charset-normalizer = [
{file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
{file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
]
clang = [
{file = "clang-5.0-py2-none-any.whl", hash = "sha256:b9301dff507041b5019b30ae710b78b0552c1ca1d4441b8dfa93c2e85078a5f8"},
{file = "clang-5.0.tar.gz", hash = "sha256:ceccae97eda0225a5b44d42ffd61102e248325c2865ca53e4407746464a5333a"},
]
click = [
{file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"},
{file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
fastapi = [
{file = "fastapi-0.66.1-py3-none-any.whl", hash = "sha256:958ed7341f97292e2fc3e6401830bbe203a917af93cd10bb6392be170ad3c15f"},
{file = "fastapi-0.66.1.tar.gz", hash = "sha256:1ac66c0635301bbd99785fb825300064d54adb774e8a5562661901de14ce6560"},
]
flatbuffers = [
{file = "flatbuffers-1.12-py2.py3-none-any.whl", hash = "sha256:9e9ef47fa92625c4721036e7c4124182668dc6021d9e7c73704edd395648deb9"},
{file = "flatbuffers-1.12.tar.gz", hash = "sha256:63bb9a722d5e373701913e226135b28a6f6ac200d5cc7b4d919fa38d73b44610"},
]
gast = [
{file = "gast-0.4.0-py3-none-any.whl", hash = "sha256:b7adcdd5adbebf1adf17378da5ba3f543684dbec47b1cda1f3997e573cd542c4"},
{file = "gast-0.4.0.tar.gz", hash = "sha256:40feb7b8b8434785585ab224d1568b857edb18297e5a3047f1ba012bc83b42c1"},
]
google-auth = [
{file = "google-auth-2.3.0.tar.gz", hash = "sha256:2800f6dfad29c6ced5faf9ca0c38ea8ba1ebe2559b10c029bd021e3de3301627"},
{file = "google_auth-2.3.0-py2.py3-none-any.whl", hash = "sha256:91892727c09cf5d090c391936a8e67ef5b9a9794c2f426b3d0ceedddbcc0ef50"},
]
google-auth-oauthlib = [
{file = "google-auth-oauthlib-0.4.6.tar.gz", hash = "sha256:a90a072f6993f2c327067bf65270046384cda5a8ecb20b94ea9a687f1f233a7a"},
{file = "google_auth_oauthlib-0.4.6-py2.py3-none-any.whl", hash = "sha256:3f2a6e802eebbb6fb736a370fbf3b055edcb6b52878bf2f26330b5e041316c73"},
]
google-pasta = [
{file = "google-pasta-0.2.0.tar.gz", hash = "sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e"},
{file = "google_pasta-0.2.0-py2-none-any.whl", hash = "sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954"},
{file = "google_pasta-0.2.0-py3-none-any.whl", hash = "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed"},
]
grpcio = [
{file = "grpcio-1.41.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9ecd0fc34aa46eeac24f4d20e67bafaf72ca914f99690bf2898674905eaddaf9"},
{file = "grpcio-1.41.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:d539ebd05a2bbfbf897d41738d37d162d5c3d9f2b1f8ddf2c4f75e2c9cf59907"},
{file = "grpcio-1.41.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2410000eb57cf76b05b37d2aee270b686f0a7876710850a2bba92b4ed133e026"},
{file = "grpcio-1.41.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be3c6ac822edb509aeef41361ca9c8c5ee52cb9e4973e1977d2bb7d6a460fd97"},
{file = "grpcio-1.41.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c4bdd1d646365d10ba1468bcf234ea5ad46e8ce2b115983e8563248614910a"},
{file = "grpcio-1.41.0-cp310-cp310-win32.whl", hash = "sha256:7033199706526e7ee06a362e38476dfdf2ddbad625c19b67ed30411d1bb25a18"},
{file = "grpcio-1.41.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb64abf0d92134cb0ba4496a3b7ab918588eee42de20e5b3507fe6ee16db97ee"},
{file = "grpcio-1.41.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:b6b68c444abbaf4a2b944a61cf35726ab9645f45d416bcc7cf4addc4b2f2d53d"},
{file = "grpcio-1.41.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:5292a627b44b6d3065de4a364ead23bab3c9d7a7c05416a9de0c0624d0fe03f4"},
{file = "grpcio-1.41.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:1820845e7e6410240eff97742e9f76cd5bf10ca01d36a322e86c0bd5340ac25b"},
{file = "grpcio-1.41.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:462178987f0e5c60d6d1b79e4e95803a4cd789db961d6b3f087245906bb5ae04"},
{file = "grpcio-1.41.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:7b07cbbd4eea56738e995fcbba3b60e41fd9aa9dac937fb7985c5dcbc7626260"},
{file = "grpcio-1.41.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a92e4df5330cd384984e04804104ae34f521345917813aa86fc0930101a3697"},
{file = "grpcio-1.41.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd2f1cf11768d1f6fbe4e13e8b8fb0ccfe9914ceeff55a367d5571e82eeb543"},
{file = "grpcio-1.41.0-cp36-cp36m-win32.whl", hash = "sha256:59645b2d9f19b5ff30cb46ddbcaa09c398f9cd81e4e476b21c7c55ae1e942807"},
{file = "grpcio-1.41.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0abd56d90dff3ed566807520de1385126dded21e62d3490a34c180a91f94c1f4"},
{file = "grpcio-1.41.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:9674a9d3f23702e35a89e22504f41b467893cf704f627cc9cdd118cf1dcc8e26"},
{file = "grpcio-1.41.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:c95dd6e60e059ff770a2ac9f5a202b75dd64d76b0cd0c48f27d58907e43ed6a6"},
{file = "grpcio-1.41.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:a3cd7f945d3e3b82ebd2a4c9862eb9891a5ac87f84a7db336acbeafd86e6c402"},
{file = "grpcio-1.41.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c07acd49541f5f6f9984fe0adf162d77bf70e0f58e77f9960c6f571314ff63a4"},
{file = "grpcio-1.41.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:7da3f6f6b857399c9ad85bcbffc83189e547a0a1a777ab68f5385154f8bc1ed4"},
{file = "grpcio-1.41.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39ce785f0cbd07966a9019386b7a054615b2da63da3c7727f371304d000a1890"},
{file = "grpcio-1.41.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07594e585a5ba25cf331ddb63095ca51010c34e328a822cb772ffbd5daa62cb5"},
{file = "grpcio-1.41.0-cp37-cp37m-win32.whl", hash = "sha256:3bbeee115b05b22f6a9fa9bc78f9ab8d9d6bb8c16fdfc60401fc8658beae1099"},
{file = "grpcio-1.41.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dcb5f324712a104aca4a459e524e535f205f36deb8005feb4f9d3ff0a22b5177"},
{file = "grpcio-1.41.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:83c1e731c2b76f26689ad88534cafefe105dcf385567bead08f5857cb308246b"},
{file = "grpcio-1.41.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:5d4b30d068b022e412adcf9b14c0d9bcbc872e9745b91467edc0a4c700a8bba6"},
{file = "grpcio-1.41.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d71aa430b2ac40e18e388504ac34cc91d49d811855ca507c463a21059bf364f0"},
{file = "grpcio-1.41.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c8c5bc498f6506b6041c30afb7a55c57a9fd535d1a0ac7cdba9b5fd791a85633"},
{file = "grpcio-1.41.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a144f6cecbb61aace12e5920840338a3d246123a41d795e316e2792e9775ad15"},
{file = "grpcio-1.41.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e516124010ef60d5fc2e0de0f1f987599249dc55fd529001f17f776a4145767f"},
{file = "grpcio-1.41.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1e0a4c86d4cbd93059d5eeceed6e1c2e3e1494e1bf40be9b8ab14302c576162"},
{file = "grpcio-1.41.0-cp38-cp38-win32.whl", hash = "sha256:a614224719579044bd7950554d3b4c1793bb5715cbf0f0399b1f21d283c40ef6"},
{file = "grpcio-1.41.0-cp38-cp38-win_amd64.whl", hash = "sha256:b2de4e7b5a930be04a4d05c9f5fce7e9191217ccdc174b026c2a7928770dca9f"},
{file = "grpcio-1.41.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:056806e83eaa09d0af0e452dd353db8f7c90aa2dedcce1112a2d21592550f6b1"},
{file = "grpcio-1.41.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5502832b7cec670a880764f51a335a19b10ff5ab2e940e1ded67f39b88aa02b1"},
{file = "grpcio-1.41.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:585847ed190ea9cb4d632eb0ebf58f1d299bbca5e03284bc3d0fa08bab6ea365"},
{file = "grpcio-1.41.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:d0cc0393744ce3ce1b237ae773635cc928470ff46fb0d3f677e337a38e5ed4f6"},
{file = "grpcio-1.41.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:2882b62f74de8c8a4f7b2be066f6230ecc46f4edc8f42db1fb7358200abe3b25"},
{file = "grpcio-1.41.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:297ee755d3c6cd7e7d3770f298f4d4d4b000665943ae6d2888f7407418a9a510"},
{file = "grpcio-1.41.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace080a9c3c673c42adfd2116875a63fec9613797be01a6105acf7721ed0c693"},
{file = "grpcio-1.41.0-cp39-cp39-win32.whl", hash = "sha256:1bcbeac764bbae329bc2cc9e95d0f4d3b0fb456b92cf12e7e06e3e860a4b31cf"},
{file = "grpcio-1.41.0-cp39-cp39-win_amd64.whl", hash = "sha256:4537bb9e35af62c5189493792a8c34d127275a6d175c8ad48b6314cacba4021e"},
{file = "grpcio-1.41.0.tar.gz", hash = "sha256:15c04d695833c739dbb25c88eaf6abd9a461ec0dbd32f44bc8769335a495cf5a"},
]
h11 = [
{file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
{file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
]
h5py = [
{file = "h5py-3.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1cd367f89a5441236bdbb795e9fb9a9e3424929c00b4a54254ca760437f83d69"},
{file = "h5py-3.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fea05349f63625a8fb808e57e42bb4c76930cf5d50ac58b678c52f913a48a89b"},
{file = "h5py-3.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2e37352ddfcf9d77a2a47f7c8f7e125c6d20cc06c2995edeb7be222d4e152636"},
{file = "h5py-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e33f61d3eb862614c0f273a1f993a64dc2f093e1a3094932c50ada9d2db2170f"},
{file = "h5py-3.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:236ac8d943be30b617ab615c3d4a4bf4a438add2be87e54af3687ab721a18fac"},
{file = "h5py-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:02c391fdb980762a1cc03a4bcaecd03dc463994a9a63a02264830114a96e111f"},
{file = "h5py-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f89a3dae38843ffa49d17a31a3509a8129e9b46ece602a0138e1ed79e685c361"},
{file = "h5py-3.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ba71f6229d2013fbb606476ecc29c6223fc16b244d35fcd8566ad9dbaf910857"},
{file = "h5py-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:dccb89358bc84abcd711363c3e138f9f4eccfdf866f2139a8e72308328765b2c"},
{file = "h5py-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cb74df83709d6d03d11e60b9480812f58da34f194beafa8c8314dbbeeedfe0a6"},
{file = "h5py-3.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:80c623be10479e81b64fa713b7ed4c0bbe9f02e8e7d2a2e5382336087b615ce4"},
{file = "h5py-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:1cdfd1c5449ca1329d152f0b66830e93226ebce4f5e07dd8dc16bfc2b1a49d7b"},
{file = "h5py-3.1.0.tar.gz", hash = "sha256:1e2516f190652beedcb8c7acfa1c6fa92d99b42331cbef5e5c7ec2d65b0fc3c2"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
{file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
]
keras = [
{file = "keras-2.6.0-py2.py3-none-any.whl", hash = "sha256:504af5656a9829fe803ce48a8580ef16916e89906aceddad9e098614269437e7"},
]
keras-preprocessing = [
{file = "Keras_Preprocessing-1.1.2-py2.py3-none-any.whl", hash = "sha256:7b82029b130ff61cc99b55f3bd27427df4838576838c5b2f65940e4fcec99a7b"},
{file = "Keras_Preprocessing-1.1.2.tar.gz", hash = "sha256:add82567c50c8bc648c14195bf544a5ce7c1f76761536956c3d2978970179ef3"},
]
markdown = [
{file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"},
{file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"},
]
numpy = [
{file = "numpy-1.19.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff"},
{file = "numpy-1.19.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aeb9ed923be74e659984e321f609b9ba54a48354bfd168d21a2b072ed1e833ea"},
{file = "numpy-1.19.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8b5e972b43c8fc27d56550b4120fe6257fdc15f9301914380b27f74856299fea"},
{file = "numpy-1.19.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:43d4c81d5ffdff6bae58d66a3cd7f54a7acd9a0e7b18d97abb255defc09e3140"},
{file = "numpy-1.19.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:a4646724fba402aa7504cd48b4b50e783296b5e10a524c7a6da62e4a8ac9698d"},
{file = "numpy-1.19.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:2e55195bc1c6b705bfd8ad6f288b38b11b1af32f3c8289d6c50d47f950c12e76"},
{file = "numpy-1.19.5-cp36-cp36m-win32.whl", hash = "sha256:39b70c19ec771805081578cc936bbe95336798b7edf4732ed102e7a43ec5c07a"},
{file = "numpy-1.19.5-cp36-cp36m-win_amd64.whl", hash = "sha256:dbd18bcf4889b720ba13a27ec2f2aac1981bd41203b3a3b27ba7a33f88ae4827"},
{file = "numpy-1.19.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:603aa0706be710eea8884af807b1b3bc9fb2e49b9f4da439e76000f3b3c6ff0f"},
{file = "numpy-1.19.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cae865b1cae1ec2663d8ea56ef6ff185bad091a5e33ebbadd98de2cfa3fa668f"},
{file = "numpy-1.19.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:36674959eed6957e61f11c912f71e78857a8d0604171dfd9ce9ad5cbf41c511c"},
{file = "numpy-1.19.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:06fab248a088e439402141ea04f0fffb203723148f6ee791e9c75b3e9e82f080"},
{file = "numpy-1.19.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6149a185cece5ee78d1d196938b2a8f9d09f5a5ebfbba66969302a778d5ddd1d"},
{file = "numpy-1.19.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:50a4a0ad0111cc1b71fa32dedd05fa239f7fb5a43a40663269bb5dc7877cfd28"},
{file = "numpy-1.19.5-cp37-cp37m-win32.whl", hash = "sha256:d051ec1c64b85ecc69531e1137bb9751c6830772ee5c1c426dbcfe98ef5788d7"},
{file = "numpy-1.19.5-cp37-cp37m-win_amd64.whl", hash = "sha256:a12ff4c8ddfee61f90a1633a4c4afd3f7bcb32b11c52026c92a12e1325922d0d"},
{file = "numpy-1.19.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf2402002d3d9f91c8b01e66fbb436a4ed01c6498fffed0e4c7566da1d40ee1e"},
{file = "numpy-1.19.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1ded4fce9cfaaf24e7a0ab51b7a87be9038ea1ace7f34b841fe3b6894c721d1c"},
{file = "numpy-1.19.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:012426a41bc9ab63bb158635aecccc7610e3eff5d31d1eb43bc099debc979d94"},
{file = "numpy-1.19.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:759e4095edc3c1b3ac031f34d9459fa781777a93ccc633a472a5468587a190ff"},
{file = "numpy-1.19.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:a9d17f2be3b427fbb2bce61e596cf555d6f8a56c222bd2ca148baeeb5e5c783c"},
{file = "numpy-1.19.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99abf4f353c3d1a0c7a5f27699482c987cf663b1eac20db59b8c7b061eabd7fc"},
{file = "numpy-1.19.5-cp38-cp38-win32.whl", hash = "sha256:384ec0463d1c2671170901994aeb6dce126de0a95ccc3976c43b0038a37329c2"},
{file = "numpy-1.19.5-cp38-cp38-win_amd64.whl", hash = "sha256:811daee36a58dc79cf3d8bdd4a490e4277d0e4b7d103a001a4e73ddb48e7e6aa"},
{file = "numpy-1.19.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c843b3f50d1ab7361ca4f0b3639bf691569493a56808a0b0c54a051d260b7dbd"},
{file = "numpy-1.19.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d6631f2e867676b13026e2846180e2c13c1e11289d67da08d71cacb2cd93d4aa"},
{file = "numpy-1.19.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7fb43004bce0ca31d8f13a6eb5e943fa73371381e53f7074ed21a4cb786c32f8"},
{file = "numpy-1.19.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2ea52bd92ab9f768cc64a4c3ef8f4b2580a17af0a5436f6126b08efbd1838371"},
{file = "numpy-1.19.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:400580cbd3cff6ffa6293df2278c75aef2d58d8d93d3c5614cd67981dae68ceb"},
{file = "numpy-1.19.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:df609c82f18c5b9f6cb97271f03315ff0dbe481a2a02e56aeb1b1a985ce38e60"},
{file = "numpy-1.19.5-cp39-cp39-win32.whl", hash = "sha256:ab83f24d5c52d60dbc8cd0528759532736b56db58adaa7b5f1f76ad551416a1e"},
{file = "numpy-1.19.5-cp39-cp39-win_amd64.whl", hash = "sha256:0eef32ca3132a48e43f6a0f5a82cb508f22ce5a3d6f67a8329c81c8e226d3f6e"},
{file = "numpy-1.19.5-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a0d53e51a6cb6f0d9082decb7a4cb6dfb33055308c4c44f53103c073f649af73"},
{file = "numpy-1.19.5.zip", hash = "sha256:a76f502430dd98d7546e1ea2250a7360c065a5fdea52b2dffe8ae7180909b6f4"},
]
oauthlib = [
{file = "oauthlib-3.1.1-py2.py3-none-any.whl", hash = "sha256:42bf6354c2ed8c6acb54d971fce6f88193d97297e18602a3a886603f9d7730cc"},
{file = "oauthlib-3.1.1.tar.gz", hash = "sha256:8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3"},
]
opt-einsum = [
{file = "opt_einsum-3.3.0-py3-none-any.whl", hash = "sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147"},
{file = "opt_einsum-3.3.0.tar.gz", hash = "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549"},
]
protobuf = [
{file = "protobuf-3.18.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fa6d1049d5315566f55c04d0b50c0033415144f96a9d25c820dc542fe2bb7f45"},
{file = "protobuf-3.18.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e2790c580070cff2921b93d562539ae027064340151c50db6aaf94c33048cd"},
{file = "protobuf-3.18.1-cp36-cp36m-win32.whl", hash = "sha256:7e2f0677d68ecdd1cfda2abea65873f5bc7c3f5aae199404a3f5c1d1198c1a63"},
{file = "protobuf-3.18.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6f714f5de9d40b3bec90ede4a688cce52f637ccdc5403afcda1f67598f4fdcd7"},
{file = "protobuf-3.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a7be937c319146cc9f2626f0181e6809062c353e1fe449ecd0df374ba1036b2"},
{file = "protobuf-3.18.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:10544fc7ace885a882623083c24da5b14148c77563acddc3c58d66f6153c09cd"},
{file = "protobuf-3.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ee8b11e3eb2ed38f12137c3c132270a0b1dd509e317228ac47b67f21a583f1"},
{file = "protobuf-3.18.1-cp37-cp37m-win32.whl", hash = "sha256:c492c217d3f69f4d2d5619571e52ab98538edbf53caf67e53ea92bd0a3b5670f"},
{file = "protobuf-3.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3c1644f8a7f19b45c7a4c32278e2a55ae9e7e2f9e5f02d960a61f04a4890d3e6"},
{file = "protobuf-3.18.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9ac691f7b24e4371dcd3980e4f5d6c840a2010da37986203053fee995786ec5"},
{file = "protobuf-3.18.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:93bad12895d8b0ebc66b605c2ef1802311595f881aef032d9f13282b7550e6b2"},
{file = "protobuf-3.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0851b5b89191e1976d34fa2e8eb8659829dfb45252053224cf9df857fb5f6a45"},
{file = "protobuf-3.18.1-cp38-cp38-win32.whl", hash = "sha256:09d9268f6f9da81b7657adcf2fb397524c82f20cdf9e0db3ff4e7567977abd67"},
{file = "protobuf-3.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6d927774c0ec746fed15a4faff5f44aad0b7a3421fadb6f3ae5ca1f2f8ae26e"},
{file = "protobuf-3.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4d19c9cb805fd2be1d59eee39e152367ee92a30167e77bd06c8819f8f0009a4c"},
{file = "protobuf-3.18.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:387f621bf7295a331f8c8a6962d097ceddeb85356792888cfa6a5c6bfc6886a4"},
{file = "protobuf-3.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1c5d3966c856f60a9d8d62f4455d70c31026422acdd5c228edf22b65b16c38"},
{file = "protobuf-3.18.1-cp39-cp39-win32.whl", hash = "sha256:f20f803892f2135e8b96dc58c9a0c6a7ad8436794bf8784af229498d939b4c77"},
{file = "protobuf-3.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:d76201380f41a2d83fb613a4683059d1fcafbe969518b3e409e279a8788fde2f"},
{file = "protobuf-3.18.1-py2.py3-none-any.whl", hash = "sha256:61ca58e14033ca0dfa484a31d57237c1be3b6013454c7f53876a20fc88dd69b1"},
{file = "protobuf-3.18.1.tar.gz", hash = "sha256:1c9bb40503751087300dd12ce2e90899d68628977905c76effc48e66d089391e"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pyasn1-modules = [
{file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"},
{file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"},
{file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"},
{file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"},
{file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"},
{file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"},
{file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"},
{file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"},
{file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"},
{file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"},
{file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"},
{file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"},
{file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"},
]
pydantic = [
{file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"},
{file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"},
{file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"},
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"},
{file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"},
{file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"},
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"},
{file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"},
{file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"},
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"},
{file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"},
{file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"},
{file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"},
]
pyflakes = [
{file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
]
requests = [
{file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
{file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
]
requests-oauthlib = [
{file = "requests-oauthlib-1.3.0.tar.gz", hash = "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"},
{file = "requests_oauthlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d"},
{file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"},
]
rsa = [
{file = "rsa-4.7.2-py3-none-any.whl", hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2"},
{file = "rsa-4.7.2.tar.gz", hash = "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9"},
]
six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
]
starlette = [
{file = "starlette-0.14.2-py3-none-any.whl", hash = "sha256:3c8e48e52736b3161e34c9f0e8153b4f32ec5d8995a3ee1d59410d92f75162ed"},
{file = "starlette-0.14.2.tar.gz", hash = "sha256:7d49f4a27f8742262ef1470608c59ddbc66baf37c148e938c7038e6bc7a998aa"},
]
tensorboard = [
{file = "tensorboard-2.7.0-py3-none-any.whl", hash = "sha256:239f78a4a8dff200ce585a030c787773a8c1184d5c159252f5f85bac4e3c3b38"},
]
tensorboard-data-server = [
{file = "tensorboard_data_server-0.6.1-py3-none-any.whl", hash = "sha256:809fe9887682d35c1f7d1f54f0f40f98bb1f771b14265b453ca051e2ce58fca7"},
{file = "tensorboard_data_server-0.6.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:fa8cef9be4fcae2f2363c88176638baf2da19c5ec90addb49b1cde05c95c88ee"},
{file = "tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl", hash = "sha256:d8237580755e58eff68d1f3abefb5b1e39ae5c8b127cc40920f9c4fb33f4b98a"},
]
tensorboard-plugin-wit = [
{file = "tensorboard_plugin_wit-1.8.0-py3-none-any.whl", hash = "sha256:2a80d1c551d741e99b2f197bb915d8a133e24adb8da1732b840041860f91183a"},
]
tensorflow = [
{file = "tensorflow-2.6.0-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:c67fad296a3a2133b7a14da5f06c9937e7911b02c5d7a3ff6ba52a1d79b6bc9e"},
{file = "tensorflow-2.6.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:8b5ce09ede0fe45ef100f4dc65cf3f46722194e75139f85d524058315e2ce9fa"},
{file = "tensorflow-2.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dea97f664246e185d79cbe40a86309527affd4232f06afa8a6500c4fc4b64a03"},
{file = "tensorflow-2.6.0-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:4716c9b25a61a2c79b1f253d1e114f1f8679241559c13ad18c657c626a7d5924"},
{file = "tensorflow-2.6.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e45e026a9d08c89cecc1160d8248135e2fb79bdc3267328399e1fb25ce583bd6"},
{file = "tensorflow-2.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6e38b6969414d16afc560c58ca34e1328cc0a5dbd644b64e060f5be8a6653274"},
{file = "tensorflow-2.6.0-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:2a067d22a356c2cd4753bdd16ee492c55a610f5ebc52713e2954c642f070321c"},
{file = "tensorflow-2.6.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:2c9b8c6adc060acfcf805a2ea501db0124b679d95b522fd5983a4c110e8e0264"},
{file = "tensorflow-2.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d6468e05552720100e8f94097feb770de320e4c8c244323a8746bd84e5ba4052"},
{file = "tensorflow-2.6.0-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00b1af0a0c5c102db19caceffac4bd4e6c536e6d7512144c241a4ace4428e7c6"},
{file = "tensorflow-2.6.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bc73ebdd30c48cfc27ba307271117e6dbb795b37396ed817b2fec9393380b115"},
{file = "tensorflow-2.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:bfb255c2b0400bc5b4060dda098d46cd7ddeb53b7cbac1dfa29435612cba828c"},
]
tensorflow-estimator = [
{file = "tensorflow_estimator-2.6.0-py2.py3-none-any.whl", hash = "sha256:cf78528998efdb637ac0abaf525c929bf192767544eb24ae20d9266effcf5afd"},
]
termcolor = [
{file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"},
]
typing-extensions = [
{file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
{file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
urllib3 = [
{file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
{file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
]
uvicorn = [
{file = "uvicorn-0.14.0-py3-none-any.whl", hash = "sha256:2a76bb359171a504b3d1c853409af3adbfa5cef374a4a59e5881945a97a93eae"},
{file = "uvicorn-0.14.0.tar.gz", hash = "sha256:45ad7dfaaa7d55cab4cd1e85e03f27e9d60bc067ddc59db52a2b0aeca8870292"},
]
werkzeug = [
{file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"},
{file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"},
]
wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
]

pyproject.toml

@@ -6,19 +6,14 @@ authors = ["coolneng <akasroua@gmail.com>"]
 license = "GPL-3.0-or-later"
 
 [tool.poetry.dependencies]
-python = "3.9.*"
+python = "3.8.*"
 tensorflow = "^2.4.1"
 biopython = "^1.78"
-fastapi = "^0.66.0"
-uvicorn = "^0.14.0"
 
 [tool.poetry.dev-dependencies]
 isort = "^5.8.0"
 pyflakes = "^2.3.1"
 
-[tool.poetry.scripts]
-api = "locimend.api:main"
-
 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"

shell.nix

@@ -1,9 +1,9 @@
-{ pkgs ? import <nixpkgs> { } }:
+{ sources ? import ./nix/sources.nix, pkgs ? import sources.nixpkgs { } }:
 
 with pkgs;
 
 mkShell {
-  buildInputs = [ python39 poetry ];
+  buildInputs = [ python38 poetry ];
   shellHook = ''
     export LD_LIBRARY_PATH=${pkgs.stdenv.cc.cc.lib}/lib:$LD_LIBRARY_PATH
     unset SOURCE_DATE_EPOCH

src/constants.py Normal file (+9 lines)

@@ -0,0 +1,9 @@
BASES = "ACGT-"
TRAIN_DATASET = "data/train_data.tfrecords"
TEST_DATASET = "data/test_data.tfrecords"
EVAL_DATASET = "data/eval_data.tfrecords"
EPOCHS = 1000
BATCH_SIZE = 256
LEARNING_RATE = 0.004
L2 = 0.001
LOG_DIR = "logs"
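A note on the encoding these constants drive: BASES includes the alignment gap character "-", so encoded sequences range over five symbol classes. The preprocessing diff further down elides the body of encode_sequence, so the following is a minimal sketch of the index encoding its docstring describes, not the repository's verbatim code:

```python
# Hedged sketch of the index encoding described by encode_sequence's docstring.
BASES = "ACGT-"  # the alignment gap "-" is class 4

def encode_sequence(sequence):
    # Map each base, including alignment gaps, to its index in BASES
    return [BASES.index(base) for base in sequence]

print(encode_sequence("ACG-T"))  # [0, 1, 2, 4, 3]
```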

src/model.py Normal file (+95 lines)

@@ -0,0 +1,95 @@
from random import seed

from tensorflow.keras import Model, Sequential, layers
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.losses import sparse_categorical_crossentropy
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l2
from tensorflow.random import set_seed

from constants import *
from preprocessing import dataset_creation


def build_model() -> Model:
    """
    Build the CNN model
    """
    model = Sequential()
    model.add(
        layers.Conv1D(
            filters=16,
            kernel_size=5,
            activation="relu",
            kernel_regularizer=l2(L2),
        )
    )
    model.add(layers.MaxPool1D(pool_size=3, strides=1))
    model.add(
        layers.Conv1D(
            filters=16,
            kernel_size=3,
            activation="relu",
            kernel_regularizer=l2(L2),
        )
    )
    model.add(layers.MaxPool1D(pool_size=3, strides=1))
    model.add(layers.Flatten())
    model.add(
        layers.Dense(
            units=16,
            activation="relu",
            kernel_regularizer=l2(L2),
        )
    )
    model.add(layers.Dropout(rate=0.3))
    model.add(
        layers.Dense(
            units=16,
            activation="relu",
            kernel_regularizer=l2(L2),
        )
    )
    model.add(layers.Dropout(rate=0.3))
    # FIXME Change output size
    model.add(layers.Dense(units=len(BASES), activation="softmax"))
    model.compile(
        optimizer=Adam(LEARNING_RATE),
        loss=sparse_categorical_crossentropy,
        metrics=["accuracy"],
    )
    return model


def show_metrics(model, eval_dataset, test_dataset) -> None:
    """
    Show the model metrics
    """
    eval_metrics = model.evaluate(eval_dataset, verbose=0)
    test_metrics = model.evaluate(test_dataset, verbose=0)
    print(f"Final eval metrics - loss: {eval_metrics[0]} - accuracy: {eval_metrics[1]}")
    print(f"Final test metrics - loss: {test_metrics[0]} - accuracy: {test_metrics[1]}")


def run(data_file, label_file, seed_value=42) -> None:
    """
    Create a dataset, a model and runs training and evaluation on it
    """
    seed(seed_value)
    set_seed(seed_value)
    train_data, eval_data, test_data = dataset_creation(data_file, label_file)
    tensorboard = TensorBoard(log_dir=LOG_DIR, histogram_freq=1, profile_batch=0)
    model = build_model()
    print("Training the model")
    model.fit(
        train_data,
        epochs=EPOCHS,
        validation_data=eval_data,
        callbacks=[tensorboard],
    )
    print("Training complete. Obtaining final metrics...")
    show_metrics(model, eval_data, test_data)


if __name__ == "__main__":
    run(data_file="data/curesim-HVR.fastq", label_file="data/HVR.fastq")
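With the one-hot step removed from preprocessing (see the diff below), labels arrive at the loss as plain integer class indices, which is exactly the contract of sparse_categorical_crossentropy: integer targets scored against softmax probabilities. A small self-contained illustration with made-up values (the FIXME above notes the output shape itself still needs adjusting):

```python
import numpy as np
from tensorflow.keras.losses import sparse_categorical_crossentropy

# Integer-encoded targets: indices into BASES ("ACGT-"), not one-hot vectors
labels = np.array([0, 2, 4])
# Softmax outputs over the five classes of BASES
predictions = np.array(
    [
        [0.90, 0.04, 0.03, 0.02, 0.01],
        [0.05, 0.05, 0.80, 0.05, 0.05],
        [0.20, 0.20, 0.20, 0.20, 0.20],
    ]
)
# One cross-entropy value per position: -log(p[label])
print(sparse_categorical_crossentropy(labels, predictions).numpy())
```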

src/preprocessing.py

@@ -3,13 +3,14 @@ from typing import Dict, List, Tuple
 
 from Bio.pairwise2 import align
 from Bio.SeqIO import parse
 from numpy.random import random
-from tensorflow import Tensor, int64, one_hot
-from tensorflow.data import TFRecordDataset
+from tensorflow import Tensor, int64
+from tensorflow.data import AUTOTUNE, TFRecordDataset
 from tensorflow.io import TFRecordWriter, VarLenFeature, parse_single_example
 from tensorflow.sparse import to_dense
 from tensorflow.train import Example, Feature, Features, Int64List
 
-BASES = "ACGT-"
+from constants import *
@@ -22,6 +23,19 @@ def align_sequences(sequence, label) -> Tuple[str, str]:
     return aligned_seq, aligned_label
 
 
+def generate_example(sequence, label) -> bytes:
+    """
+    Create a binary-string for each sequence containing the sequence and the bases' counts
+    """
+    aligned_seq, aligned_label = align_sequences(sequence, label)
+    schema = {
+        "sequence": Feature(int64_list=Int64List(value=encode_sequence(aligned_seq))),
+        "label": Feature(int64_list=Int64List(value=encode_sequence(aligned_label))),
+    }
+    example = Example(features=Features(feature=schema))
+    return example.SerializeToString()
+
+
 def encode_sequence(sequence) -> List[int]:
     """
     Encode the DNA sequence using the indices of the BASES constant
@@ -30,59 +44,29 @@ def encode_sequence(sequence) -> List[int]:
     return encoded_sequence
 
 
-def decode_sequence(sequence) -> str:
-    """
-    Decode an index encoded sequence back to the human readable format
-    """
-    decoded_list = [BASES[element] for element in sequence]
-    sequence = "".join(decoded_list)
-    return sequence
-
-
-def prepare_sequences(sequence, label):
-    """
-    Align and encode the sequences to obtain a fixed length output in order to perform batching
-    """
-    encoded_sequences = []
-    aligned_seq, aligned_label = align_sequences(sequence, label)
-    for item in [aligned_seq, aligned_label]:
-        encoded_sequences.append(encode_sequence(item))
-    return encoded_sequences[0], encoded_sequences[1]
-
-
-def generate_example(sequence, label) -> bytes:
-    """
-    Create a binary-string for each sequence containing the sequence and the bases' counts
-    """
-    processed_seq, processed_label = prepare_sequences(sequence, label)
-    schema = {
-        "sequence": Feature(int64_list=Int64List(value=processed_seq)),
-        "label": Feature(int64_list=Int64List(value=processed_label)),
-    }
-    example = Example(features=Features(feature=schema))
-    return example.SerializeToString()
-
-
-def read_fastq(hyperparams) -> List[bytes]:
+def read_fastq(data_file, label_file) -> List[bytes]:
     """
     Parses a data and a label FASTQ files and generates a List of serialized Examples
     """
     examples = []
-    with open(hyperparams.data_file) as data, open(hyperparams.label_file) as labels:
+    with open(data_file) as data, open(label_file) as labels:
         for element, label in zip(parse(data, "fastq"), parse(labels, "fastq")):
-            example = generate_example(sequence=str(element.seq), label=str(label.seq))
+            example = generate_example(
+                sequence=str(element.seq),
+                label=str(label.seq),
+            )
             examples.append(example)
     return examples
 
 
-def create_dataset(hyperparams, dataset_split=[0.8, 0.1, 0.1]) -> None:
+def create_dataset(data_file, label_file, dataset_split=[0.8, 0.1, 0.1]) -> None:
     """
     Create a training, evaluation and test dataset with a 80/10/10 split respectively
     """
-    data = read_fastq(hyperparams)
-    with TFRecordWriter(hyperparams.train_dataset) as training, TFRecordWriter(
-        hyperparams.test_dataset
-    ) as test, TFRecordWriter(hyperparams.eval_dataset) as evaluation:
+    data = read_fastq(data_file, label_file)
+    with TFRecordWriter(TRAIN_DATASET) as training, TFRecordWriter(
+        TEST_DATASET
+    ) as test, TFRecordWriter(EVAL_DATASET) as evaluation:
         for element in data:
             if random() < dataset_split[0]:
                 training.write(element)
@@ -94,13 +78,12 @@ def create_dataset(hyperparams, dataset_split=[0.8, 0.1, 0.1]) -> None:
 def transform_features(parsed_features) -> Dict[str, Tensor]:
     """
-    Transform the parsed features of an Example into a list of dense one hot encoded Tensors
+    Transform the parsed features of an Example into a list of dense Tensors
     """
     features = {}
     sparse_features = ["sequence", "label"]
     for element in sparse_features:
         features[element] = to_dense(parsed_features[element])
-        features[element] = one_hot(features[element], depth=len(BASES))
     return features
@@ -117,32 +100,25 @@ def process_input(byte_string) -> Tuple[Tensor, Tensor]:
     return features["sequence"], features["label"]
 
 
-def read_dataset(filepath, hyperparams) -> TFRecordDataset:
+def read_dataset(filepath) -> TFRecordDataset:
     """
     Read TFRecords files and generate a dataset
     """
     data_input = TFRecordDataset(filenames=filepath)
     dataset = data_input.map(map_func=process_input, num_parallel_calls=AUTOTUNE)
     shuffled_dataset = dataset.shuffle(buffer_size=10000, seed=42)
-    batched_dataset = shuffled_dataset.padded_batch(
-        batch_size=hyperparams.batch_size,
-        padded_shapes=(
-            [hyperparams.max_length, len(BASES)],
-            [hyperparams.max_length, len(BASES)],
-        ),
-        padding_values=-1.0,
-    )
+    batched_dataset = shuffled_dataset.batch(batch_size=BATCH_SIZE).repeat(count=EPOCHS)
     return batched_dataset
 
 
 def dataset_creation(
-    hyperparams,
+    data_file, label_file
 ) -> Tuple[TFRecordDataset, TFRecordDataset, TFRecordDataset]:
     """
     Generate the TFRecord files and split them into training, validation and test data
     """
-    create_dataset(hyperparams)
-    train_data = read_dataset(hyperparams.train_dataset, hyperparams)
-    eval_data = read_dataset(hyperparams.eval_dataset, hyperparams)
-    test_data = read_dataset(hyperparams.test_dataset, hyperparams)
+    create_dataset(data_file, label_file)
+    train_data = read_dataset(TRAIN_DATASET)
+    eval_data = read_dataset(EVAL_DATASET)
+    test_data = read_dataset(TEST_DATASET)
     return train_data, eval_data, test_data
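Two details of the new pipeline are easy to miss in the diff. First, the write and read paths form a symmetric TFRecord schema: generate_example serializes each pair as Int64List features, and process_input recovers them via VarLenFeature plus to_dense. A round-trip sketch restricted to the calls that actually appear above (the five-element sequence is an arbitrary stand-in):

```python
from tensorflow import int64
from tensorflow.io import VarLenFeature, parse_single_example
from tensorflow.sparse import to_dense
from tensorflow.train import Example, Feature, Features, Int64List

encoded = [0, 1, 2, 3, 4]  # an index-encoded "ACGT-" sequence
schema = {
    "sequence": Feature(int64_list=Int64List(value=encoded)),
    "label": Feature(int64_list=Int64List(value=encoded)),
}
serialized = Example(features=Features(feature=schema)).SerializeToString()

# Mirror of the read path: parse the variable-length features, then densify
features = {"sequence": VarLenFeature(int64), "label": VarLenFeature(int64)}
parsed = parse_single_example(serialized, features)
print(to_dense(parsed["sequence"]).numpy())  # [0 1 2 3 4]
```

Second, create_dataset routes each record with a random draw, so the 80/10/10 split is only approximate. Only the training branch is visible in the hunk; the eval/test arms below are an assumed cumulative-threshold continuation, not the file's verbatim logic:

```python
from numpy.random import random

def assign_split(dataset_split=[0.8, 0.1, 0.1]) -> str:
    draw = random()
    if draw < dataset_split[0]:
        return "train"  # ~80% of records
    if draw < dataset_split[0] + dataset_split[1]:
        return "eval"   # ~10% (assumed branch, elided in the hunk)
    return "test"       # ~10% (assumed branch, elided in the hunk)

counts = {"train": 0, "eval": 0, "test": 0}
for _ in range(10_000):
    counts[assign_split()] += 1
print(counts)  # roughly {'train': 8000, 'eval': 1000, 'test': 1000}
```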

File diff suppressed because one or more lines are too long

Binary file not shown.