latentblending/latent_blending.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "t9DPiP5BgqfF"
},
"source": [
"# Instructions\n",
"### 0) [optional]: change the model type in the cell below, or bring your own checkpoint (BYO ckpt).\n",
"### 1) hit the white play button below\n",
"### 2) grab yourself a coffee 🍹 (~10 min wait)\n",
"### 3) scroll all the way to the bottom of the output and open the link \"Running on public URL: https://xxxxxxxxx.gradio.live\"\n",
"### 4) there are many parameters; see what they mean here: https://github.com/lunarring/latentblending/blob/main/parameters.md\n",
"👇 (start here, move the cursor below the finger and the play button will appear)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000,
"referenced_widgets": [
"341059598bb54246ad013a06228104a4",
"1ab78359663e42be979657d5fc5d6219",
"f99ee65269614dc1ab794e4b669ba17e",
"1316532bb31e4b13a79af8bcbf06d646",
"128efcd179fe430195be3065d96998bb",
"76bc701a660d46079b28b16b15d2e8e1",
"e1b429046ead49fa93500211ef09f4c6",
"38b7d3452c2a4fd58509f857a6f678b9",
"919cd507a0744e89b6d289f633e420fd",
"ac31a161b6a542c79b12d940e570b5e1",
"0b87e9c86694421e9c980cabf0ad6000",
"65415aaafdac4b48b69ad524a6cd6450",
"58e31ecfa254438589a048c01fdf9557",
"258106bbf2094f10b70f98c0f399ac11",
"53d20e434e2b4f6c8cab12882eaed1c3",
"2c3acf3296fd4205bbb7f8af047de104",
"1974e3d2b5e84ef6834fa2930be83e82",
"16f60055f74442e2b910ec73d12eca97",
"8e4f1e942dfe4e1ca233bfaa07eee10a",
"222befb69330421aa839bea35a125039",
"cdef154a0c4d4eaea3dbeec02af41897",
"8234b932853445e19699a9026668dd06",
"a658eabcd03b4f28913e51f0f6aba716",
"48d59773d9654912be56ca4b3bd00937",
"12ee22cd996049ed97fdba99fef238ce",
"da4c9da1490a458e80a3418709b76253",
"4fee8311391c4b4d82162ffdc1d9e96e",
"30ca0cf01a4443049a16ef13ddfa56cc",
"e32a38e4ff534c9f98c2d4fded8941d7",
"d5c15b9adc9b43568b2024e857f5d943",
"4640a0426a21443a97c4cec0f4851864",
"c40a78115f5444cda7930c00f0449d7f",
"1595284570a3466d825bb04abce5f2b3"
]
},
"collapsed": true,
"id": "jgZQj-tE6GWW",
"outputId": "04e7e6f8-4569-462b-83e8-05eb26159e73"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting open-clip-torch\n",
" Downloading open_clip_torch-2.9.3-py3-none-any.whl (1.4 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.4/1.4 MB\u001b[0m \u001b[31m28.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting huggingface-hub\n",
" Downloading huggingface_hub-0.11.1-py3-none-any.whl (182 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m182.4/182.4 KB\u001b[0m \u001b[31m12.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: torchvision in /usr/local/lib/python3.8/dist-packages (from open-clip-torch) (0.14.0+cu116)\n",
"Requirement already satisfied: torch>=1.9.0 in /usr/local/lib/python3.8/dist-packages (from open-clip-torch) (1.13.0+cu116)\n",
"Requirement already satisfied: regex in /usr/local/lib/python3.8/dist-packages (from open-clip-torch) (2022.6.2)\n",
"Collecting protobuf==3.20.*\n",
" Downloading protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m28.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting sentencepiece\n",
" Downloading sentencepiece-0.1.97-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m34.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: tqdm in /usr/local/lib/python3.8/dist-packages (from open-clip-torch) (4.64.1)\n",
"Collecting ftfy\n",
" Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.1/53.1 KB\u001b[0m \u001b[31m2.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from torch>=1.9.0->open-clip-torch) (4.4.0)\n",
"Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.8/dist-packages (from ftfy->open-clip-torch) (0.2.5)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub->open-clip-torch) (6.0)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from huggingface-hub->open-clip-torch) (3.9.0)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from huggingface-hub->open-clip-torch) (2.25.1)\n",
"Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub->open-clip-torch) (21.3)\n",
"Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.8/dist-packages (from torchvision->open-clip-torch) (7.1.2)\n",
"Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from torchvision->open-clip-torch) (1.21.6)\n",
"Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.8/dist-packages (from packaging>=20.9->huggingface-hub->open-clip-torch) (3.0.9)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests->huggingface-hub->open-clip-torch) (2022.12.7)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests->huggingface-hub->open-clip-torch) (4.0.0)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->huggingface-hub->open-clip-torch) (2.10)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->huggingface-hub->open-clip-torch) (1.24.3)\n",
"Installing collected packages: sentencepiece, protobuf, ftfy, huggingface-hub, open-clip-torch\n",
" Attempting uninstall: protobuf\n",
" Found existing installation: protobuf 3.19.6\n",
" Uninstalling protobuf-3.19.6:\n",
" Successfully uninstalled protobuf-3.19.6\n",
"\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
"tensorflow 2.9.2 requires protobuf<3.20,>=3.9.2, but you have protobuf 3.20.3 which is incompatible.\n",
"tensorboard 2.9.1 requires protobuf<3.20,>=3.9.2, but you have protobuf 3.20.3 which is incompatible.\u001b[0m\u001b[31m\n",
"\u001b[0mSuccessfully installed ftfy-6.1.1 huggingface-hub-0.11.1 open-clip-torch-2.9.3 protobuf-3.20.3 sentencepiece-0.1.97\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting omegaconf\n",
" Downloading omegaconf-2.3.0-py3-none-any.whl (79 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.5/79.5 KB\u001b[0m \u001b[31m8.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting antlr4-python3-runtime==4.9.*\n",
" Downloading antlr4-python3-runtime-4.9.3.tar.gz (117 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m117.0/117.0 KB\u001b[0m \u001b[31m13.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
"Requirement already satisfied: PyYAML>=5.1.0 in /usr/local/lib/python3.8/dist-packages (from omegaconf) (6.0)\n",
"Building wheels for collected packages: antlr4-python3-runtime\n",
" Building wheel for antlr4-python3-runtime (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for antlr4-python3-runtime: filename=antlr4_python3_runtime-4.9.3-py3-none-any.whl size=144575 sha256=4c3089a5fd2660cf8938916989dc92d95679df79fa60107c725ce9d597b5561c\n",
" Stored in directory: /root/.cache/pip/wheels/b1/a3/c2/6df046c09459b73cc9bb6c4401b0be6c47048baf9a1617c485\n",
"Successfully built antlr4-python3-runtime\n",
"Installing collected packages: antlr4-python3-runtime, omegaconf\n",
"Successfully installed antlr4-python3-runtime-4.9.3 omegaconf-2.3.0\n"
]
},
{
"output_type": "display_data",
"data": {
"application/vnd.colab-display-data+json": {
"pip_warning": {
"packages": [
"pydevd_plugins"
]
}
}
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: fastcore in /usr/local/lib/python3.8/dist-packages (1.5.27)\n",
"Requirement already satisfied: packaging in /usr/local/lib/python3.8/dist-packages (from fastcore) (21.3)\n",
"Requirement already satisfied: pip in /usr/local/lib/python3.8/dist-packages (from fastcore) (22.0.4)\n",
"Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.8/dist-packages (from packaging->fastcore) (3.0.9)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: Pillow in /usr/local/lib/python3.8/dist-packages (7.1.2)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting ffmpeg-python\n",
" Downloading ffmpeg_python-0.2.0-py3-none-any.whl (25 kB)\n",
"Requirement already satisfied: future in /usr/local/lib/python3.8/dist-packages (from ffmpeg-python) (0.16.0)\n",
"Installing collected packages: ffmpeg-python\n",
"Successfully installed ffmpeg-python-0.2.0\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting einops\n",
" Downloading einops-0.6.0-py3-none-any.whl (41 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.6/41.6 KB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hInstalling collected packages: einops\n",
"Successfully installed einops-0.6.0\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting gradio\n",
" Downloading gradio-3.16.1-py3-none-any.whl (14.2 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m14.2/14.2 MB\u001b[0m \u001b[31m74.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: matplotlib in /usr/local/lib/python3.8/dist-packages (from gradio) (3.2.2)\n",
"Collecting ffmpy\n",
" Downloading ffmpy-0.3.0.tar.gz (4.8 kB)\n",
" Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.8/dist-packages (from gradio) (6.0)\n",
"Collecting fastapi\n",
" Downloading fastapi-0.89.1-py3-none-any.whl (55 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m55.8/55.8 KB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from gradio) (4.4.0)\n",
"Requirement already satisfied: fsspec in /usr/local/lib/python3.8/dist-packages (from gradio) (2022.11.0)\n",
"Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from gradio) (3.8.3)\n",
"Collecting pydub\n",
" Downloading pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n",
"Requirement already satisfied: pillow in /usr/local/lib/python3.8/dist-packages (from gradio) (7.1.2)\n",
"Requirement already satisfied: markupsafe in /usr/local/lib/python3.8/dist-packages (from gradio) (2.0.1)\n",
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.8/dist-packages (from gradio) (2.11.3)\n",
"Requirement already satisfied: pydantic in /usr/local/lib/python3.8/dist-packages (from gradio) (1.10.4)\n",
"Collecting markdown-it-py[linkify,plugins]\n",
" Downloading markdown_it_py-2.1.0-py3-none-any.whl (84 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.5/84.5 KB\u001b[0m \u001b[31m10.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting python-multipart\n",
" Downloading python-multipart-0.0.5.tar.gz (32 kB)\n",
" Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from gradio) (2.25.1)\n",
"Collecting orjson\n",
" Downloading orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (270 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m270.5/270.5 KB\u001b[0m \u001b[31m27.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from gradio) (1.3.5)\n",
"Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from gradio) (1.21.6)\n",
"Collecting pycryptodome\n",
" Downloading pycryptodome-3.16.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (2.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.3/2.3 MB\u001b[0m \u001b[31m85.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting websockets>=10.0\n",
" Downloading websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (106 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m107.0/107.0 KB\u001b[0m \u001b[31m14.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting httpx\n",
" Downloading httpx-0.23.3-py3-none-any.whl (71 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.5/71.5 KB\u001b[0m \u001b[31m9.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting uvicorn\n",
" Downloading uvicorn-0.20.0-py3-none-any.whl (56 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.9/56.9 KB\u001b[0m \u001b[31m7.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: altair>=4.2.0 in /usr/local/lib/python3.8/dist-packages (from gradio) (4.2.0)\n",
"Requirement already satisfied: entrypoints in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (0.4)\n",
"Requirement already satisfied: jsonschema>=3.0 in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (4.3.3)\n",
"Requirement already satisfied: toolz in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (0.12.0)\n",
"Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->gradio) (2.8.2)\n",
"Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->gradio) (2022.7)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.3.3)\n",
"Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (2.1.1)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (6.0.4)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.3.1)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (22.2.0)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.8.2)\n",
"Collecting starlette==0.22.0\n",
" Downloading starlette-0.22.0-py3-none-any.whl (64 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.3/64.3 KB\u001b[0m \u001b[31m8.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting anyio<5,>=3.4.0\n",
" Downloading anyio-3.6.2-py3-none-any.whl (80 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m80.6/80.6 KB\u001b[0m \u001b[31m10.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting httpcore<0.17.0,>=0.15.0\n",
" Downloading httpcore-0.16.3-py3-none-any.whl (69 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.6/69.6 KB\u001b[0m \u001b[31m9.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting rfc3986[idna2008]<2,>=1.3\n",
" Downloading rfc3986-1.5.0-py2.py3-none-any.whl (31 kB)\n",
"Requirement already satisfied: certifi in /usr/local/lib/python3.8/dist-packages (from httpx->gradio) (2022.12.7)\n",
"Collecting sniffio\n",
" Downloading sniffio-1.3.0-py3-none-any.whl (10 kB)\n",
"Collecting mdurl~=0.1\n",
" Downloading mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n",
"Collecting linkify-it-py~=1.0\n",
" Downloading linkify_it_py-1.0.3-py3-none-any.whl (19 kB)\n",
"Collecting mdit-py-plugins\n",
" Downloading mdit_py_plugins-0.3.3-py3-none-any.whl (50 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.5/50.5 KB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (1.4.4)\n",
"Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (3.0.9)\n",
"Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (0.11.0)\n",
"Requirement already satisfied: six>=1.4.0 in /usr/local/lib/python3.8/dist-packages (from python-multipart->gradio) (1.15.0)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->gradio) (2.10)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->gradio) (1.24.3)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests->gradio) (4.0.0)\n",
"Requirement already satisfied: click>=7.0 in /usr/local/lib/python3.8/dist-packages (from uvicorn->gradio) (7.1.2)\n",
"Collecting h11>=0.8\n",
" Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 KB\u001b[0m \u001b[31m7.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.8/dist-packages (from jsonschema>=3.0->altair>=4.2.0->gradio) (5.10.2)\n",
"Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.8/dist-packages (from jsonschema>=3.0->altair>=4.2.0->gradio) (0.19.3)\n",
"Collecting uc-micro-py\n",
" Downloading uc_micro_py-1.0.1-py3-none-any.whl (6.2 kB)\n",
"Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.8/dist-packages (from importlib-resources>=1.4.0->jsonschema>=3.0->altair>=4.2.0->gradio) (3.11.0)\n",
"Building wheels for collected packages: ffmpy, python-multipart\n",
" Building wheel for ffmpy (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for ffmpy: filename=ffmpy-0.3.0-py3-none-any.whl size=4711 sha256=e6f285734c2d220275fd83d619b53eff12a63a51db2207d308d8e4aaeb0aefd1\n",
" Stored in directory: /root/.cache/pip/wheels/ff/5b/59/913b443e7369dc04b61f607a746b6f7d83fb65e2e19fcc958d\n",
" Building wheel for python-multipart (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for python-multipart: filename=python_multipart-0.0.5-py3-none-any.whl size=31678 sha256=a1da63cd4efc2c72da5c43f3d5f62f57ad40c86b3898ea9c392a508753533fca\n",
" Stored in directory: /root/.cache/pip/wheels/9e/fc/1c/cf980e6413d3ee8e70cd8f39e2366b0f487e3e221aeb452eb0\n",
"Successfully built ffmpy python-multipart\n",
"Installing collected packages: rfc3986, pydub, ffmpy, websockets, uc-micro-py, sniffio, python-multipart, pycryptodome, orjson, mdurl, h11, uvicorn, markdown-it-py, linkify-it-py, anyio, starlette, mdit-py-plugins, httpcore, httpx, fastapi, gradio\n",
"Successfully installed anyio-3.6.2 fastapi-0.89.1 ffmpy-0.3.0 gradio-3.16.1 h11-0.14.0 httpcore-0.16.3 httpx-0.23.3 linkify-it-py-1.0.3 markdown-it-py-2.1.0 mdit-py-plugins-0.3.3 mdurl-0.1.2 orjson-3.8.5 pycryptodome-3.16.0 pydub-0.25.1 python-multipart-0.0.5 rfc3986-1.5.0 sniffio-1.3.0 starlette-0.22.0 uc-micro-py-1.0.1 uvicorn-0.20.0 websockets-10.4\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting pytorch_lightning\n",
" Downloading pytorch_lightning-1.8.6-py3-none-any.whl (800 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m800.3/800.3 KB\u001b[0m \u001b[31m48.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting torchmetrics>=0.7.0\n",
" Downloading torchmetrics-0.11.0-py3-none-any.whl (512 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m512.4/512.4 KB\u001b[0m \u001b[31m46.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting lightning-utilities!=0.4.0,>=0.3.0\n",
" Downloading lightning_utilities-0.5.0-py3-none-any.whl (18 kB)\n",
"Requirement already satisfied: fsspec[http]>2021.06.0 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (2022.11.0)\n",
"Requirement already satisfied: typing-extensions>=4.0.0 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (4.4.0)\n",
"Collecting tensorboardX>=2.2\n",
" Downloading tensorboardX-2.5.1-py2.py3-none-any.whl (125 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m125.4/125.4 KB\u001b[0m \u001b[31m15.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: numpy>=1.17.2 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (1.21.6)\n",
"Requirement already satisfied: torch>=1.9.0 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (1.12.1+cu113)\n",
"Requirement already satisfied: packaging>=17.0 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (21.3)\n",
"Requirement already satisfied: PyYAML>=5.4 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (6.0)\n",
"Requirement already satisfied: tqdm>=4.57.0 in /usr/local/lib/python3.8/dist-packages (from pytorch_lightning) (4.64.1)\n",
"Requirement already satisfied: aiohttp!=4.0.0a0,!=4.0.0a1 in /usr/local/lib/python3.8/dist-packages (from fsspec[http]>2021.06.0->pytorch_lightning) (3.8.3)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from fsspec[http]>2021.06.0->pytorch_lightning) (2.25.1)\n",
"Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.8/dist-packages (from packaging>=17.0->pytorch_lightning) (3.0.9)\n",
"Collecting protobuf<=3.20.1,>=3.8.0\n",
" Downloading protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m66.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.3.1)\n",
"Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (2.1.1)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.8.2)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (6.0.4)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.3.3)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (22.2.0)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (4.0.0)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (1.24.3)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (2022.12.7)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (2.10)\n",
"Installing collected packages: protobuf, torchmetrics, tensorboardX, lightning-utilities, pytorch_lightning\n",
" Attempting uninstall: protobuf\n",
" Found existing installation: protobuf 3.20.3\n",
" Uninstalling protobuf-3.20.3:\n",
" Successfully uninstalled protobuf-3.20.3\n",
"\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
"tensorflow 2.9.2 requires protobuf<3.20,>=3.9.2, but you have protobuf 3.20.1 which is incompatible.\n",
"tensorboard 2.9.1 requires protobuf<3.20,>=3.9.2, but you have protobuf 3.20.1 which is incompatible.\n",
"googleapis-common-protos 1.57.1 requires protobuf!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-translate 3.8.4 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-language 2.6.1 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-firestore 2.7.3 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-datastore 2.11.1 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-bigquery 3.4.1 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-cloud-bigquery-storage 2.17.0 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\n",
"google-api-core 2.11.0 requires protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5, but you have protobuf 3.20.1 which is incompatible.\u001b[0m\u001b[31m\n",
"\u001b[0mSuccessfully installed lightning-utilities-0.5.0 protobuf-3.20.1 pytorch_lightning-1.8.6 tensorboardX-2.5.1 torchmetrics-0.11.0\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Collecting transformers\n",
" Downloading transformers-4.25.1-py3-none-any.whl (5.8 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.8/5.8 MB\u001b[0m \u001b[31m92.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting tokenizers!=0.11.3,<0.14,>=0.11.1\n",
" Downloading tokenizers-0.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (7.6 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.6/7.6 MB\u001b[0m \u001b[31m82.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.8/dist-packages (from transformers) (2022.6.2)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from transformers) (3.9.0)\n",
"Requirement already satisfied: huggingface-hub<1.0,>=0.10.0 in /usr/local/lib/python3.8/dist-packages (from transformers) (0.11.1)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from transformers) (1.21.6)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from transformers) (6.0)\n",
"Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.8/dist-packages (from transformers) (4.64.1)\n",
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from transformers) (21.3)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from transformers) (2.25.1)\n",
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0,>=0.10.0->transformers) (4.4.0)\n",
"Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.8/dist-packages (from packaging>=20.0->transformers) (3.0.9)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (4.0.0)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (2.10)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (1.24.3)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (2022.12.7)\n",
"Installing collected packages: tokenizers, transformers\n",
"Successfully installed tokenizers-0.13.2 transformers-4.25.1\n",
"Cloning into 'latentblending'...\n",
"remote: Enumerating objects: 530, done.\u001b[K\n",
"remote: Counting objects: 100% (50/50), done.\u001b[K\n",
"remote: Compressing objects: 100% (37/37), done.\u001b[K\n",
"remote: Total 530 (delta 24), reused 29 (delta 13), pack-reused 480\u001b[K\n",
"Receiving objects: 100% (530/530), 8.36 MiB | 6.82 MiB/s, done.\n",
"Resolving deltas: 100% (301/301), done.\n",
"Already up to date.\n"
]
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"/usr/local/lib/python3.8/dist-packages/pytorch_lightning/utilities/distributed.py:258: LightningDeprecationWarning: `pytorch_lightning.utilities.distributed.rank_zero_only` has been deprecated in v1.8.1 and will be removed in v1.10.0. You can import it from `pytorch_lightning.utilities` instead.\n",
" rank_zero_deprecation(\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"--2023-01-15 16:09:49-- https://huggingface.co/stabilityai/stable-diffusion-2-1-base/resolve/main/v2-1_512-ema-pruned.ckpt\n",
"Resolving huggingface.co (huggingface.co)... 54.235.118.239, 3.231.67.228, 2600:1f18:147f:e850:e203:c458:10cd:fc3c, ...\n",
"Connecting to huggingface.co (huggingface.co)|54.235.118.239|:443... connected.\n",
"HTTP request sent, awaiting response... 302 Found\n",
"Location: https://cdn-lfs.huggingface.co/repos/24/cb/24cbc2f7542236eb613b4f16b6802d7c2bef443e86cf9d076719733866e66c3a/88ecb782561455673c4b78d05093494b9c539fc6bfc08f3a9a4a0dd7b0b10f36?response-content-disposition=attachment%3B%20filename%3D%22v2-1_512-ema-pruned.ckpt%22&Expires=1674051670&Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9jZG4tbGZzLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzI0L2NiLzI0Y2JjMmY3NTQyMjM2ZWI2MTNiNGYxNmI2ODAyZDdjMmJlZjQ0M2U4NmNmOWQwNzY3MTk3MzM4NjZlNjZjM2EvODhlY2I3ODI1NjE0NTU2NzNjNGI3OGQwNTA5MzQ5NGI5YzUzOWZjNmJmYzA4ZjNhOWE0YTBkZDdiMGIxMGYzNj9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPWF0dGFjaG1lbnQlM0IlMjBmaWxlbmFtZSUzRCUyMnYyLTFfNTEyLWVtYS1wcnVuZWQuY2twdCUyMiIsIkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTY3NDA1MTY3MH19fV19&Signature=kJC8WBl81~MjK7xt5NvRbmwoUiAw5bvjbeFBCgrGaEkKtZs9ufJDDcTdTH9N7DHR8IviAK14FAfn9XouLcmaGYMhclnLkdWbNPavOMF9gNmqjWvaTeUfslV9XHr~D8rb4Mn~ppw5B2P~3OkzKTEBVtuMXyH-71I38wwxbfCk4WQiHmRlpxAPA9Uq-R8erBTtK26FkJJCYpivHhdPZvoVhsMquvflplZYn-x1-LPxfdD5W-Hf8SvGi6N0iX-r6GnHfjUBzKK09znQ0nv73KRnus1fg-ayl3u20TKPJ~MufcItn8GmJJxVTFOR-2V8oVf29e~OQmYxPnfMXWYfs3lw0A__&Key-Pair-Id=KVTP0A1DKRTAX [following]\n",
"--2023-01-15 16:09:50-- https://cdn-lfs.huggingface.co/repos/24/cb/24cbc2f7542236eb613b4f16b6802d7c2bef443e86cf9d076719733866e66c3a/88ecb782561455673c4b78d05093494b9c539fc6bfc08f3a9a4a0dd7b0b10f36?response-content-disposition=attachment%3B%20filename%3D%22v2-1_512-ema-pruned.ckpt%22&Expires=1674051670&Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9jZG4tbGZzLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzI0L2NiLzI0Y2JjMmY3NTQyMjM2ZWI2MTNiNGYxNmI2ODAyZDdjMmJlZjQ0M2U4NmNmOWQwNzY3MTk3MzM4NjZlNjZjM2EvODhlY2I3ODI1NjE0NTU2NzNjNGI3OGQwNTA5MzQ5NGI5YzUzOWZjNmJmYzA4ZjNhOWE0YTBkZDdiMGIxMGYzNj9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPWF0dGFjaG1lbnQlM0IlMjBmaWxlbmFtZSUzRCUyMnYyLTFfNTEyLWVtYS1wcnVuZWQuY2twdCUyMiIsIkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTY3NDA1MTY3MH19fV19&Signature=kJC8WBl81~MjK7xt5NvRbmwoUiAw5bvjbeFBCgrGaEkKtZs9ufJDDcTdTH9N7DHR8IviAK14FAfn9XouLcmaGYMhclnLkdWbNPavOMF9gNmqjWvaTeUfslV9XHr~D8rb4Mn~ppw5B2P~3OkzKTEBVtuMXyH-71I38wwxbfCk4WQiHmRlpxAPA9Uq-R8erBTtK26FkJJCYpivHhdPZvoVhsMquvflplZYn-x1-LPxfdD5W-Hf8SvGi6N0iX-r6GnHfjUBzKK09znQ0nv73KRnus1fg-ayl3u20TKPJ~MufcItn8GmJJxVTFOR-2V8oVf29e~OQmYxPnfMXWYfs3lw0A__&Key-Pair-Id=KVTP0A1DKRTAX\n",
"Resolving cdn-lfs.huggingface.co (cdn-lfs.huggingface.co)... 13.227.254.33, 13.227.254.123, 13.227.254.52, ...\n",
"Connecting to cdn-lfs.huggingface.co (cdn-lfs.huggingface.co)|13.227.254.33|:443... connected.\n",
"HTTP request sent, awaiting response... 200 OK\n",
"Length: 5214865159 (4.9G) [binary/octet-stream]\n",
"Saving to: v2-1_512-ema-pruned.ckpt\n",
"\n",
"v2-1_512-ema-pruned 100%[===================>] 4.86G 184MB/s in 30s \n",
"\n",
"2023-01-15 16:10:20 (166 MB/s) - v2-1_512-ema-pruned.ckpt saved [5214865159/5214865159]\n",
"\n",
"--2023-01-15 16:10:20-- http://v2-1_512-ema-pruned.ckpt/\n",
"Resolving v2-1_512-ema-pruned.ckpt (v2-1_512-ema-pruned.ckpt)... failed: Name or service not known.\n",
"wget: unable to resolve host address v2-1_512-ema-pruned.ckpt\n",
"FINISHED --2023-01-15 16:10:20--\n",
"Total wall clock time: 31s\n",
"Downloaded: 1 files, 4.9G in 30s (166 MB/s)\n",
"LatentDiffusion: Running in eps-prediction mode\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is None and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is 1024 and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is None and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is 1024 and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is None and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is 1024 and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is None and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is 1024 and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is None and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 1280, context_dim is 1024 and using 20 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is None and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is 1024 and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is None and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is 1024 and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is None and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 640, context_dim is 1024 and using 10 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is None and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is 1024 and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is None and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is 1024 and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is None and using 5 heads.\n",
"Setting up MemoryEfficientCrossAttention. Query dim is 320, context_dim is 1024 and using 5 heads.\n",
"DiffusionWrapper has 865.91 M params.\n",
"making attention of type 'vanilla-xformers' with 512 in_channels\n",
"building MemoryEfficientAttnBlock with 512 in_channels...\n",
"Working with z of shape (1, 4, 32, 32) = 4096 dimensions.\n",
"making attention of type 'vanilla-xformers' with 512 in_channels\n",
"building MemoryEfficientAttnBlock with 512 in_channels...\n"
]
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"Downloading: 0%| | 0.00/3.94G [00:00<?, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "341059598bb54246ad013a06228104a4"
}
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. To turn off, set debug=False in launch().\n",
"Running on public URL: https://3e25df32-cc36-4745.gradio.live\n",
"\n",
"This share link expires in 72 hours. For free permanent hosting and GPU upgrades (NEW!), check out Spaces: https://huggingface.co/spaces\n",
"STARTING DIFFUSION!\n",
"autosetup_branching: num_inference_steps: 20 list_nmb_branches: [2, 3, 5, 9] list_injection_idx: [0, 5, 11, 17]\n"
]
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"computing transition: 0%| | 0/21 [00:00<?, ?it/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "65415aaafdac4b48b69ad524a6cd6450"
}
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Latent Blending pass finished. Resulted in 9 images\n",
"MovieSaver initialized. fps=30 crf=24 pix_fmt=yuv420p codec=libx264 preset=fast\n"
]
},
{
"output_type": "display_data",
"data": {
"text/plain": [
" 0%| | 0/300 [00:00<?, ?it/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "a658eabcd03b4f28913e51f0f6aba716"
}
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Initialization done. Movie shape: (512, 512, 3)\n",
"Movie saved, 10s playtime, watch here: \n",
"movie_230115_161425.mp4\n",
"DONE SAVING MOVIE! SENDING BACK...\n",
"Keyboard interruption in main thread... closing server.\n",
"Killing tunnel 127.0.0.1:7860 <> https://3e25df32-cc36-4745.gradio.live\n"
]
}
],
"source": [
"!pip install wget\n",
"import wget\n",
"import os\n",
"\n",
"import requests\n",
"model = 'v2-1_512-ema-pruned' #@param [\"v2-1_512-ema-pruned\", \"v2-1_768-ema-pruned\", \"v1.5\"]\n",
"#@markdown Optionally, specify your own checkpoint below. Make sure to select the correct model above.\n",
"url_ckpt = \"\" #@param {type:\"string\"}\n",
"\n",
"if len(url_ckpt) < 1:\n",
" if model == \"v2-1_512-ema-pruned\":\n",
" url_ckpt = \"https://huggingface.co/stabilityai/stable-diffusion-2-1-base/resolve/main/v2-1_512-ema-pruned.ckpt\"\n",
" fp_config = 'latentblending/configs/v2-inference.yaml'\n",
" elif model == \"v2-1_768-ema-pruned\":\n",
" url_ckpt = \"https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.ckpt\"\n",
" fp_config = 'latentblending/configs/v2-inference-v.yaml'\n",
"\n",
"# Check that the supplied URLs exist.\n",
"response = requests.head(url_ckpt)\n",
"if response.status_code != 200 and response.status_code != 302:\n",
" raise ValueError(f\"url_ckpt could not be downloaded: {url_ckpt} gives {response.status_code}\")\n",
"fp_ckpt = 'model.ckpt'\n",
"wget.download(url_ckpt, fp_ckpt)\n",
"assert os.path.isfile(fp_ckpt), \"model download has failed.\"\n",
"\n",
"\n",
"if model == \"v2-1_512-ema-pruned\":\n",
" fp_config = 'latentblending/configs/v2-inference.yaml'\n",
"elif model == \"v2-1_768-ema-pruned\":\n",
" fp_config = 'latentblending/configs/v2-inference-v.yaml'\n",
"elif model == 'v1.5':\n",
" fp_config = 'latentblending/configs/v1-inference.yaml'\n",
"\n",
"print(f\"url_ckpt: {url_ckpt} fp_config {fp_config}\")\n",
"\n",
"\n",
"# installs\n",
"!pip install open-clip-torch\n",
"!pip install omegaconf\n",
"!pip install fastcore -U\n",
"!pip install Pillow\n",
"!pip install ffmpeg-python\n",
"!pip install einops\n",
"!pip install gradio\n",
"\n",
"import os, sys\n",
"from subprocess import getoutput\n",
"\n",
"# Xformers\n",
"os.system(\"pip install --extra-index-url https://download.pytorch.org/whl/cu113 torch torchvision==0.13.1+cu113\")\n",
"os.system(\"pip install triton==2.0.0.dev20220701\")\n",
"gpu_info = getoutput('nvidia-smi')\n",
"if(\"A10G\" in gpu_info):\n",
" os.system(f\"pip install -q https://github.com/camenduru/stable-diffusion-webui-colab/releases/download/0.0.15/xformers-0.0.15.dev0+4c06c79.d20221205-cp38-cp38-linux_x86_64.whl\")\n",
"elif(\"T4\" in gpu_info):\n",
" os.system(f\"pip install -q https://github.com/camenduru/stable-diffusion-webui-colab/releases/download/0.0.15/xformers-0.0.15.dev0+1515f77.d20221130-cp38-cp38-linux_x86_64.whl\")\n",
"\n",
"!pip install pytorch_lightning\n",
"!pip install transformers\n",
"\n",
"# Get Latent Blending from git / pull \n",
"!git clone https://github.com/lunarring/latentblending\n",
"!cd latentblending; git pull; cd ..\n",
"sys.path.append(\"/content/latentblending\")\n",
"\n",
"\n",
"\n",
"\n",
"# Imports\n",
"import torch\n",
"import numpy as np\n",
"import warnings\n",
"warnings.filterwarnings('ignore')\n",
"import warnings\n",
"import torch\n",
"from tqdm.auto import tqdm\n",
"from PIL import Image\n",
"import torch\n",
"from typing import Callable, List, Optional, Union\n",
"from latent_blending import LatentBlending, add_frames_linear_interp, get_time, yml_save, LatentBlending, compare_dicts\n",
"from stable_diffusion_holder import StableDiffusionHolder\n",
"from gradio_ui import BlendingFrontend\n",
"import gradio as gr\n",
"\n",
"torch.set_grad_enabled(False)\n",
"torch.backends.cudnn.benchmark = False\n",
"\n",
"\n",
"#%% First let us spawn a stable diffusion holder\n",
"device = \"cuda\" \n",
"\n",
"\n",
"sdh = StableDiffusionHolder(fp_ckpt, fp_config, device) \n",
"\n",
"from latent_blending import get_time, yml_save, LatentBlending, add_frames_linear_interp, compare_dicts\n",
"from gradio_ui import BlendingFrontend\n",
"\n",
"import gradio as gr\n",
"\n",
"if __name__ == \"__main__\": \n",
" \n",
" self = BlendingFrontend(sdh) # Yes this is possible in python and yes it is an awesome trick\n",
" \n",
" with gr.Blocks() as demo:\n",
" with gr.Row():\n",
" prompt1 = gr.Textbox(label=\"prompt 1\")\n",
" prompt2 = gr.Textbox(label=\"prompt 2\")\n",
" negative_prompt = gr.Textbox(label=\"negative prompt\") \n",
" \n",
" with gr.Row():\n",
" nmb_branches_final = gr.Slider(5, 125, self.nmb_branches_final, step=4, label='nmb trans images', interactive=True) \n",
" height = gr.Slider(256, 2048, self.height, step=128, label='height', interactive=True)\n",
" width = gr.Slider(256, 2048, self.width, step=128, label='width', interactive=True) \n",
" \n",
" with gr.Row():\n",
" num_inference_steps = gr.Slider(5, 100, self.num_inference_steps, step=1, label='num_inference_steps', interactive=True)\n",
" branch1_influence = gr.Slider(0.0, 1.0, self.branch1_influence, step=0.01, label='branch1_influence', interactive=True) \n",
" guidance_scale = gr.Slider(1, 25, self.guidance_scale, step=0.1, label='guidance_scale', interactive=True) \n",
" \n",
" with gr.Row():\n",
" depth_strength = gr.Slider(0.01, 0.99, self.depth_strength, step=0.01, label='depth_strength', interactive=True) \n",
" duration = gr.Slider(0.1, 30, self.duration, step=0.1, label='video duration', interactive=True) \n",
" guidance_scale_mid_damper = gr.Slider(0.01, 2.0, self.guidance_scale_mid_damper, step=0.01, label='guidance_scale_mid_damper', interactive=True) \n",
" \n",
" with gr.Row():\n",
" seed1 = gr.Number(42, label=\"seed 1\", interactive=True)\n",
" b_newseed1 = gr.Button(\"randomize seed 1\", variant='secondary')\n",
" seed2 = gr.Number(420, label=\"seed 2\", interactive=True)\n",
" b_newseed2 = gr.Button(\"randomize seed 2\", variant='secondary')\n",
" with gr.Row():\n",
" b_compute_transition = gr.Button('compute transition', variant='primary')\n",
" \n",
" with gr.Row():\n",
" img1 = gr.Image(label=\"1/5\")\n",
" img2 = gr.Image(label=\"2/5\")\n",
" img3 = gr.Image(label=\"3/5\")\n",
" img4 = gr.Image(label=\"4/5\")\n",
" img5 = gr.Image(label=\"5/5\")\n",
" \n",
" with gr.Row():\n",
" vid_transition = gr.Video()\n",
" \n",
" # Bind the on-change methods\n",
" depth_strength.change(fn=self.change_depth_strength, inputs=depth_strength)\n",
" num_inference_steps.change(fn=self.change_num_inference_steps, inputs=num_inference_steps)\n",
" nmb_branches_final.change(fn=self.change_nmb_branches_final, inputs=nmb_branches_final)\n",
" \n",
" guidance_scale.change(fn=self.change_guidance_scale, inputs=guidance_scale)\n",
" guidance_scale_mid_damper.change(fn=self.change_guidance_scale_mid_damper, inputs=guidance_scale_mid_damper)\n",
" \n",
" height.change(fn=self.change_height, inputs=height)\n",
" width.change(fn=self.change_width, inputs=width)\n",
" negative_prompt.change(fn=self.change_negative_prompt, inputs=negative_prompt)\n",
" seed1.change(fn=self.change_seed1, inputs=seed1)\n",
" seed2.change(fn=self.change_seed2, inputs=seed2)\n",
" duration.change(fn=self.change_duration, inputs=duration)\n",
" branch1_influence.change(fn=self.change_branch1_influence, inputs=branch1_influence)\n",
" \n",
" b_newseed1.click(self.randomize_seed1, outputs=seed1)\n",
" b_newseed2.click(self.randomize_seed2, outputs=seed2)\n",
" b_compute_transition.click(self.compute_transition, \n",
" inputs=[prompt1, prompt2],\n",
" outputs=[img1, img2, img3, img4, img5, vid_transition])\n",
" \n",
" demo.launch(share=self.share, inbrowser=True, inline=False, debug=True)\n"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"machine_shape": "hm",
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"341059598bb54246ad013a06228104a4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_1ab78359663e42be979657d5fc5d6219",
"IPY_MODEL_f99ee65269614dc1ab794e4b669ba17e",
"IPY_MODEL_1316532bb31e4b13a79af8bcbf06d646"
],
"layout": "IPY_MODEL_128efcd179fe430195be3065d96998bb"
}
},
"1ab78359663e42be979657d5fc5d6219": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_76bc701a660d46079b28b16b15d2e8e1",
"placeholder": "",
"style": "IPY_MODEL_e1b429046ead49fa93500211ef09f4c6",
"value": "Downloading: 100%"
}
},
"f99ee65269614dc1ab794e4b669ba17e": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_38b7d3452c2a4fd58509f857a6f678b9",
"max": 3944692325,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_919cd507a0744e89b6d289f633e420fd",
"value": 3944692325
}
},
"1316532bb31e4b13a79af8bcbf06d646": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_ac31a161b6a542c79b12d940e570b5e1",
"placeholder": "",
"style": "IPY_MODEL_0b87e9c86694421e9c980cabf0ad6000",
"value": " 3.94G/3.94G [00:52&lt;00:00, 85.6MB/s]"
}
},
"128efcd179fe430195be3065d96998bb": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"76bc701a660d46079b28b16b15d2e8e1": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"e1b429046ead49fa93500211ef09f4c6": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"38b7d3452c2a4fd58509f857a6f678b9": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"919cd507a0744e89b6d289f633e420fd": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"ac31a161b6a542c79b12d940e570b5e1": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"0b87e9c86694421e9c980cabf0ad6000": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"65415aaafdac4b48b69ad524a6cd6450": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_58e31ecfa254438589a048c01fdf9557",
"IPY_MODEL_258106bbf2094f10b70f98c0f399ac11",
"IPY_MODEL_53d20e434e2b4f6c8cab12882eaed1c3"
],
"layout": "IPY_MODEL_2c3acf3296fd4205bbb7f8af047de104"
}
},
"58e31ecfa254438589a048c01fdf9557": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_1974e3d2b5e84ef6834fa2930be83e82",
"placeholder": "",
"style": "IPY_MODEL_16f60055f74442e2b910ec73d12eca97",
"value": "computing transition: 100%"
}
},
"258106bbf2094f10b70f98c0f399ac11": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_8e4f1e942dfe4e1ca233bfaa07eee10a",
"max": 21,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_222befb69330421aa839bea35a125039",
"value": 21
}
},
"53d20e434e2b4f6c8cab12882eaed1c3": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_cdef154a0c4d4eaea3dbeec02af41897",
"placeholder": "",
"style": "IPY_MODEL_8234b932853445e19699a9026668dd06",
"value": " 21/21 [00:23&lt;00:00, 1.09s/it]"
}
},
"2c3acf3296fd4205bbb7f8af047de104": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"1974e3d2b5e84ef6834fa2930be83e82": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"16f60055f74442e2b910ec73d12eca97": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"8e4f1e942dfe4e1ca233bfaa07eee10a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"222befb69330421aa839bea35a125039": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"cdef154a0c4d4eaea3dbeec02af41897": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"8234b932853445e19699a9026668dd06": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"a658eabcd03b4f28913e51f0f6aba716": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_48d59773d9654912be56ca4b3bd00937",
"IPY_MODEL_12ee22cd996049ed97fdba99fef238ce",
"IPY_MODEL_da4c9da1490a458e80a3418709b76253"
],
"layout": "IPY_MODEL_4fee8311391c4b4d82162ffdc1d9e96e"
}
},
"48d59773d9654912be56ca4b3bd00937": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_30ca0cf01a4443049a16ef13ddfa56cc",
"placeholder": "",
"style": "IPY_MODEL_e32a38e4ff534c9f98c2d4fded8941d7",
"value": "100%"
}
},
"12ee22cd996049ed97fdba99fef238ce": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_d5c15b9adc9b43568b2024e857f5d943",
"max": 300,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_4640a0426a21443a97c4cec0f4851864",
"value": 300
}
},
"da4c9da1490a458e80a3418709b76253": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_c40a78115f5444cda7930c00f0449d7f",
"placeholder": "",
"style": "IPY_MODEL_1595284570a3466d825bb04abce5f2b3",
"value": " 300/300 [00:03&lt;00:00, 146.93it/s]"
}
},
"4fee8311391c4b4d82162ffdc1d9e96e": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"30ca0cf01a4443049a16ef13ddfa56cc": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"e32a38e4ff534c9f98c2d4fded8941d7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"d5c15b9adc9b43568b2024e857f5d943": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"4640a0426a21443a97c4cec0f4851864": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"c40a78115f5444cda7930c00f0449d7f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"1595284570a3466d825bb04abce5f2b3": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
}
}
}
},
"nbformat": 4,
"nbformat_minor": 0
}