Edit File: Colab-TextGen-GPU.ipynb
{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "private_outputs": true, "provenance": [], "gpuType": "T4" }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" }, "accelerator": "GPU" }, "cells": [ { "cell_type": "markdown", "source": [ "# oobabooga/text-generation-webui\n", "\n", "After running both cells, a public gradio URL will appear at the bottom in around 10 minutes. You can optionally generate an API link.\n", "\n", "* Project page: https://github.com/oobabooga/text-generation-webui\n", "* Gradio server status: https://status.gradio.app/" ], "metadata": { "id": "MFQl6-FjSYtY" } }, { "cell_type": "code", "source": [ "#@title 1. Keep this tab alive to prevent Colab from disconnecting you { display-mode: \"form\" }\n", "\n", "#@markdown Press play on the music player that will appear below:\n", "%%html\n", "<audio src=\"https://oobabooga.github.io/silence.m4a\" controls>" ], "metadata": { "id": "f7TVVj_z4flw" }, "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ "#@title 2. Launch the web UI\n", "\n", "#@markdown If unsure about the branch, write \"main\" or leave it blank.\n", "\n", "import os\n", "from pathlib import Path\n", "\n", "os.environ.pop('PYTHONPATH', None)\n", "os.environ.pop('MPLBACKEND', None)\n", "\n", "if Path.cwd().name != 'text-generation-webui':\n", " print(\"\\033[1;32;1m\\n --> Installing the web UI. This will take a while, but after the initial setup, you can download and test as many models as you like.\\033[0;37;0m\\n\")\n", "\n", " !git clone https://github.com/oobabooga/text-generation-webui\n", " %cd text-generation-webui\n", "\n", " # Install the project in an isolated environment\n", " !GPU_CHOICE=A \\\n", " LAUNCH_AFTER_INSTALL=FALSE \\\n", " INSTALL_EXTENSIONS=FALSE \\\n", " ./start_linux.sh\n", "\n", "# Parameters\n", "model_url = \"https://huggingface.co/turboderp/gemma-2-9b-it-exl2\" #@param {type:\"string\"}\n", "branch = \"8.0bpw\" #@param {type:\"string\"}\n", "command_line_flags = \"--n-gpu-layers 128 --load-in-4bit --use_double_quant --no_flash_attn\" #@param {type:\"string\"}\n", "api = False #@param {type:\"boolean\"}\n", "\n", "if api:\n", " for param in ['--api', '--public-api']:\n", " if param not in command_line_flags:\n", " command_line_flags += f\" {param}\"\n", "\n", "model_url = model_url.strip()\n", "if model_url != \"\":\n", " if not model_url.startswith('http'):\n", " model_url = 'https://huggingface.co/' + model_url\n", "\n", " # Download the model\n", " url_parts = model_url.strip('/').strip().split('/')\n", " output_folder = f\"{url_parts[-2]}_{url_parts[-1]}\"\n", " branch = branch.strip('\"\\' ')\n", " if branch.strip() not in ['', 'main']:\n", " output_folder += f\"_{branch}\"\n", " !python download-model.py {model_url} --branch {branch}\n", " else:\n", " !python download-model.py {model_url}\n", "else:\n", " output_folder = \"\"\n", "\n", "# Start the web UI\n", "cmd = f\"./start_linux.sh {command_line_flags} --share\"\n", "if output_folder != \"\":\n", " cmd += f\" --model {output_folder}\"\n", "\n", "!$cmd" ], "metadata": { "id": "LGQ8BiMuXMDG", "cellView": "form" }, "execution_count": null, "outputs": [] } ] }