From cccac62a5fd953de26cceafbf2592f1761ee3d48 Mon Sep 17 00:00:00 2001
From: w-okada <48346627+w-okada@users.noreply.github.com>
Date: Thu, 8 Aug 2024 02:54:22 +0900
Subject: [PATCH 1/3] Created using Colab
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 w_okada's_Voice_Changer_version_2_x.ipynb | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

diff --git a/w_okada's_Voice_Changer_version_2_x.ipynb b/w_okada's_Voice_Changer_version_2_x.ipynb
index e6bfe8d3..e3cd806e 100644
--- a/w_okada's_Voice_Changer_version_2_x.ipynb
+++ b/w_okada's_Voice_Changer_version_2_x.ipynb
@@ -196,14 +196,19 @@
  "# @title **[2]** Start server\n",
  "# @markdown This cell will start the server, the first time that you run it will download the models, so it can take a while (2~4 minutes)\n",
  "\n",
+ "# @markdown If you want to use ngrok, please enter your token in the Options section below. If you encounter a 403 error with the Colab proxy, using ngrok can sometimes work around it.\n",
+ "# @markdown https://dashboard.ngrok.com/\n",
+ "\n",
  "\n",
  "# @markdown ### Options:\n",
  "ClearConsole = True # @param {type:\"boolean\"}\n",
  "Play_Notification = True # @param {type:\"boolean\"}\n",
+ "NgrokToken = \"\" # @param {type:\"string\"}\n",
  "\n",
  "PORT=8003\n",
+ "NGROK_URL_FILE = \"ngrok_url.txt\"\n",
  "\n",
- "LOG_FILE = f\"/content/LOG_FILE_{PORT}\"\n",
+ "LOG_FILE = f\"/content/LOG_FILE_{PORT}.log\"\n",
  "\n",
  "from IPython.display import Audio, display\n",
  "def play_notification_sound(url):\n",
@@ -214,8 +219,7 @@
  "\n",
  "if mode == \"elf\":\n",
  "    # !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true &\n",
- "\n",
- "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true >{LOG_FILE} 2>&1 &')\n",
+ "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False --ngrok_token {NgrokToken} --ngrok_proxy_url_file {NGROK_URL_FILE} >{LOG_FILE} 2>&1 &')\n",
  "elif mode == \"zip\":\n",
  "    !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./main cui --port {PORT} --no_cui true &\n",
  "\n",
@@ -251,7 +255,11 @@
  "    print(\"--------- SERVER READY! ---------\")\n",
  "    print(f\"Your server is available. elapsed: {elapsed_time}sec\")\n",
  "    proxy = eval_js( \"google.colab.kernel.proxyPort(\" + str(PORT) + \")\" )\n",
- "    print(f\"{proxy}\")\n",
+ "    print(f\"colab proxy: {proxy}\")\n",
+ "    if NgrokToken != \"\":\n",
+ "        with open(NGROK_URL_FILE, \"r\") as f:\n",
+ "            ngrok_url = f.read().strip()\n",
+ "        print(f\"Ngrok URL: {ngrok_url}\")\n",
  "    print(\"---------------------------------\")\n",
  "    if Play_Notification==True:\n",
  "        play_notification_sound('https://huggingface.co/wok000/voices/resolve/main/vcclient001_vctk229_gpt-sovits_vcclient-ready.wav')\n",
@@ -279,7 +287,7 @@
  "colab": {
   "provenance": [],
   "gpuType": "T4",
-  "authorship_tag": "ABX9TyPpbFBssircyl/qT8uDX2zy",
+  "authorship_tag": "ABX9TyOQ2VZFXCzGQTAvv2SwjJz1",
   "include_colab_link": true
  },
  "kernelspec": {

From 0fbcd52729db50d39a492849c60210b07d9979dd Mon Sep 17 00:00:00 2001
From: w-okada <48346627+w-okada@users.noreply.github.com>
Date: Tue, 20 Aug 2024 09:51:22 +0900
Subject: [PATCH 2/3] Created using Colab
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 w_okada's_Voice_Changer_version_2_x.ipynb | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/w_okada's_Voice_Changer_version_2_x.ipynb b/w_okada's_Voice_Changer_version_2_x.ipynb
index e3cd806e..6d5031c1 100644
--- a/w_okada's_Voice_Changer_version_2_x.ipynb
+++ b/w_okada's_Voice_Changer_version_2_x.ipynb
@@ -203,7 +203,7 @@
  "# @markdown ### Options:\n",
  "ClearConsole = True # @param {type:\"boolean\"}\n",
  "Play_Notification = True # @param {type:\"boolean\"}\n",
- "NgrokToken = \"\" # @param {type:\"string\"}\n",
+ "NgrokToken = \"2iIApCAGO7wODHEUtZPDjbFq2bT_5H9VfYRx8oQBaXUT7EEH9\" # @param {type:\"string\"}\n",
  "\n",
  "PORT=8003\n",
  "NGROK_URL_FILE = \"ngrok_url.txt\"\n",
@@ -217,9 +217,14 @@
  "from google.colab.output import eval_js\n",
  "\n",
  "\n",
+ "\n",
+ "\n",
  "if mode == \"elf\":\n",
  "    # !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true &\n",
- "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False --ngrok_token {NgrokToken} --ngrok_proxy_url_file {NGROK_URL_FILE} >{LOG_FILE} 2>&1 &')\n",
+ "    if NgrokToken ==\"\":\n",
+ "        get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False >{LOG_FILE} 2>&1 &')\n",
+ "    else:\n",
+ "        get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False --ngrok_token {NgrokToken} --ngrok_proxy_url_file {NGROK_URL_FILE} >{LOG_FILE} 2>&1 &')\n",
  "elif mode == \"zip\":\n",
  "    !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./main cui --port {PORT} --no_cui true &\n",
  "\n",
@@ -266,6 +271,15 @@
  "wait_for_server()\n"
  ]
  },
+ {
+  "cell_type": "code",
+  "source": [],
+  "metadata": {
+   "id": "CRZALk96MxuH"
+  },
+  "execution_count": null,
+  "outputs": []
+ },
  {
  "cell_type": "code",
  "source": [
@@ -287,7 +301,7 @@
  "colab": {
   "provenance": [],
   "gpuType": "T4",
-  "authorship_tag": "ABX9TyOQ2VZFXCzGQTAvv2SwjJz1",
+  "authorship_tag": "ABX9TyM285GMdZ0Pq8hmQU8sArod",
   "include_colab_link": true
  },
  "kernelspec": {
From a0861a1b305e935a5a42d00dd61433f14533063e Mon Sep 17 00:00:00 2001
From: w-okada <48346627+w-okada@users.noreply.github.com>
Date: Tue, 20 Aug 2024 09:57:49 +0900
Subject: [PATCH 3/3] Created using Colab
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 w_okada's_Voice_Changer_version_2_x.ipynb | 38 ++++++++++++++++++++---
 1 file changed, 33 insertions(+), 5 deletions(-)

diff --git a/w_okada's_Voice_Changer_version_2_x.ipynb b/w_okada's_Voice_Changer_version_2_x.ipynb
index 6d5031c1..ab6779a3 100644
--- a/w_okada's_Voice_Changer_version_2_x.ipynb
+++ b/w_okada's_Voice_Changer_version_2_x.ipynb
@@ -52,12 +52,40 @@
  },
  {
   "cell_type": "code",
-  "execution_count": null,
+  "execution_count": 3,
   "metadata": {
    "id": "W2GYWTHWmRIY",
-   "cellView": "form"
+   "cellView": "form",
+   "outputId": "ab23f187-546e-4156-813d-12e2f178d23a",
+   "colab": {
+    "base_uri": "https://localhost:8080/"
+   }
   },
-  "outputs": [],
+  "outputs": [
+   {
+    "metadata": {
+     "tags": null
+    },
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "GPU is available\n",
+     "GPU Name: Tesla T4\n",
+     "Welcome to ColabMod\n",
+     "Downloading the latest vcclient... \n",
+     " % Total % Received % Xferd Average Speed Time Time Time Current\n",
+     " Dload Upload Total Spent Left Speed\n",
+     "100 1163 100 1163 0 0 2654 0 --:--:-- --:--:-- --:--:-- 2649\n",
+     "Warning: Failed to create the file /content/vcclient_latest_for_colab: Text \n",
+     "Warning: file busy\n",
+     " 0 2965M 0 15848 0 0 16360 0 52:47:53 --:--:-- 52:47:53 16360\n",
+     "curl: (23) Failure writing output to destination\n",
+     "Download is done.\n",
+     "/content\n",
+     "Installing modules... Install is done.\n"
+    ]
+   }
+  ],
   "source": [
    "#=================Updated=================\n",
    "# @title **[1]** Clone repository and install dependencies\n",
@@ -203,7 +231,7 @@
  "# @markdown ### Options:\n",
  "ClearConsole = True # @param {type:\"boolean\"}\n",
  "Play_Notification = True # @param {type:\"boolean\"}\n",
- "NgrokToken = \"2iIApCAGO7wODHEUtZPDjbFq2bT_5H9VfYRx8oQBaXUT7EEH9\" # @param {type:\"string\"}\n",
+ "NgrokToken = \"\" # @param {type:\"string\"}\n",
  "\n",
  "PORT=8003\n",
  "NGROK_URL_FILE = \"ngrok_url.txt\"\n",
@@ -301,7 +329,7 @@
  "colab": {
   "provenance": [],
   "gpuType": "T4",
-  "authorship_tag": "ABX9TyM285GMdZ0Pq8hmQU8sArod",
+  "authorship_tag": "ABX9TyMWd/nQ6LnQpMDqF9MqEpsm",
   "include_colab_link": true
  },
  "kernelspec": {
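
For reference, the runtime behavior these three patches give cell [2] can be summarized by the sketch below. It is a minimal illustration, not the notebook's exact code: PORT, NGROK_URL_FILE, the NgrokToken form field, the --ngrok_token/--ngrok_proxy_url_file flags, and google.colab.kernel.proxyPort are taken from the diffs above, while the report_urls helper and its structure are hypothetical.

    import os
    from google.colab.output import eval_js  # only available inside a Colab runtime

    PORT = 8003
    NGROK_URL_FILE = "ngrok_url.txt"

    def report_urls(ngrok_token: str) -> None:
        # The Colab proxy URL is printed once the server reports ready.
        proxy = eval_js(f"google.colab.kernel.proxyPort({PORT})")
        print(f"colab proxy: {proxy}")
        # When a token was supplied, the server was launched with
        # --ngrok_token / --ngrok_proxy_url_file and writes its public URL
        # to NGROK_URL_FILE; print it as a fallback for 403s on the Colab proxy.
        if ngrok_token != "" and os.path.exists(NGROK_URL_FILE):
            with open(NGROK_URL_FILE, "r") as f:
                print(f"Ngrok URL: {f.read().strip()}")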