diff --git a/w_okada's_Voice_Changer_version_2_x.ipynb b/w_okada's_Voice_Changer_version_2_x.ipynb
index e6bfe8d3..e3cd806e 100644
--- a/w_okada's_Voice_Changer_version_2_x.ipynb
+++ b/w_okada's_Voice_Changer_version_2_x.ipynb
@@ -196,14 +196,19 @@
         "# @title **[2]** Start server\n",
         "# @markdown This cell will start the server, the first time that you run it will download the models, so it can take a while (2~4 minutes)\n",
         "\n",
+        "# @markdown If you want to use ngrok, please input your token in the option section below. If you encounter a 403 error with the colab proxy, using ngrok can sometimes help to work around it.\n",
+        "# @markdown https://dashboard.ngrok.com/\n",
+        "\n",
         "\n",
         "# @markdown ### Options:\n",
         "ClearConsole = True # @param {type:\"boolean\"}\n",
         "Play_Notification = True # @param {type:\"boolean\"}\n",
+        "NgrokToken = \"\" # @param {type:\"string\"}\n",
         "\n",
         "PORT=8003\n",
+        "NGROK_URL_FILE = \"ngrok_url.txt\"\n",
         "\n",
-        "LOG_FILE = f\"/content/LOG_FILE_{PORT}\"\n",
+        "LOG_FILE = f\"/content/LOG_FILE_{PORT}.log\"\n",
         "\n",
         "from IPython.display import Audio, display\n",
         "def play_notification_sound(url):\n",
@@ -214,8 +219,7 @@
         "\n",
         "if mode == \"elf\":\n",
         "    # !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true &\n",
-        "\n",
-        "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true >{LOG_FILE} 2>&1 &')\n",
+        "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False --ngrok_token {NgrokToken} --ngrok_proxy_url_file {NGROK_URL_FILE} >{LOG_FILE} 2>&1 &')\n",
         "elif mode == \"zip\":\n",
         "    !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./main cui --port {PORT} --no_cui true &\n",
         "\n",
@@ -251,7 +255,11 @@
         "    print(\"--------- SERVER READY! ---------\")\n",
         "    print(f\"Your server is available. elapsed: {elapsed_time}sec\")\n",
         "    proxy = eval_js( \"google.colab.kernel.proxyPort(\" + str(PORT) + \")\" )\n",
-        "    print(f\"{proxy}\")\n",
+        "    print(f\"colab proxy: {proxy}\")\n",
+        "    if NgrokToken != \"\":\n",
+        "        with open(NGROK_URL_FILE, \"r\") as f:\n",
+        "            ngrok_url = f.read().strip()\n",
+        "        print(f\"Ngrok URL: {ngrok_url}\")\n",
         "    print(\"---------------------------------\")\n",
         "    if Play_Notification==True:\n",
         "        play_notification_sound('https://huggingface.co/wok000/voices/resolve/main/vcclient001_vctk229_gpt-sovits_vcclient-ready.wav')\n",
@@ -279,7 +287,7 @@
     "colab": {
       "provenance": [],
       "gpuType": "T4",
-      "authorship_tag": "ABX9TyPpbFBssircyl/qT8uDX2zy",
+      "authorship_tag": "ABX9TyOQ2VZFXCzGQTAvv2SwjJz1",
       "include_colab_link": true
     },
     "kernelspec": {
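
For reference, a minimal sketch (not part of the diff) of how the ngrok hand-off introduced above can be consumed: the notebook passes --ngrok_proxy_url_file ngrok_url.txt to the vcclient binary and reads that file after the server-ready check. The polling helper below is illustrative only; the function name, the timeout, and the assumption that the file may appear slightly after the server socket opens are not taken from the notebook.

import os
import time

NGROK_URL_FILE = "ngrok_url.txt"  # same file name the notebook passes via --ngrok_proxy_url_file


def poll_ngrok_url(path=NGROK_URL_FILE, timeout_sec=120):
    """Wait for the vcclient process to write its ngrok public URL, then return it.

    Returns None if the file never appears (e.g. when NgrokToken was left empty).
    """
    deadline = time.time() + timeout_sec
    while time.time() < deadline:
        if os.path.exists(path):
            with open(path, "r") as f:
                url = f.read().strip()
            if url:  # the file may exist briefly before the URL is flushed
                return url
        time.sleep(1)
    return None


url = poll_ngrok_url()
print(f"Ngrok URL: {url}" if url else "No ngrok URL found; use the colab proxy URL instead.")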