WIP: store setting 3

wataru 2023-01-29 14:41:44 +09:00
parent dd7036cd08
commit ac967fdea1
9 changed files with 118 additions and 46 deletions

File diff suppressed because one or more lines are too long

View File

@@ -9,7 +9,7 @@
       "version": "1.0.0",
       "license": "ISC",
       "dependencies": {
-        "@dannadori/voice-changer-client-js": "^1.0.57",
+        "@dannadori/voice-changer-client-js": "^1.0.58",
         "react": "^18.2.0",
         "react-dom": "^18.2.0"
       },
@@ -3211,9 +3211,9 @@
       }
     },
     "node_modules/@dannadori/voice-changer-client-js": {
-      "version": "1.0.57",
-      "resolved": "https://registry.npmjs.org/@dannadori/voice-changer-client-js/-/voice-changer-client-js-1.0.57.tgz",
-      "integrity": "sha512-JJl4WedfJGZLMsvHFbUQiCi6HavkH7P5JSGQcSI4C8iAh4DmUON/0/R2STlhasxlMiqlFPVqHpqLK/tQTapU8g==",
+      "version": "1.0.58",
+      "resolved": "https://registry.npmjs.org/@dannadori/voice-changer-client-js/-/voice-changer-client-js-1.0.58.tgz",
+      "integrity": "sha512-grdhyhYbAlJScIvYqNga9yD+sCexDL7WZ8oy3Jb6164lV3i+ceUQaSHMDEyhN/p4G73vcwp3QN4ROj1huCl28A==",
       "dependencies": {
         "@types/readable-stream": "^2.3.15",
         "amazon-chime-sdk-js": "^3.10.0",
@@ -13296,9 +13296,9 @@
       }
     },
     "@dannadori/voice-changer-client-js": {
-      "version": "1.0.57",
-      "resolved": "https://registry.npmjs.org/@dannadori/voice-changer-client-js/-/voice-changer-client-js-1.0.57.tgz",
-      "integrity": "sha512-JJl4WedfJGZLMsvHFbUQiCi6HavkH7P5JSGQcSI4C8iAh4DmUON/0/R2STlhasxlMiqlFPVqHpqLK/tQTapU8g==",
+      "version": "1.0.58",
+      "resolved": "https://registry.npmjs.org/@dannadori/voice-changer-client-js/-/voice-changer-client-js-1.0.58.tgz",
+      "integrity": "sha512-grdhyhYbAlJScIvYqNga9yD+sCexDL7WZ8oy3Jb6164lV3i+ceUQaSHMDEyhN/p4G73vcwp3QN4ROj1huCl28A==",
      "requires": {
        "@types/readable-stream": "^2.3.15",
        "amazon-chime-sdk-js": "^3.10.0",

View File

@@ -48,7 +48,7 @@
     "webpack-dev-server": "^4.11.1"
   },
   "dependencies": {
-    "@dannadori/voice-changer-client-js": "^1.0.57",
+    "@dannadori/voice-changer-client-js": "^1.0.58",
     "react": "^18.2.0",
     "react-dom": "^18.2.0"
   }

View File

@@ -22,7 +22,10 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
         }
         props.clientState.serverSetting.setFileUploadSetting({
             ...props.clientState.serverSetting.fileUploadSetting,
-            pyTorchModel: file
+            pyTorchModel: {
+                data: await file.arrayBuffer(),
+                filename: file.name
+            }
         })
     }
     const onPyTorchFileClearClicked = () => {
@@ -39,7 +42,10 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
         }
         props.clientState.serverSetting.setFileUploadSetting({
             ...props.clientState.serverSetting.fileUploadSetting,
-            configFile: file
+            configFile: {
+                data: await file.arrayBuffer(),
+                filename: file.name
+            }
         })
     }
     const onConfigFileClearClicked = () => {
@@ -56,7 +62,10 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
         }
         props.clientState.serverSetting.setFileUploadSetting({
             ...props.clientState.serverSetting.fileUploadSetting,
-            onnxModel: file
+            onnxModel: {
+                data: await file.arrayBuffer(),
+                filename: file.name
+            }
         })
     }
     const onOnnxFileClearClicked = () => {
@@ -91,7 +100,7 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
             <div className="body-row split-3-3-4 left-padding-1 guided">
                 <div className="body-item-title left-padding-2">Config(.json)</div>
                 <div className="body-item-text">
-                    <div>{props.clientState.serverSetting.fileUploadSetting.configFile?.name}</div>
+                    <div>{props.clientState.serverSetting.fileUploadSetting.configFile?.filename}</div>
                 </div>
                 <div className="body-button-container">
                     <div className="body-button" onClick={onConfigFileLoadClicked}>select</div>
@@ -101,7 +110,7 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
             <div className="body-row split-3-3-4 left-padding-1 guided">
                 <div className="body-item-title left-padding-2">Onnx(.onnx)</div>
                 <div className="body-item-text">
-                    <div>{props.clientState.serverSetting.fileUploadSetting.onnxModel?.name}</div>
+                    <div>{props.clientState.serverSetting.fileUploadSetting.onnxModel?.filename}</div>
                 </div>
                 <div className="body-button-container">
                     <div className="body-button" onClick={onOnnxFileLoadClicked}>select</div>
@@ -113,7 +122,7 @@ export const useServerSettingArea = (props: UseServerSettingProps): ServerSettin
             <div className="body-row split-3-3-4 left-padding-1 guided">
                 <div className="body-item-title left-padding-2">PyTorch(.pth)</div>
                 <div className="body-item-text">
-                    <div>{props.clientState.serverSetting.fileUploadSetting.pyTorchModel?.name}</div>
+                    <div>{props.clientState.serverSetting.fileUploadSetting.pyTorchModel?.filename}</div>
                 </div>
                 <div className="body-button-container">
                     <div className="body-button" onClick={onPyTorchFileLoadClicked}>select</div>

View File

@@ -1,12 +1,12 @@
 {
   "name": "@dannadori/voice-changer-client-js",
-  "version": "1.0.57",
+  "version": "1.0.58",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@dannadori/voice-changer-client-js",
-      "version": "1.0.57",
+      "version": "1.0.58",
       "license": "ISC",
       "dependencies": {
         "@types/readable-stream": "^2.3.15",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@dannadori/voice-changer-client-js",
-  "version": "1.0.57",
+  "version": "1.0.58",
   "description": "",
   "main": "dist/index.js",
   "directories": {

View File

@@ -3,7 +3,7 @@ import { ServerInfo, ServerSettingKey } from "./const";
 type FileChunk = {
     hash: number,
-    chunk: Blob
+    chunk: ArrayBuffer
 }

 export class ServerConfigurator {
     private serverUrl = ""
@@ -42,16 +42,16 @@ export class ServerConfigurator {
         return info
     }

-    uploadFile = async (file: File, onprogress: (progress: number, end: boolean) => void) => {
+    uploadFile = async (buf: ArrayBuffer, filename: string, onprogress: (progress: number, end: boolean) => void) => {
         const url = this.serverUrl + "/upload_file"
         onprogress(0, false)

         const size = 1024 * 1024;
         const fileChunks: FileChunk[] = [];
         let index = 0; // index値
-        for (let cur = 0; cur < file.size; cur += size) {
+        for (let cur = 0; cur < buf.byteLength; cur += size) {
             fileChunks.push({
                 hash: index++,
-                chunk: file.slice(cur, cur + size),
+                chunk: buf.slice(cur, cur + size),
             });
         }
@@ -68,8 +68,8 @@
             }
             const p = new Promise<void>((resolve) => {
                 const formData = new FormData();
-                formData.append("file", chunk.chunk);
-                formData.append("filename", `${file.name}_${chunk.hash}`);
+                formData.append("file", new Blob([chunk.chunk]));
+                formData.append("filename", `${filename}_${chunk.hash}`);
                 const request = new Request(url, {
                     method: 'POST',
                     body: formData,
@@ -91,11 +91,11 @@
         return chunkNum
     }

-    concatUploadedFile = async (file: File, chunkNum: number) => {
+    concatUploadedFile = async (filename: string, chunkNum: number) => {
         const url = this.serverUrl + "/concat_uploaded_file"
         await new Promise<void>((resolve) => {
             const formData = new FormData();
-            formData.append("filename", file.name);
+            formData.append("filename", filename);
             formData.append("filenameChunkNum", "" + chunkNum);
             const request = new Request(url, {
                 method: 'POST',
@@ -108,13 +108,13 @@
         })
     }

-    loadModel = async (configFile: File, pyTorchModelFile: File | null, onnxModelFile: File | null) => {
+    loadModel = async (configFilename: string, pyTorchModelFilename: string | null, onnxModelFilename: string | null) => {
         const url = this.serverUrl + "/load_model"
         const info = new Promise<ServerInfo>(async (resolve) => {
             const formData = new FormData();
-            formData.append("pyTorchModelFilename", pyTorchModelFile?.name || "-");
-            formData.append("onnxModelFilename", onnxModelFile?.name || "-");
-            formData.append("configFilename", configFile.name);
+            formData.append("pyTorchModelFilename", pyTorchModelFilename || "-");
+            formData.append("onnxModelFilename", onnxModelFilename || "-");
+            formData.append("configFilename", configFilename);
             const request = new Request(url, {
                 method: 'POST',
                 body: formData,
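
Note: with this change the uploader works on an ArrayBuffer plus an explicit filename instead of a File. A rough sketch of the chunking flow, assuming the same /upload_file endpoint and form fields as above (error handling and progress callbacks omitted; the helper name is illustrative):

// Split the buffer into 1 MiB slices and POST each one as multipart form data.
const uploadInChunks = async (serverUrl: string, buf: ArrayBuffer, filename: string): Promise<number> => {
    const size = 1024 * 1024
    const chunks: ArrayBuffer[] = []
    for (let cur = 0; cur < buf.byteLength; cur += size) {
        chunks.push(buf.slice(cur, cur + size))
    }
    for (let i = 0; i < chunks.length; i++) {
        const formData = new FormData()
        formData.append("file", new Blob([chunks[i]]))   // chunk payload
        formData.append("filename", `${filename}_${i}`)  // e.g. "model.onnx_0"
        await fetch(serverUrl + "/upload_file", { method: "POST", body: formData })
    }
    return chunks.length // passed later to /concat_uploaded_file as filenameChunkNum
}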

View File

@@ -236,14 +236,14 @@ export class VoiceChangerClient {
         }
     }

     // Configurator Method
-    uploadFile = (file: File, onprogress: (progress: number, end: boolean) => void) => {
-        return this.configurator.uploadFile(file, onprogress)
+    uploadFile = (buf: ArrayBuffer, filename: string, onprogress: (progress: number, end: boolean) => void) => {
+        return this.configurator.uploadFile(buf, filename, onprogress)
     }
-    concatUploadedFile = (file: File, chunkNum: number) => {
-        return this.configurator.concatUploadedFile(file, chunkNum)
+    concatUploadedFile = (filename: string, chunkNum: number) => {
+        return this.configurator.concatUploadedFile(filename, chunkNum)
     }
-    loadModel = (configFile: File, pyTorchModelFile: File | null, onnxModelFile: File | null) => {
-        return this.configurator.loadModel(configFile, pyTorchModelFile, onnxModelFile)
+    loadModel = (configFilename: string, pyTorchModelFilename: string | null, onnxModelFilename: string | null) => {
+        return this.configurator.loadModel(configFilename, pyTorchModelFilename, onnxModelFilename)
     }
     updateServerSettings = (key: ServerSettingKey, val: string) => {
         return this.configurator.updateSettings(key, val)
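
Note: the public client API is now filename-based end to end. A hedged usage sketch of the sequence the hook below drives (upload, concatenate, then load), assuming VoiceChangerClient is exported from the package and `client` is an initialized instance:

import { VoiceChangerClient } from "@dannadori/voice-changer-client-js" // assumed export

const uploadAndLoad = async (
    client: VoiceChangerClient,
    config: { data: ArrayBuffer, filename: string },
    onnx: { data: ArrayBuffer, filename: string }
) => {
    // 1. Upload the model in chunks, then ask the server to concatenate them.
    const onnxChunks = await client.uploadFile(onnx.data, onnx.filename, (p, end) => console.log("onnx", p, end))
    await client.concatUploadedFile(onnx.filename, onnxChunks)

    // 2. Same for the config file.
    const cfgChunks = await client.uploadFile(config.data, config.filename, (p, end) => console.log("config", p, end))
    await client.concatUploadedFile(config.filename, cfgChunks)

    // 3. Load by filename; no PyTorch model in this example, so pass null.
    await client.loadModel(config.filename, null, onnx.filename)
}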

View File

@@ -4,16 +4,30 @@ import { VoiceChangerClient } from "../VoiceChangerClient"
 import { useIndexedDB } from "./useIndexedDB"

-export type FileUploadSetting = {
-    pyTorchModel: File | null
-    configFile: File | null
-    onnxModel: File | null
-}
+// export type FileUploadSetting = {
+//     pyTorchModel: File | null
+//     configFile: File | null
+//     onnxModel: File | null
+// }
+
+type ModelData = {
+    data: ArrayBuffer
+    filename: string
+}
+
+export type FileUploadSetting = {
+    pyTorchModel: ModelData | null
+    onnxModel: ModelData | null
+    configFile: ModelData | null
+}

 const InitialFileUploadSetting: FileUploadSetting = {
     pyTorchModel: null,
     configFile: null,
     onnxModel: null,
 }

 export type UseServerSettingProps = {
     voiceChangerClient: VoiceChangerClient | null
 }
@@ -185,10 +199,10 @@ export const useServerSetting = (props: UseServerSettingProps): ServerSettingSta

     // (e) モデルアップロード
     const _uploadFile = useMemo(() => {
-        return async (file: File, onprogress: (progress: number, end: boolean) => void) => {
+        return async (modelData: ModelData, onprogress: (progress: number, end: boolean) => void) => {
             if (!props.voiceChangerClient) return
-            const num = await props.voiceChangerClient.uploadFile(file, onprogress)
-            const res = await props.voiceChangerClient.concatUploadedFile(file, num)
+            const num = await props.voiceChangerClient.uploadFile(modelData.data, modelData.filename, onprogress)
+            const res = await props.voiceChangerClient.concatUploadedFile(modelData.filename, num)
             console.log("uploaded", num, res)
         }
     }, [props.voiceChangerClient])
@@ -203,9 +217,12 @@ export const useServerSetting = (props: UseServerSettingProps): ServerSettingSta
                 return
             }
             if (!props.voiceChangerClient) return
+
             setUploadProgress(0)
             setIsUploading(true)
-            const models = [fileUploadSetting.pyTorchModel, fileUploadSetting.onnxModel].filter(x => { return x != null }) as File[]
+
+            const models = [fileUploadSetting.onnxModel, fileUploadSetting.pyTorchModel].filter(x => { return x != null }) as ModelData[]
+
             for (let i = 0; i < models.length; i++) {
                 const progRate = 1 / models.length
                 const progOffset = 100 * i * progRate
@@ -219,13 +236,59 @@ export const useServerSetting = (props: UseServerSettingProps): ServerSettingSta
                 console.log(progress, end)
             })
-            await props.voiceChangerClient.loadModel(fileUploadSetting.configFile, fileUploadSetting.pyTorchModel, fileUploadSetting.onnxModel)
+            await props.voiceChangerClient.loadModel(fileUploadSetting.configFile.filename, fileUploadSetting.pyTorchModel?.filename || null, fileUploadSetting.onnxModel?.filename || null)
             setUploadProgress(0)
             setIsUploading(false)
             reloadServerInfo()
         }
     }, [fileUploadSetting, props.voiceChangerClient])

+    // const _uploadFile = useMemo(() => {
+    //     return async (file: File, onprogress: (progress: number, end: boolean) => void) => {
+    //         if (!props.voiceChangerClient) return
+    //         const num = await props.voiceChangerClient.uploadFile(file, onprogress)
+    //         const res = await props.voiceChangerClient.concatUploadedFile(file, num)
+    //         console.log("uploaded", num, res)
+    //     }
+    // }, [props.voiceChangerClient])
+
+    // const loadModel = useMemo(() => {
+    //     return async () => {
+    //         if (!fileUploadSetting.pyTorchModel && !fileUploadSetting.onnxModel) {
+    //             alert("PyTorchモデルとONNXモデルのどちらか一つ以上指定する必要があります。")
+    //             return
+    //         }
+    //         if (!fileUploadSetting.configFile) {
+    //             alert("Configファイルを指定する必要があります。")
+    //             return
+    //         }
+    //         if (!props.voiceChangerClient) return
+    //         setUploadProgress(0)
+    //         setIsUploading(true)
+    //         const models = [fileUploadSetting.pyTorchModel, fileUploadSetting.onnxModel].filter(x => { return x != null }) as File[]
+    //         for (let i = 0; i < models.length; i++) {
+    //             const progRate = 1 / models.length
+    //             const progOffset = 100 * i * progRate
+    //             await _uploadFile(models[i], (progress: number, _end: boolean) => {
+    //                 // console.log(progress * progRate + progOffset, end, progRate,)
+    //                 setUploadProgress(progress * progRate + progOffset)
+    //             })
+    //         }
+    //         await _uploadFile(fileUploadSetting.configFile, (progress: number, end: boolean) => {
+    //             console.log(progress, end)
+    //         })
+    //         await props.voiceChangerClient.loadModel(fileUploadSetting.configFile, fileUploadSetting.pyTorchModel, fileUploadSetting.onnxModel)
+    //         setUploadProgress(0)
+    //         setIsUploading(false)
+    //         reloadServerInfo()
+    //     }
+    // }, [fileUploadSetting, props.voiceChangerClient])
+
     const reloadServerInfo = useMemo(() => {
         return async () => {
             if (!props.voiceChangerClient) return
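
Note on the progress math kept from the old implementation: each model gets an equal share of the bar (progRate = 1 / models.length) starting at progOffset = 100 * i * progRate, so per-file progress maps into an overall percentage. A small sketch of that calculation (illustrative helper, not in the repo):

// Map a single file's 0-100 progress into the overall 0-100 bar.
const overallProgress = (fileIndex: number, fileCount: number, fileProgress: number): number => {
    const progRate = 1 / fileCount                 // share of the bar per file
    const progOffset = 100 * fileIndex * progRate  // where this file's share starts
    return fileProgress * progRate + progOffset
}

// With two models: overallProgress(0, 2, 100) === 50 and overallProgress(1, 2, 40) === 70.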