Mirror of https://github.com/neodyland/sbv2-api.git (synced 2025-12-23 07:59:56 +00:00)

Compare commits: commit-b83 ... commit-25c (75 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 84e9118d99 | |
| | 3050cc1e99 | |
| | d5fcacd799 | |
| | 25ca89e341 | |
| | 0c2a397775 | |
| | 470a0348fe | |
| | 9a99b88b00 | |
| | 29f39f0795 | |
| | 9f22694df0 | |
| | 62ba2c802f | |
| | 4f5b936f6f | |
| | 3c8efc716c | |
| | e9ced32b70 | |
| | e7a1575cbc | |
| | 873bbb77b6 | |
| | 1725863fca | |
| | 55f05580e4 | |
| | 320664eae2 | |
| | 87903827fa | |
| | 9b8e9dc39d | |
| | bbc38081b6 | |
| | 0b822f704a | |
| | 132eb6386d | |
| | ee56e9591d | |
| | 3194e599b2 | |
| | 00f4787f6e | |
| | 4b6c72aa51 | |
| | 7db6bb67a4 | |
| | b3c75f973e | |
| | e9529be559 | |
| | a6694b5d81 | |
| | 096859de66 | |
| | dabdc6712f | |
| | 45c3255a91 | |
| | bf39890b3d | |
| | 120bc608d7 | |
| | 2fc547e38b | |
| | 98ddaa3c58 | |
| | 656e405cd7 | |
| | 9d6aa46fdf | |
| | 2fe90c6ede | |
| | 7faba2447b | |
| | 02ac0885e0 | |
| | 1f96b09f3b | |
| | d583c1ca1c | |
| | c135aac852 | |
| | f31fa1d4f9 | |
| | efec7cce14 | |
| | 61914129dc | |
| | 97c63a2e23 | |
| | 3475f47305 | |
| | 5493b91a84 | |
| | bca6d04e7b | |
| | d44ebe873e | |
| | 96b53d42cd | |
| | 9765ef51d2 | |
| | 655be55605 | |
| | e68f58d698 | |
| | 2124fe4650 | |
| | 0217c0a4d5 | |
| | 1de09597f5 | |
| | 38d86c9249 | |
| | ddc132b27b | |
| | 558cd24677 | |
| | 6657b06786 | |
| | 2a8c9bafde | |
| | d7065ac6eb | |
| | 0b1dbe4991 | |
| | 1ad588bfcf | |
| | 9733ba95fa | |
| | 843c16995c | |
| | f0821ea957 | |
| | abc9cec7c7 | |
| | 19e6b7f0e6 | |
| | 451f4497b6 | |
.github/dependabot.yml (vendored, new file, +15)

@@ -0,0 +1,15 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+version: 2
+updates:
+  - package-ecosystem: "cargo" # See documentation for possible values
+    directory: "/" # Location of package manifests
+    schedule:
+      interval: "weekly"
+  - package-ecosystem: "npm" # See documentation for possible values
+    directory: "/" # Location of package manifests
+    schedule:
+      interval: "weekly"
@@ -1,4 +1,4 @@
-name: CI
+name: Build

 on:
   push:

@@ -6,7 +6,6 @@ on:
       - main
     tags:
       - '*'
-  pull_request:
   workflow_dispatch:

 permissions:

@@ -29,7 +28,7 @@ jobs:
       - uses: actions/setup-python@v5
         with:
           python-version: 3.x
-      - run: docker build . -f .github/workflows/CI.Dockerfile --tag ci
+      - run: docker build . -f .github/workflows/build.Dockerfile --tag ci
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:

@@ -111,6 +110,7 @@ jobs:
         with:
           name: wheels-sdist
           path: ./crates/sbv2_bindings/dist
+
   python-wheel:
     name: Wheel Upload
     runs-on: ubuntu-latest
.github/workflows/lint.yml (vendored, new file, +26)

@@ -0,0 +1,26 @@
+name: Lint
+
+on:
+  pull_request:
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        components:
+          - rustfmt
+          - clippy
+    steps:
+      - name: Setup
+        uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          components: ${{ matrix.components }}
+      - name: Format
+        if: ${{ matrix.components == 'rustfmt' }}
+        run: cargo fmt --all -- --check
+      - name: Lint
+        if: ${{ matrix.components == 'clippy' }}
+        run: cargo clippy --all-targets --all-features -- -D warnings
Cargo.lock (generated, 927 lines changed)

File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 [workspace]
-resolver = "2"
+resolver = "3"
 members = ["./crates/sbv2_api", "./crates/sbv2_core", "./crates/sbv2_bindings", "./crates/sbv2_wasm"]

 [workspace.package]

@@ -8,11 +8,11 @@ edition = "2021"
 description = "Style-Bert-VITSの推論ライブラリ"
 license = "MIT"
 readme = "./README.md"
-repository = "https://github.com/tuna2134/sbv2-api"
+repository = "https://github.com/neodyland/sbv2-api"
 documentation = "https://docs.rs/sbv2_core"

 [workspace.dependencies]
-anyhow = "1.0.96"
+anyhow = "1.0.99"
 dotenvy = "0.15.7"
 env_logger = "0.11.6"
 ndarray = "0.16.1"
LICENSE (+1)

@@ -1,6 +1,7 @@
 MIT License

 Copyright (c) 2024 tuna2134
+Copyright (c) 2025- neodyland

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -17,7 +17,7 @@

 ## プログラミングに詳しくない方向け

-[こちら](https://github.com/tuna2134/sbv2-gui?tab=readme-ov-file)を参照してください。
+[こちら](https://github.com/tuna2134/sbv2-gui)を参照してください。

 コマンドやpythonの知識なしで簡単に使えるバージョンです。(できることはほぼ同じ)

@@ -75,7 +75,7 @@ CPUの場合は
 ```sh
 docker run -it --rm -p 3000:3000 --name sbv2 \
 -v ./models:/work/models --env-file .env \
-ghcr.io/tuna2134/sbv2-api:cpu
+ghcr.io/neodyland/sbv2-api:cpu
 ```

 <details>

@@ -90,7 +90,7 @@ CPUの場合は
 ```bash
 docker run --platform linux/amd64 -it --rm -p 3000:3000 --name sbv2 \
 -v ./models:/work/models --env-file .env \
-ghcr.io/tuna2134/sbv2-api:cpu
+ghcr.io/neodyland/sbv2-api:cpu
 ```
 </details>

@@ -99,7 +99,7 @@ CUDAの場合は
 docker run -it --rm -p 3000:3000 --name sbv2 \
 -v ./models:/work/models --env-file .env \
 --gpus all \
-ghcr.io/tuna2134/sbv2-api:cuda
+ghcr.io/neodyland/sbv2-api:cuda
 ```

 ### 起動確認
@@ -16,8 +16,8 @@ env_logger.workspace = true
 log = "0.4.22"
 sbv2_core = { version = "0.2.0-alpha6", path = "../sbv2_core", features = ["aivmx"] }
 serde = { version = "1.0.210", features = ["derive"] }
-tokio = { version = "1.40.0", features = ["full"] }
-utoipa = { version = "5.0.0", features = ["axum_extras"] }
+tokio = { version = "1.47.1", features = ["full"] }
+utoipa = { version = "5.4.0", features = ["axum_extras"] }
 utoipa-scalar = { version = "0.3.0", features = ["axum"] }

 [features]
@@ -53,12 +53,16 @@ struct SynthesizeRequest {
     text: String,
     ident: String,
     #[serde(default = "sdp_default")]
+    #[schema(example = 0.0_f32)]
     sdp_ratio: f32,
     #[serde(default = "length_default")]
+    #[schema(example = 1.0_f32)]
     length_scale: f32,
     #[serde(default = "style_id_default")]
+    #[schema(example = 0_i32)]
     style_id: i32,
     #[serde(default = "speaker_id_default")]
+    #[schema(example = 0_i64)]
     speaker_id: i64,
 }

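The `#[schema(example = ...)]` attributes above only affect the generated OpenAPI documentation; the existing `serde` defaults already fill in missing fields. As a hedged illustration only, a JSON body shaped like `SynthesizeRequest` with those example values might look as follows (the `ident` value is a placeholder and the endpoint that accepts it is not shown in this diff; `serde_json` is assumed to be available, as it is elsewhere in the workspace):

```rust
// Illustration only: builds a JSON body matching the SynthesizeRequest fields.
// "some-model" is a hypothetical ident, not one shipped with the project.
fn main() {
    let body = serde_json::json!({
        "text": "こんにちは",
        "ident": "some-model",
        "sdp_ratio": 0.0_f32,
        "length_scale": 1.0_f32,
        "style_id": 0,
        "speaker_id": 0
    });
    println!("{body}");
}
```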
@@ -16,7 +16,7 @@ crate-type = ["cdylib"]
 [dependencies]
 anyhow.workspace = true
 ndarray.workspace = true
-pyo3 = { version = "0.24.0", features = ["anyhow"] }
+pyo3 = { version = "0.25.1", features = ["anyhow"] }
 sbv2_core = { path = "../sbv2_core", features = ["std"], default-features = false }

 [features]
@@ -136,6 +136,7 @@ impl TTSModel {
     /// -------
     /// voice_data : bytes
     ///     音声データ
+    #[allow(clippy::too_many_arguments)]
     fn synthesize<'p>(
         &'p mut self,
         py: Python<'p>,
@@ -16,16 +16,16 @@ env_logger.workspace = true
 hound = "3.5.1"
 jpreprocess = { version = "0.12.0", features = ["naist-jdic"] }
 ndarray.workspace = true
-npyz = { version = "0.8.3", optional = true }
-num_cpus = "1.16.0"
+npyz = { version = "0.8.4", optional = true }
+num_cpus = "1.17.0"
 once_cell.workspace = true
 ort = { git = "https://github.com/pykeio/ort.git", version = "2.0.0-rc.9", optional = true }
 regex = "1.10.6"
 serde = { version = "1.0.210", features = ["derive"] }
-serde_json = "1.0.128"
+serde_json = "1.0.142"
 tar = "0.4.41"
-thiserror = "2.0.11"
-tokenizers = { version = "0.21.0", default-features = false }
+thiserror = "2.0.15"
+tokenizers = { version = "0.21.4", default-features = false }
 zstd = "0.13.2"

 [features]

@@ -44,4 +44,4 @@ base64 = ["dep:base64"]

 [build-dependencies]
 dirs = "6.0.0"
-ureq = "3.0.6"
+ureq = "3.1.0"
@@ -5,21 +5,27 @@ use std::io::copy;
 use std::path::PathBuf;

 fn main() -> Result<(), Box<dyn std::error::Error>> {
-    let static_path = home_dir().unwrap().join(".cache/sbv2/all.bin");
+    let static_dir = home_dir().unwrap().join(".cache/sbv2");
+    let static_path = static_dir.join("all.bin");
     let out_path = PathBuf::from(&env::var("OUT_DIR").unwrap()).join("all.bin");
     println!("cargo:rerun-if-changed=build.rs");
     if static_path.exists() {
-        if fs::hard_link(&static_path, &out_path).is_err() {
-            fs::copy(static_path, out_path).unwrap();
-        };
+        println!("cargo:info=Dictionary file already exists, skipping download.");
     } else {
         println!("cargo:warning=Downloading dictionary file...");
         let mut response =
             ureq::get("https://huggingface.co/neody/sbv2-api-assets/resolve/main/dic/all.bin")
                 .call()?;
         let mut response = response.body_mut().as_reader();
-        let mut file = fs::File::create(&out_path)?;
+        if !static_dir.exists() {
+            fs::create_dir_all(static_dir)?;
+        }
+        let mut file = fs::File::create(&static_path)?;
         copy(&mut response, &mut file)?;
     }
+    if !out_path.exists() && fs::hard_link(&static_path, &out_path).is_err() {
+        println!("cargo:warning=Failed to create hard link, copying instead.");
+        fs::copy(static_path, out_path)?;
+    }
     Ok(())
 }
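The build.rs hunk above keeps the downloaded dictionary in a per-user cache (`~/.cache/sbv2/all.bin`) and then exposes it to the build through a hard link, falling back to a plain copy. A minimal standalone sketch of that link-or-copy step using only `std`; the paths and the `link_or_copy` name are illustrative, not part of the crate:

```rust
use std::fs;
use std::path::Path;

// Hard links are cheap but can fail (e.g. across filesystems); fall back to a copy.
fn link_or_copy(cached: &Path, out: &Path) -> std::io::Result<()> {
    if out.exists() {
        return Ok(()); // already materialized by a previous build
    }
    if fs::hard_link(cached, out).is_err() {
        fs::copy(cached, out)?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    let cache = std::env::temp_dir().join("sbv2_demo_cache/all.bin");
    let out = std::env::temp_dir().join("sbv2_demo_out/all.bin");
    fs::create_dir_all(cache.parent().unwrap())?;
    fs::create_dir_all(out.parent().unwrap())?;
    fs::write(&cache, b"dictionary bytes")?; // stand-in for the downloaded file
    link_or_copy(&cache, &out)
}
```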
@@ -14,11 +14,9 @@ pub fn predict(
             "attention_mask" => TensorRef::from_array_view((vec![1, attention_masks.len() as i64], attention_masks.as_slice()))?,
         }
     )?;
-
     let output = outputs["output"]
-        .try_extract_tensor::<f32>()?
+        .try_extract_array::<f32>()?
         .into_dimensionality::<Ix2>()?
         .to_owned();
-
     Ok(output)
 }
@@ -28,6 +28,8 @@ pub enum Error {
     Base64Error(#[from] base64::DecodeError),
     #[error("other")]
     OtherError(String),
+    #[error("Style error: {0}")]
+    StyleError(String),
 }

 pub type Result<T> = std::result::Result<T, Error>;
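The new `StyleError` variant above follows the `thiserror` pattern already used by this enum. A small, self-contained sketch of how such a variant formats; `DemoError` is a stand-in, not the crate's `Error`, and `thiserror` is assumed to be available (it is a dependency of sbv2_core per the manifest diff above):

```rust
use thiserror::Error;

#[derive(Error, Debug)]
enum DemoError {
    // Mirrors the shape of the new variant: the payload is interpolated into the message.
    #[error("Style error: {0}")]
    StyleError(String),
}

fn main() {
    let e = DemoError::StyleError("Invalid style ID: 7. Max ID: 3".to_string());
    println!("{e}"); // -> Style error: Invalid style ID: 7. Max ID: 3
}
```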
@@ -81,7 +81,7 @@ fn phone_tone_to_kana(phones: Vec<String>, tones: Vec<i32>) -> Vec<(String, i32)
     let tones = &tones[1..];
     let mut results = Vec::new();
     let mut current_mora = String::new();
-    for ((phone, next_phone), (&tone, &next_tone)) in phones
+    for ((phone, _next_phone), (&tone, &next_tone)) in phones
         .iter()
         .zip(phones.iter().skip(1))
         .zip(tones.iter().zip(tones.iter().skip(1)))

@@ -123,24 +123,24 @@ impl JTalkProcess {
             .map(|(_letter, tone)| *tone)
             .collect();
         if tone_values.len() == 1 {
-            assert!(tone_values == hash_set![0], "{:?}", tone_values);
+            assert!(tone_values == hash_set![0], "{tone_values:?}");
             Ok(phone_tone_list)
         } else if tone_values.len() == 2 {
             if tone_values == hash_set![0, 1] {
-                return Ok(phone_tone_list);
+                Ok(phone_tone_list)
             } else if tone_values == hash_set![-1, 0] {
-                return Ok(phone_tone_list
+                Ok(phone_tone_list
                     .iter()
                     .map(|x| {
                         let new_tone = if x.1 == -1 { 0 } else { 1 };
                         (x.0.clone(), new_tone)
                     })
-                    .collect());
+                    .collect())
             } else {
-                return Err(Error::ValueError("Invalid tone values 0".to_string()));
+                Err(Error::ValueError("Invalid tone values 0".to_string()))
             }
         } else {
-            return Err(Error::ValueError("Invalid tone values 1".to_string()));
+            Err(Error::ValueError("Invalid tone values 1".to_string()))
         }
     }

@@ -226,12 +226,12 @@ impl JTalkProcess {
             } else if PUNCTUATIONS.contains(&phone.as_str()) {
                 result.push((phone, 0));
             } else {
-                println!("phones {:?}", phone_with_punct);
-                println!("phone_tone_list: {:?}", phone_tone_list);
-                println!("result: {:?}", result);
-                println!("tone_index: {:?}", tone_index);
-                println!("phone: {:?}", phone);
-                return Err(Error::ValueError(format!("Mismatched phoneme: {}", phone)));
+                println!("phones {phone_with_punct:?}");
+                println!("phone_tone_list: {phone_tone_list:?}");
+                println!("result: {result:?}");
+                println!("tone_index: {tone_index:?}");
+                println!("phone: {phone:?}");
+                return Err(Error::ValueError(format!("Mismatched phoneme: {phone}")));
             }
         }

@@ -276,8 +276,7 @@ impl JTalkProcess {
         }
         if !KATAKANA_PATTERN.is_match(&text) {
             return Err(Error::ValueError(format!(
-                "Input must be katakana only: {}",
-                text
+                "Input must be katakana only: {text}"
             )));
         }

@@ -285,7 +284,7 @@ impl JTalkProcess {
             let mora = mora.to_string();
             let (consonant, vowel) = MORA_KATA_TO_MORA_PHONEMES.get(&mora).unwrap();
             if consonant.is_none() {
-                text = text.replace(&mora, &format!(" {}", vowel));
+                text = text.replace(&mora, &format!(" {vowel}"));
             } else {
                 text = text.replace(
                     &mora,

@@ -319,7 +318,7 @@ impl JTalkProcess {
             let (string, pron) = self.parse_to_string_and_pron(parts.clone());
             let mut yomi = pron.replace('’', "");
             let word = replace_punctuation(string);
-            assert!(!yomi.is_empty(), "Empty yomi: {}", word);
+            assert!(!yomi.is_empty(), "Empty yomi: {word}");
             if yomi == "、" {
                 if !word
                     .chars()

@@ -330,7 +329,7 @@ impl JTalkProcess {
                     yomi = word.clone();
                 }
             } else if yomi == "?" {
-                assert!(word == "?", "yomi `?` comes from: {}", word);
+                assert!(word == "?", "yomi `?` comes from: {word}");
                 yomi = "?".to_string();
             }
             seq_text.push(word);
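The jtalk.rs hunks above are mostly mechanical clippy fixes: needless `return`s dropped and format strings switched to inlined arguments (stable since Rust 1.58). A tiny comparison of the two equivalent styles:

```rust
fn main() {
    let phone = "ky";
    // positional argument (old style in the hunks above)
    println!("phone: {:?}", phone);
    // captured identifier (new style); output is identical
    println!("phone: {phone:?}");
}
```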
@@ -21,10 +21,7 @@ pub fn load_model<P: AsRef<[u8]>>(model_file: P, bert: bool) -> Result<Session>
     #[cfg(feature = "cuda")]
     {
         #[allow(unused_mut)]
-        let mut cuda = ort::execution_providers::CUDAExecutionProvider::default()
-            .with_conv_algorithm_search(
-                ort::execution_providers::cuda::CUDAExecutionProviderCuDNNConvAlgoSearch::Default,
-            );
+        let mut cuda = ort::execution_providers::CUDAExecutionProvider::default();
         #[cfg(feature = "cuda_tf32")]
         {
             cuda = cuda.with_tf32(true);

@@ -101,11 +98,9 @@ pub fn synthesize(
         "noise_scale" => noise_scale,
         "noise_scale_w" => noise_scale_w,
     })?;
-
     let audio_array = outputs["output"]
-        .try_extract_tensor::<f32>()?
+        .try_extract_array::<f32>()?
         .into_dimensionality::<Ix3>()?
         .to_owned();
-
     Ok(audio_array)
 }
@@ -1,4 +1,4 @@
-use crate::error::Result;
+use crate::error::{Error, Result};
 use ndarray::{s, Array1, Array2};
 use serde::Deserialize;

@@ -21,6 +21,18 @@ pub fn get_style_vector(
     style_id: i32,
     weight: f32,
 ) -> Result<Array1<f32>> {
+    if style_vectors.shape().len() != 2 {
+        return Err(Error::StyleError(
+            "Invalid shape for style vectors".to_string(),
+        ));
+    }
+    if style_id < 0 || style_id >= style_vectors.shape()[0] as i32 {
+        return Err(Error::StyleError(format!(
+            "Invalid style ID: {}. Max ID: {}",
+            style_id,
+            style_vectors.shape()[0] - 1
+        )));
+    }
     let mean = style_vectors.slice(s![0, ..]).to_owned();
     let style_vector = style_vectors.slice(s![style_id as usize, ..]).to_owned();
     let diff = (style_vector - &mean) * weight;
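The `get_style_vector` hunk above adds bounds checks before slicing the style matrix. A standalone sketch of the same idea with the error type simplified to `String`; because the sketch takes `Array2` directly, the separate 2-D shape check is omitted, and the final `mean + diff` combination is an assumption (the hunk only shows the `diff` computation):

```rust
use ndarray::{array, s, Array1, Array2};

fn style_vector(style_vectors: &Array2<f32>, style_id: i32, weight: f32) -> Result<Array1<f32>, String> {
    // Reject ids outside the matrix before indexing, as in the hunk above.
    if style_id < 0 || style_id >= style_vectors.shape()[0] as i32 {
        return Err(format!(
            "Invalid style ID: {}. Max ID: {}",
            style_id,
            style_vectors.shape()[0] - 1
        ));
    }
    let mean = style_vectors.slice(s![0, ..]).to_owned();
    let chosen = style_vectors.slice(s![style_id as usize, ..]).to_owned();
    let diff = (chosen - &mean) * weight;
    Ok(mean + diff) // assumed combination step
}

fn main() {
    let vectors: Array2<f32> = array![[0.0, 0.0], [1.0, 2.0]];
    println!("{:?}", style_vector(&vectors, 1, 0.5));
    println!("{:?}", style_vector(&vectors, 5, 0.5)); // Err: out of range
}
```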
@@ -214,6 +214,7 @@ impl TTSModelHolder {
         )
     }

+    #[allow(clippy::type_complexity)]
     pub fn parse_text_neo(
         &mut self,
         text: String,

@@ -239,39 +240,43 @@ impl TTSModelHolder {
     }
     fn find_and_load_model<I: Into<TTSIdent>>(&mut self, ident: I) -> Result<bool> {
         let ident = ident.into();
-        let (bytes, style_vectors) = {
-            let model = self
-                .models
-                .iter()
-                .find(|m| m.ident == ident)
-                .ok_or(Error::ModelNotFoundError(ident.to_string()))?;
-            if model.vits2.is_some() {
-                return Ok(true);
-            }
-            (model.bytes.clone().unwrap(), model.style_vectors.clone())
-        };
-        self.unload(ident.clone());
-        let s = model::load_model(&bytes, false)?;
-        if let Some(max) = self.max_loaded_models {
-            if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
-                self.unload(self.models.first().unwrap().ident.clone());
-            }
-        }
-        self.models.push(TTSModel {
-            bytes: Some(bytes.to_vec()),
-            vits2: Some(s),
-            style_vectors,
-            ident: ident.clone(),
-        });
-        let model = self
+        // Locate target model entry
+        let target_index = self
             .models
             .iter()
-            .find(|m| m.ident == ident)
+            .position(|m| m.ident == ident)
             .ok_or(Error::ModelNotFoundError(ident.to_string()))?;
-        if model.vits2.is_some() {
+        // Already loaded
+        if self.models[target_index].vits2.is_some() {
             return Ok(true);
         }
-        Err(Error::ModelNotFoundError(ident.to_string()))
+        // Get bytes to build a Session
+        let bytes = self.models[target_index]
+            .bytes
+            .clone()
+            .ok_or(Error::ModelNotFoundError(ident.to_string()))?;
+
+        // Enforce max loaded models by evicting a different loaded model's session, not removing the entry
+        if let Some(max) = self.max_loaded_models {
+            let loaded_count = self.models.iter().filter(|m| m.vits2.is_some()).count();
+            if loaded_count >= max {
+                if let Some(evict_index) = self
+                    .models
+                    .iter()
+                    .position(|m| m.vits2.is_some() && m.ident != ident)
+                {
+                    // Drop only the session to free memory; keep bytes/style for future reload
+                    self.models[evict_index].vits2 = None;
+                }
+            }
+        }
+
+        // Build and set session in-place for the target model
+        let s = model::load_model(&bytes, false)?;
+        self.models[target_index].vits2 = Some(s);
+        Ok(true)
     }

     /// Get style vector by style id and weight
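The rewritten `find_and_load_model` above keeps every model entry in place and only populates or drops its `vits2` session, evicting another loaded model's session when `max_loaded_models` is reached. A toy sketch of that policy with stand-in types; `Entry`, `load`, and the `Vec<u8>` "session" are illustrative, not the crate's API:

```rust
struct Entry {
    ident: String,
    bytes: Vec<u8>,
    session: Option<Vec<u8>>, // stand-in for a decoded ONNX session
}

fn load(entries: &mut [Entry], ident: &str, max_loaded: Option<usize>) -> Result<(), String> {
    let target = entries
        .iter()
        .position(|e| e.ident == ident)
        .ok_or_else(|| format!("model not found: {ident}"))?;
    if entries[target].session.is_some() {
        return Ok(()); // already loaded
    }
    if let Some(max) = max_loaded {
        let loaded = entries.iter().filter(|e| e.session.is_some()).count();
        if loaded >= max {
            // Evict a different loaded model's session; keep its bytes for a later reload.
            if let Some(evict) = entries
                .iter()
                .position(|e| e.session.is_some() && e.ident != ident)
            {
                entries[evict].session = None;
            }
        }
    }
    let session = entries[target].bytes.clone(); // "decode" the session
    entries[target].session = Some(session);
    Ok(())
}

fn main() {
    let mut entries = vec![
        Entry { ident: "a".into(), bytes: vec![1], session: Some(vec![1]) },
        Entry { ident: "b".into(), bytes: vec![2], session: None },
    ];
    load(&mut entries, "b", Some(1)).unwrap(); // evicts "a", loads "b"
    assert!(entries[0].session.is_none() && entries[1].session.is_some());
}
```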
@@ -173,8 +173,15 @@ pub fn parse_text_blocking(
 }

 pub fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
+    // If SBV2_FORCE_STEREO is set ("1"/"true"), duplicate mono to stereo
+    let force_stereo = std::env::var("SBV2_FORCE_STEREO")
+        .ok()
+        .map(|v| matches!(v.as_str(), "1" | "true" | "TRUE" | "True"))
+        .unwrap_or(false);
+
+    let channels: u16 = if force_stereo { 2 } else { 1 };
     let spec = WavSpec {
-        channels: 1,
+        channels,
         sample_rate: 44100,
         bits_per_sample: 32,
         sample_format: SampleFormat::Float,

@@ -183,8 +190,16 @@ pub fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
     let mut writer = WavWriter::new(&mut cursor, spec)?;
     for i in 0..audio_array.shape()[0] {
         let output = audio_array.slice(s![i, 0, ..]).to_vec();
-        for sample in output {
-            writer.write_sample(sample)?;
+        if force_stereo {
+            for sample in output {
+                // Write to Left and Right channels
+                writer.write_sample(sample)?;
+                writer.write_sample(sample)?;
+            }
+        } else {
+            for sample in output {
+                writer.write_sample(sample)?;
+            }
         }
     }
     writer.finalize()?;
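The `array_to_vec` change above gates stereo output on the `SBV2_FORCE_STEREO` environment variable, writing each mono sample to both the left and right channels. A minimal sketch of just the env-var gate, using only `std` (the standalone `force_stereo` function name is illustrative):

```rust
fn force_stereo() -> bool {
    std::env::var("SBV2_FORCE_STEREO")
        .ok()
        .map(|v| matches!(v.as_str(), "1" | "true" | "TRUE" | "True"))
        .unwrap_or(false)
}

fn main() {
    // e.g. SBV2_FORCE_STEREO=1 ./app  ->  "stereo: true"
    println!("stereo: {}", force_stereo());
}
```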
@@ -1,2 +1,2 @@
 # StyleBertVITS2 wasm
-refer to https://github.com/tuna2134/sbv2-api
+refer to https://github.com/neodyland/sbv2-api
@@ -11,6 +11,7 @@
 	},
 	"keywords": [],
 	"author": "tuna2134",
+	"contributes": ["neodyland"],
 	"license": "MIT",
 	"devDependencies": {
 		"@biomejs/biome": "^1.9.4",
scripts/convert/.python-version (new file, +1)

@@ -0,0 +1 @@
+3.11
@@ -1,5 +1,6 @@
-style-bert-vits2
+git+https://github.com/neodyland/style-bert-vits2-ref
 onnxsim
 numpy<2
 zstandard
 onnxruntime
+cmake<4
@@ -2,8 +2,16 @@ FROM rust AS builder
 WORKDIR /work
 COPY . .
 RUN cargo build -r --bin sbv2_api
-FROM gcr.io/distroless/cc-debian12
+FROM ubuntu AS upx
 WORKDIR /work
+RUN apt update && apt-get install -y upx binutils
 COPY --from=builder /work/target/release/sbv2_api /work/main
 COPY --from=builder /work/target/release/*.so /work
-CMD ["/work/main"]
+RUN upx --best --lzma /work/main
+RUN find /work -maxdepth 1 -name "*.so" -exec strip --strip-unneeded {} +
+RUN find /work -maxdepth 1 -name "*.so" -exec upx --best --lzma {} +
+FROM gcr.io/distroless/cc-debian12
+WORKDIR /work
+COPY --from=upx /work/main /work/main
+COPY --from=upx /work/*.so /work
+CMD ["/work/main"]
@@ -2,9 +2,16 @@ FROM rust AS builder
 WORKDIR /work
 COPY . .
 RUN cargo build -r --bin sbv2_api -F cuda,cuda_tf32
-FROM nvidia/cuda:12.3.2-cudnn9-runtime-ubuntu22.04
+FROM ubuntu AS upx
 WORKDIR /work
+RUN apt update && apt-get install -y upx binutils
 COPY --from=builder /work/target/release/sbv2_api /work/main
 COPY --from=builder /work/target/release/*.so /work
+RUN upx --best --lzma /work/main
+RUN find /work -maxdepth 1 -name "*.so" -exec strip --strip-unneeded {} +
+FROM nvidia/cuda:12.3.2-cudnn9-runtime-ubuntu22.04
+WORKDIR /work
+COPY --from=upx /work/main /work/main
+COPY --from=upx /work/*.so /work
 ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/work
 CMD ["/work/main"]
@@ -1,3 +1,3 @@
 docker run -it --rm -p 3000:3000 --name sbv2 \
 -v ./models:/work/models --env-file .env \
-ghcr.io/tuna2134/sbv2-api:cpu
+ghcr.io/neodyland/sbv2-api:cpu
@@ -1,4 +1,4 @@
 docker run -it --rm -p 3000:3000 --name sbv2 \
 -v ./models:/work/models --env-file .env \
 --gpus all \
-ghcr.io/tuna2134/sbv2-api:cuda
+ghcr.io/neodyland/sbv2-api:cuda