Compare commits

...

224 Commits

Author SHA1 Message Date
tuna2134@コマリン親衛隊
adca252272 Merge pull request #164 from tuna2134/renovate/tokio-1.x-lockfile
fix(deps): update rust crate tokio to v1.43.0
2025-01-20 08:55:49 +09:00
tuna2134@コマリン親衛隊
5c74773754 Merge pull request #166 from tuna2134/renovate/pyo3-0.x-lockfile
fix(deps): update rust crate pyo3 to v0.23.4
2025-01-20 08:55:38 +09:00
tuna2134@コマリン親衛隊
2e5edcfb32 Merge pull request #168 from tuna2134/renovate/serde_json-1.x-lockfile
fix(deps): update rust crate serde_json to v1.0.137
2025-01-20 08:55:25 +09:00
tuna2134@コマリン親衛隊
184080bec8 Merge pull request #169 from tuna2134/renovate/utoipa-monorepo
fix(deps): update rust crate utoipa-scalar to 0.3.0
2025-01-20 08:55:13 +09:00
renovate[bot]
d83fcb9f2c fix(deps): update rust crate utoipa-scalar to 0.3.0 2025-01-19 23:52:40 +00:00
renovate[bot]
7095e0ea89 fix(deps): update rust crate serde_json to v1.0.137 2025-01-19 23:51:54 +00:00
tuna2134@コマリン親衛隊
aa07496a08 Merge pull request #163 from tuna2134/renovate/axum-monorepo
fix(deps): update rust crate axum to 0.8.0
2025-01-20 08:51:27 +09:00
tuna2134@コマリン親衛隊
bf276f51e7 Merge pull request #167 from tuna2134/renovate/log-0.x-lockfile
fix(deps): update rust crate log to v0.4.25
2025-01-20 08:51:14 +09:00
tuna2134@コマリン親衛隊
cc664fae2d Update CI.yml 2025-01-20 08:43:05 +09:00
tuna2134@コマリン親衛隊
71ec658772 Update CI.yml 2025-01-20 07:55:03 +09:00
renovate[bot]
5ea2dcff0f fix(deps): update rust crate log to v0.4.25 2025-01-14 12:51:47 +00:00
renovate[bot]
a9ea47dc51 fix(deps): update rust crate pyo3 to v0.23.4 2025-01-11 22:40:29 +00:00
コマリン親衛隊
dff939091c Merge pull request #165 from tuna2134/renovate/log-0.x-lockfile
fix(deps): update rust crate log to v0.4.24
2025-01-11 11:55:02 +09:00
renovate[bot]
8a28a4e7a5 fix(deps): update rust crate log to v0.4.24 2025-01-11 02:50:24 +00:00
コマリン親衛隊
21f845a799 Merge pull request #162 from tuna2134/renovate/serde_json-1.x-lockfile
fix(deps): update rust crate serde_json to v1.0.135
2025-01-09 09:40:07 +09:00
renovate[bot]
69015bdf81 fix(deps): update rust crate tokio to v1.43.0 2025-01-08 18:20:47 +00:00
renovate[bot]
c6e5b73128 fix(deps): update rust crate axum to 0.8.0 2025-01-08 15:24:14 +00:00
renovate[bot]
4ff9a38a80 fix(deps): update rust crate serde_json to v1.0.135 2025-01-08 15:24:08 +00:00
コマリン親衛隊
20cc0573b5 Merge pull request #161 from tuna2134/renovate/utoipa-monorepo
fix(deps): update utoipa monorepo
2025-01-06 13:28:46 +09:00
renovate[bot]
4b932d568d fix(deps): update utoipa monorepo 2025-01-06 00:39:56 +00:00
コマリン親衛隊
6237cd0fec Merge pull request #159 from tuna2134/renovate/serde-monorepo
fix(deps): update rust crate serde to v1.0.217
2024-12-28 13:09:10 +09:00
renovate[bot]
35fabdf681 fix(deps): update rust crate serde to v1.0.217 2024-12-27 22:05:25 +00:00
コマリン親衛隊
f09343c97f Merge pull request #155 from tuna2134/renovate/env_logger-0.x-lockfile
chore(deps): update rust crate env_logger to v0.11.6
2024-12-23 23:38:00 +09:00
コマリン親衛隊
f2570d89d0 Merge pull request #158 from tuna2134/renovate/anyhow-1.x-lockfile
chore(deps): update rust crate anyhow to v1.0.95
2024-12-22 21:52:38 +09:00
renovate[bot]
ac2a09d6af chore(deps): update rust crate anyhow to v1.0.95 2024-12-22 12:39:36 +00:00
コマリン親衛隊
c6eaf9cb9f Merge pull request #156 from tuna2134/renovate/serde_json-1.x-lockfile
fix(deps): update rust crate serde_json to v1.0.134
2024-12-22 10:30:46 +09:00
renovate[bot]
f2395096ca fix(deps): update rust crate serde_json to v1.0.134 2024-12-21 21:03:32 +00:00
renovate[bot]
3f6f4ccb6f chore(deps): update rust crate env_logger to v0.11.6 2024-12-20 20:48:36 +00:00
コマリン親衛隊
67eba8ee6c Merge pull request #152 from tuna2134/renovate/utoipa-monorepo
fix(deps): update rust crate utoipa to v5.3.0
2024-12-20 09:03:53 +09:00
renovate[bot]
0aa1bc8733 fix(deps): update rust crate utoipa to v5.3.0 2024-12-19 18:06:44 +00:00
コマリン親衛隊
d1970d99be Merge pull request #151 from tuna2134/renovate/pyo3-0.x-lockfile
fix(deps): update rust crate pyo3 to v0.23.3
2024-12-19 08:30:37 +09:00
コマリン親衛隊
fddb35e592 Update CI.yml 2024-12-19 08:28:53 +09:00
renovate[bot]
e26715c809 fix(deps): update rust crate pyo3 to v0.23.3 2024-12-18 23:20:53 +00:00
コマリン親衛隊
26aa4b7df0 Merge pull request #150 from tuna2134/next
Next
2024-12-19 08:19:48 +09:00
コマリン親衛隊
de18846280 Update sbv2.rs 2024-12-19 07:50:13 +09:00
コマリン親衛隊
38c2e69648 Merge pull request #133 from tuna2134/renovate/pyo3-0.x
fix(deps): update rust crate pyo3 to 0.23.0
2024-12-19 07:47:03 +09:00
コマリン親衛隊
593dbaf19d Merge pull request #144 from tuna2134/renovate/tokenizers-0.x
fix(deps): update rust crate tokenizers to 0.21.0
2024-12-19 07:44:45 +09:00
コマリン親衛隊
bf44b07be1 Merge pull request #146 from tuna2134/renovate/tokio-1.x-lockfile
fix(deps): update rust crate tokio to v1.42.0
2024-12-19 07:44:30 +09:00
コマリン親衛隊
102a8eb065 Merge pull request #149 from tuna2134/renovate/serde-monorepo
fix(deps): update rust crate serde to v1.0.216
2024-12-19 07:44:21 +09:00
renovate[bot]
68edb3187f fix(deps): update rust crate serde to v1.0.216 2024-12-11 03:08:44 +00:00
コマリン親衛隊
4a81a06faf Merge pull request #147 from tuna2134/renovate/anyhow-1.x-lockfile
chore(deps): update rust crate anyhow to v1.0.94
2024-12-07 14:27:55 +09:00
renovate[bot]
caf541ef65 chore(deps): update rust crate anyhow to v1.0.94 2024-12-03 23:23:13 +00:00
renovate[bot]
05c3846b7b fix(deps): update rust crate tokio to v1.42.0 2024-12-03 18:00:26 +00:00
renovate[bot]
1b2054c4b8 fix(deps): update rust crate tokenizers to 0.21.0 2024-11-27 14:26:20 +00:00
コマリン親衛隊
a7fbfa2017 Merge pull request #138 from tuna2134/aivmx
support aivmx
2024-11-20 16:10:06 +09:00
tuna2134
db09b73b32 support aivmx 2024-11-20 07:01:43 +00:00
tuna2134
843ef36148 Merge branch 'main' of https://github.com/tuna2134/sbv2-api into aivmx 2024-11-20 04:15:44 +00:00
コマリン親衛隊
aa7fc2e3b0 Delete convert/LICENSE 2024-11-20 13:13:17 +09:00
コマリン親衛隊
fc4a79c111 Create LICENSE 2024-11-20 13:12:40 +09:00
コマリン親衛隊
4db7f49fa5 Update and rename LICENSE to convert/LICENSE 2024-11-20 13:11:58 +09:00
tuna2134
edee0710aa support noise_scale 2024-11-20 02:53:14 +00:00
tuna2134
9bcbd496e5 fix 2024-11-20 02:42:33 +00:00
tuna2134
90b3ba2e40 fix bug 2024-11-20 02:42:19 +00:00
tuna2134
9ceec03bd0 fix bug 2024-11-20 02:39:38 +00:00
tuna2134
5e9df65656 add aivmx test 2024-11-20 02:36:42 +00:00
tuna2134
2eda2fe9ca fix 2024-11-20 02:14:59 +00:00
tuna2134
9c9119a107 support aivmx 2024-11-20 01:42:04 +00:00
コマリン親衛隊
2c1a1dffc0 Merge pull request #135 from tuna2134/renovate/serde_json-1.x-lockfile
fix(deps): update rust crate serde_json to v1.0.133
2024-11-17 14:50:23 +09:00
renovate[bot]
ed7bf53b89 fix(deps): update rust crate serde_json to v1.0.133 2024-11-17 03:44:30 +00:00
コマリン親衛隊
4375df2689 Merge pull request #134 from tuna2134/renovate/axum-0.x-lockfile
fix(deps): update rust crate axum to v0.7.9
2024-11-17 12:43:38 +09:00
renovate[bot]
789cef74ce fix(deps): update rust crate axum to v0.7.9 2024-11-16 22:43:20 +00:00
コマリン親衛隊
5b403a2255 Merge pull request #132 from tuna2134/renovate/axum-0.x-lockfile
fix(deps): update rust crate axum to v0.7.8
2024-11-16 09:22:17 +09:00
renovate[bot]
62653ec1c3 fix(deps): update rust crate pyo3 to 0.23.0 2024-11-15 23:06:13 +00:00
renovate[bot]
83076227e7 fix(deps): update rust crate axum to v0.7.8 2024-11-15 18:22:03 +00:00
tuna2134
f90904a337 fix version 2024-11-13 12:02:36 +00:00
tuna2134
4e0c8591cd fix 2024-11-13 12:00:59 +00:00
コマリン親衛隊
997b562682 Merge pull request #131 from tuna2134/add-spealer
Add speaker selection
2024-11-13 20:58:11 +09:00
tuna2134
fbd62315d0 clippy 2024-11-13 11:46:47 +00:00
tuna2134
060af0c187 format 2024-11-13 11:43:52 +00:00
tuna2134
b76738f467 add speaker id code 2024-11-13 11:39:05 +00:00
コマリン親衛隊
8598167114 Merge pull request #130 from tuna2134/tuna2134-patch-1
Allow specifying the Style ID
2024-11-13 11:46:17 +09:00
tuna2134
001f61bb6a fix types 2024-11-13 02:24:09 +00:00
コマリン親衛隊
9b9962ed29 Allow specifying the Style ID 2024-11-13 11:16:24 +09:00
コマリン親衛隊
b414d22a3b Merge pull request #129 from tuna2134/renovate/serde-monorepo
fix(deps): update rust crate serde to v1.0.215
2024-11-13 11:09:25 +09:00
renovate[bot]
248363ae4a fix(deps): update rust crate serde to v1.0.215 2024-11-12 00:53:00 +00:00
コマリン親衛隊
c4b61a36db Merge pull request #128 from tuna2134/renovate/thiserror-1.x-lockfile
fix(deps): update rust crate thiserror to v1.0.69
2024-11-10 20:38:21 +09:00
renovate[bot]
35d16d88a8 fix(deps): update rust crate thiserror to v1.0.69 2024-11-10 07:11:07 +00:00
コマリン親衛隊
fe48d6a034 Merge pull request #127 from tuna2134/renovate/tokio-1.x-lockfile
fix(deps): update rust crate tokio to v1.41.1
2024-11-08 09:55:35 +09:00
renovate[bot]
bca4b2053f fix(deps): update rust crate tokio to v1.41.1 2024-11-07 13:40:20 +00:00
コマリン親衛隊
3330242cd8 Merge pull request #120 from tuna2134/renovate/tokenizers-0.x-lockfile 2024-11-07 00:17:47 +08:00
コマリン親衛隊
f10f71f29b Merge pull request #124 from tuna2134/renovate/anyhow-1.x-lockfile 2024-11-06 21:12:26 +08:00
renovate[bot]
7bd39b7182 chore(deps): update rust crate anyhow to v1.0.93 2024-11-06 13:01:27 +00:00
コマリン親衛隊
2d557fb0ee Merge pull request #123 from Googlefan256/main 2024-11-06 21:00:38 +08:00
Googlefan
14d631eeaa wip: max loaded models 2024-11-06 10:43:41 +00:00
コマリン親衛隊
380daf479c Merge pull request #122 from tuna2134/renovate/pyo3-0.x-lockfile 2024-11-06 09:57:35 +08:00
renovate[bot]
cb814a9952 fix(deps): update rust crate pyo3 to v0.22.6 2024-11-06 01:24:53 +00:00
renovate[bot]
795caf626c fix(deps): update rust crate tokenizers to v0.20.3 2024-11-05 18:02:17 +00:00
コマリン親衛隊
fb32357f31 Merge pull request #119 from tuna2134/renovate/thiserror-1.x-lockfile 2024-11-05 09:49:04 +08:00
renovate[bot]
e4010b3b83 fix(deps): update rust crate thiserror to v1.0.68 2024-11-04 19:39:16 +00:00
コマリン親衛隊
17244a9ede Merge pull request #118 from tuna2134/renovate/thiserror-1.x-lockfile
fix(deps): update rust crate thiserror to v1.0.67
2024-11-04 01:23:40 +09:00
renovate[bot]
61b04fd3d7 fix(deps): update rust crate thiserror to v1.0.67 2024-11-03 16:01:48 +00:00
コマリン親衛隊
4e57a22a40 Merge pull request #117 from tuna2134/renovate/utoipa-5.x-lockfile
fix(deps): update rust crate utoipa to v5.2.0
2024-11-03 08:01:41 +09:00
renovate[bot]
8e10057882 fix(deps): update rust crate utoipa to v5.2.0 2024-11-02 15:44:49 +00:00
コマリン親衛隊
0222b9a189 Merge pull request #116 from tuna2134/renovate/tar-0.x-lockfile
fix(deps): update rust crate tar to v0.4.43
2024-11-02 16:14:41 +09:00
renovate[bot]
5e96d5aef7 fix(deps): update rust crate tar to v0.4.43 2024-11-02 06:41:37 +00:00
コマリン親衛隊
234120f510 Merge pull request #115 from tuna2134/renovate/thiserror-1.x-lockfile 2024-11-02 07:08:57 +09:00
コマリン親衛隊
08f7ab88ec Merge pull request #114 from tuna2134/renovate/anyhow-1.x-lockfile 2024-11-02 07:08:40 +09:00
renovate[bot]
005c67c9b6 fix(deps): update rust crate thiserror to v1.0.66 2024-11-01 17:30:59 +00:00
renovate[bot]
cb08b5b582 chore(deps): update rust crate anyhow to v1.0.92 2024-11-01 17:30:55 +00:00
コマリン親衛隊
105b3ce8de Merge pull request #113 from tuna2134/renovate/onnxruntime-web-1.x-lockfile
fix(deps): update dependency onnxruntime-web to v1.20.0
2024-10-31 12:55:53 +09:00
renovate[bot]
78a5016abc fix(deps): update dependency onnxruntime-web to v1.20.0 2024-10-31 01:30:18 +00:00
コマリン親衛隊
7e6bd4ad0a Merge pull request #112 from tuna2134/renovate/serde-monorepo 2024-10-29 07:47:34 +09:00
renovate[bot]
e1c6cd04b7 fix(deps): update rust crate serde to v1.0.214 2024-10-28 19:40:13 +00:00
コマリン親衛隊
a15efdff09 Merge pull request #110 from tuna2134/renovate/node-22.x-lockfile
chore(deps): update dependency @types/node to v22.8.1
2024-10-28 15:33:34 +09:00
コマリン親衛隊
21823721d0 Merge pull request #111 from tuna2134/renovate/utoipa-5.x-lockfile
fix(deps): update rust crate utoipa to v5.1.3
2024-10-28 15:33:24 +09:00
renovate[bot]
aad978be4b fix(deps): update rust crate utoipa to v5.1.3 2024-10-27 15:20:25 +00:00
renovate[bot]
6dd2cbd991 chore(deps): update dependency @types/node to v22.8.0 2024-10-25 13:50:26 +00:00
コマリン親衛隊
d7b76cc207 Merge pull request #109 from tuna2134/renovate/regex-1.x-lockfile
fix(deps): update rust crate regex to v1.11.1
2024-10-25 02:00:29 +09:00
renovate[bot]
ae0ccb29d2 fix(deps): update rust crate regex to v1.11.1 2024-10-24 16:27:25 +00:00
tuna2134
4bcde2e4b4 bump library version 2024-10-24 08:01:09 +00:00
コマリン親衛隊
2356c896f6 Merge pull request #108 from tuna2134/renovate/utoipa-5.x-lockfile
fix(deps): update rust crate utoipa to v5.1.2
2024-10-24 00:55:42 +09:00
renovate[bot]
d5445abeee fix(deps): update rust crate utoipa to v5.1.2 2024-10-23 15:50:59 +00:00
コマリン親衛隊
673ec0067d Merge pull request #107 from tuna2134/renovate/node-22.x-lockfile
chore(deps): update dependency @types/node to v22.7.9
2024-10-23 19:05:43 +09:00
renovate[bot]
74f657cb33 chore(deps): update dependency @types/node to v22.7.9 2024-10-23 04:51:07 +00:00
コマリン親衛隊
08be778cc5 Merge pull request #105 from tuna2134/renovate/thiserror-1.x-lockfile
fix(deps): update rust crate thiserror to v1.0.65
2024-10-23 08:08:57 +09:00
コマリン親衛隊
6da2f5a0bb Merge pull request #104 from tuna2134/renovate/serde-monorepo
fix(deps): update rust crate serde to v1.0.213
2024-10-23 08:08:50 +09:00
コマリン親衛隊
107190765f Merge pull request #106 from tuna2134/renovate/anyhow-1.x-lockfile
chore(deps): update rust crate anyhow to v1.0.91
2024-10-23 08:08:41 +09:00
renovate[bot]
df726e6f7b fix(deps): update rust crate serde to v1.0.213 2024-10-22 22:07:24 +00:00
renovate[bot]
e5b1ccc36b chore(deps): update rust crate anyhow to v1.0.91 2024-10-22 22:07:14 +00:00
renovate[bot]
40cb604c57 fix(deps): update rust crate thiserror to v1.0.65 2024-10-22 18:02:37 +00:00
コマリン親衛隊
9152c80c76 Merge pull request #102 from tuna2134/renovate/serde-monorepo
fix(deps): update rust crate serde to v1.0.211
2024-10-22 19:30:19 +09:00
コマリン親衛隊
574092562e Merge pull request #103 from tuna2134/renovate/tokio-1.x-lockfile
fix(deps): update rust crate tokio to v1.41.0
2024-10-22 19:30:09 +09:00
renovate[bot]
2e931adce7 fix(deps): update rust crate tokio to v1.41.0 2024-10-22 10:09:19 +00:00
renovate[bot]
e36c395db1 fix(deps): update rust crate serde to v1.0.211 2024-10-22 10:09:13 +00:00
コマリン親衛隊
cfe88629ab Merge pull request #101 from tuna2134/renovate/node-22.x-lockfile
chore(deps): update dependency @types/node to v22.7.8
2024-10-22 13:18:01 +09:00
renovate[bot]
30a98f0968 chore(deps): update dependency @types/node to v22.7.8 2024-10-22 03:41:35 +00:00
コマリン親衛隊
92ae4bc300 Merge pull request #100 from tuna2134/renovate/serde_json-1.x-lockfile
fix(deps): update rust crate serde_json to v1.0.132
2024-10-20 18:30:12 +09:00
renovate[bot]
b6a9bea7ea fix(deps): update rust crate serde_json to v1.0.132 2024-10-19 19:02:47 +00:00
コマリン親衛隊
8c88dd7c87 Merge pull request #98 from Mofa-Xingche/patch-2
Create Colab-sbv2_bindings-CPU.ipynb
2024-10-19 15:56:16 +09:00
コマリン親衛隊
61760b8d7d Merge pull request #99 from tuna2134/renovate/node-22.x-lockfile
chore(deps): update dependency @types/node to v22.7.7
2024-10-19 13:19:28 +09:00
renovate[bot]
5bbc247a89 chore(deps): update dependency @types/node to v22.7.7 2024-10-19 03:48:15 +00:00
コマリン親衛隊
b6f36def58 Merge pull request #96 from tuna2134/renovate/anyhow-1.x-lockfile 2024-10-19 12:47:58 +09:00
コマリン親衛隊
664176a11b Merge pull request #97 from tuna2134/renovate/serde_json-1.x-lockfile 2024-10-19 12:47:51 +09:00
renovate[bot]
432b68590c fix(deps): update rust crate serde_json to v1.0.131 2024-10-19 00:47:24 +00:00
魔法星辰
6283cfedfe Create Colab-sbv2_bindings-CPU.ipynb
Sorry to intrude.
For now, this adds a Jupyter notebook for running sbv2_bindings on CPU in Colab.
2024-10-19 05:00:28 +09:00
renovate[bot]
df9c5d792d chore(deps): update rust crate anyhow to v1.0.90 2024-10-18 17:53:27 +00:00
コマリン親衛隊
d1cc8de976 Merge pull request #94 from tuna2134/refine
Refine the code
2024-10-18 22:49:35 +09:00
tuna2134
c7d911220b bump 2024-10-18 13:46:22 +00:00
tuna2134
e73514e5d3 bump version 2024-10-18 13:37:33 +00:00
tuna2134
45a671cf52 fix compile 2024-10-18 13:35:23 +00:00
tuna2134
c4005808bd fixed 2024-10-18 13:32:35 +00:00
コマリン親衛隊
c312fb0ce4 Merge pull request #89 from tuna2134/renovate/biomejs-biome-1.x-lockfile 2024-10-18 17:41:57 +09:00
コマリン親衛隊
4b4ce82654 Merge pull request #90 from tuna2134/renovate/serde_json-1.x-lockfile 2024-10-18 13:06:55 +09:00
renovate[bot]
3ff226659b fix(deps): update rust crate serde_json to v1.0.129 2024-10-17 20:25:12 +00:00
renovate[bot]
86d0e60eec chore(deps): update dependency @biomejs/biome to v1.9.4 2024-10-17 20:24:07 +00:00
コマリン親衛隊
d337d7caf8 Merge pull request #87 from tuna2134/renovate/node-22.x-lockfile
chore(deps): update dependency @types/node to v22.7.6
2024-10-17 16:21:41 +09:00
renovate[bot]
cbd12a369b chore(deps): update dependency @types/node to v22.7.6 2024-10-17 02:57:14 +00:00
コマリン親衛隊
4a09b50a59 Merge pull request #86 from tuna2134/renovate/utoipa-5.x-lockfile
fix(deps): update rust crate utoipa to v5.1.1
2024-10-17 06:54:35 +09:00
renovate[bot]
1c5863441c fix(deps): update rust crate utoipa to v5.1.1 2024-10-16 16:00:07 +00:00
コマリン親衛隊
42c5e32a5a Merge pull request #85 from tuna2134/renovate/pyo3-0.x-lockfile
fix(deps): update rust crate pyo3 to v0.22.5
2024-10-16 12:04:22 +09:00
renovate[bot]
76bdd8f025 fix(deps): update rust crate pyo3 to v0.22.5 2024-10-15 23:27:29 +00:00
コマリン親衛隊
8e14e0b942 Merge pull request #84 from tuna2134/renovate/utoipa-5.x
fix(deps): update rust crate utoipa to v5
2024-10-15 09:36:02 +09:00
renovate[bot]
378f7d7095 fix(deps): update rust crate utoipa to v5 2024-10-14 21:31:48 +00:00
コマリン親衛隊
b63a3ccf78 Merge pull request #83 from tuna2134/renovate/utoipa-scalar-0.x
fix(deps): update rust crate utoipa-scalar to 0.2.0
2024-10-15 06:30:57 +09:00
renovate[bot]
5238640144 fix(deps): update rust crate utoipa-scalar to 0.2.0 2024-10-14 18:51:30 +00:00
コマリン親衛隊
da3a61a5e7 Merge pull request #82 from tuna2134/renovate/pyo3-0.x-lockfile 2024-10-12 19:22:14 +09:00
renovate[bot]
74043c636f fix(deps): update rust crate pyo3 to v0.22.4 2024-10-12 09:44:38 +00:00
コマリン親衛隊
7663a754a6 Merge pull request #81 from tuna2134/renovate/rust-wasm-bindgen-monorepo
fix(deps): update rust-wasm-bindgen monorepo
2024-10-11 08:41:52 +09:00
renovate[bot]
cb2e52fb18 fix(deps): update rust-wasm-bindgen monorepo 2024-10-10 23:11:03 +00:00
コマリン親衛隊
ac3945748a Merge pull request #80 from tuna2134/renovate/tokenizers-0.x-lockfile 2024-10-10 20:46:36 +09:00
renovate[bot]
1e2cde365f fix(deps): update rust crate tokenizers to v0.20.1 2024-10-10 11:29:48 +00:00
コマリン親衛隊
eecf6d90f7 Merge pull request #79 from tuna2134/renovate/rust-wasm-bindgen-monorepo
fix(deps): update rust-wasm-bindgen monorepo
2024-10-10 09:47:47 +09:00
renovate[bot]
e154fbf493 fix(deps): update rust-wasm-bindgen monorepo 2024-10-09 22:56:05 +00:00
tuna2134
f5de643a21 Merge branch 'main' of https://github.com/tuna2134/sbv2-api 2024-10-09 11:54:07 +00:00
コマリン親衛隊
4b661e3b5f Merge pull request #78 from tuna2134/tuna2134-patch-3
Add sponsor button
2024-10-09 19:21:23 +09:00
コマリン親衛隊
055c08b5d0 Create FUNDING.yml 2024-10-09 19:20:51 +09:00
コマリン親衛隊
cdbcbde04c Merge pull request #77 from tuna2134/renovate/typescript-5.x-lockfile
chore(deps): update dependency typescript to v5.6.3
2024-10-09 19:19:21 +09:00
renovate[bot]
cfd30764d0 chore(deps): update dependency typescript to v5.6.3 2024-10-08 22:34:55 +00:00
コマリン親衛隊
3708d9fec3 Merge pull request #76 from tuna2134/renovate/node-22.x-lockfile 2024-10-08 15:45:09 +09:00
renovate[bot]
065a7b9215 chore(deps): update dependency @types/node to v22.7.5 2024-10-08 00:47:20 +00:00
コマリン親衛隊
dc88251d41 Update README.md 2024-10-06 21:27:27 +09:00
コマリン親衛隊
1550ce6ee4 Merge pull request #73 from tuna2134/renovate/once_cell-1.x-lockfile 2024-10-06 20:29:38 +09:00
renovate[bot]
c1bebea69b chore(deps): update rust crate once_cell to v1.20.2 2024-10-05 16:59:26 +00:00
コマリン親衛隊
af5a550b8f Merge pull request #72 from tuna2134/renovate/biomejs-biome-1.x-lockfile 2024-10-02 08:04:30 +09:00
renovate[bot]
febfd0d84f chore(deps): update dependency @biomejs/biome to v1.9.3 2024-10-01 15:57:52 +00:00
コマリン親衛隊
55698f4a61 Merge pull request #71 from tuna2134/tuna2134-patch-2
Change the pull request template
2024-10-01 14:50:28 +09:00
コマリン親衛隊
b0155f5ffa Merge pull request #70 from tuna2134/tuna2134-patch-1
Consolidated the GitHub Actions workflows
2024-10-01 14:50:17 +09:00
コマリン親衛隊
0e9c7b6522 Stop the workflow from running when pull requests etc. are created 2024-10-01 01:52:09 +09:00
コマリン親衛隊
b0d8be32b6 Update pull_request_template.md 2024-10-01 01:49:30 +09:00
コマリン親衛隊
f76f5e6d1c Delete .github/workflows/build.yml 2024-10-01 01:45:16 +09:00
コマリン親衛隊
e8cc450693 Update CI.yml 2024-10-01 01:44:53 +09:00
コマリン親衛隊
6f0fcd491c Merge pull request #68 from Googlefan256/main
remove webgl support
2024-09-30 22:15:25 +09:00
Googlefan
5cf4149024 feat: web example 2024-09-30 11:53:58 +00:00
Googlefan
65303173a8 fix: wasm webgl 2024-09-30 10:35:37 +00:00
コマリン親衛隊
30e4cde3ed Merge pull request #66 from Googlefan256/main
WASM version finished
2024-09-30 19:29:10 +09:00
Googlefan
596eec654d feat: sbv2 wasm 2024-09-30 08:04:37 +00:00
コマリン親衛隊
ee292315e1 Merge pull request #65 from tuna2134/renovate/regex-1.x-lockfile
fix(deps): update rust crate regex to v1.11.0
2024-09-30 00:15:13 +09:00
コマリン親衛隊
731c751455 Merge pull request #64 from tuna2134/renovate/once_cell-1.x-lockfile
chore(deps): update rust crate once_cell to v1.20.1
2024-09-30 00:14:45 +09:00
renovate[bot]
497bdd79ea fix(deps): update rust crate regex to v1.11.0 2024-09-29 15:09:37 +00:00
renovate[bot]
b887fae47b chore(deps): update rust crate once_cell to v1.20.1 2024-09-29 15:09:32 +00:00
コマリン親衛隊
ca0b8553e4 Merge pull request #63 from tuna2134/renovate/axum-0.x-lockfile
fix(deps): update rust crate axum to v0.7.7
2024-09-28 07:58:06 +09:00
renovate[bot]
29b14895bb fix(deps): update rust crate axum to v0.7.7 2024-09-27 22:47:20 +00:00
tuna2134
c2910ad9e8 add content_type 2024-09-27 12:43:41 +00:00
tuna2134
5c092e8cbb format 2024-09-27 12:40:51 +00:00
tuna2134
d380e549c4 fix bug 2024-09-27 12:40:38 +00:00
tuna2134
395f5b0004 add scalar 2024-09-27 12:35:33 +00:00
tuna2134
f5609035b7 Merge branch 'main' of https://github.com/tuna2134/sbv2-api 2024-09-27 12:30:58 +00:00
tuna2134
1e9f25dcb1 add utoipa 2024-09-27 12:30:56 +00:00
コマリン親衛隊
321ca4e749 Merge pull request #62 from Googlefan256/main
WIP wasm support
2024-09-27 21:30:36 +09:00
Googlefan
bb23bd145b wip: wasm 2024-09-27 12:20:34 +00:00
tuna2134
30e79d0df6 delete 2024-09-27 10:32:39 +00:00
tuna2134
04c21aa97c bumped 2024-09-27 10:30:33 +00:00
tuna2134
6f388052ae bump version 2024-09-27 10:26:41 +00:00
tuna2134
04af3abad5 delete comment 2024-09-27 10:26:11 +00:00
tuna2134
414e42db50 format 2024-09-27 10:24:05 +00:00
tuna2134
b8b0198ca8 fix: bug 2024-09-27 10:23:44 +00:00
コマリン親衛隊
a99fd39834 Merge pull request #60 from tuna2134/label
Stop using regular expressions
2024-09-25 22:32:08 +09:00
tuna2134
886ab78eeb Merge branch 'label' of https://github.com/tuna2134/sbv2-api into label 2024-09-25 13:22:59 +00:00
コマリン親衛隊
c85f474dbf Update jtalk.rs 2024-09-25 22:22:52 +09:00
tuna2134
6d160d7ae8 remove 2024-09-25 13:16:09 +00:00
tuna2134
ee927d65cb remove e3 2024-09-25 12:59:12 +00:00
tuna2134
6e7d641ecb fix bug 2024-09-25 12:56:13 +00:00
tuna2134
eb249aad81 Merge branch 'main' of https://github.com/tuna2134/sbv2-api 2024-09-25 12:53:26 +00:00
tuna2134
f79a67138f fix stop to use re 2024-09-25 12:53:23 +00:00
コマリン親衛隊
09945e2c1c Merge pull request #59 from tuna2134/renovate/tar-0.x-lockfile
fix(deps): update rust crate tar to v0.4.42
2024-09-25 17:25:04 +09:00
renovate[bot]
821b4c7fb3 fix(deps): update rust crate tar to v0.4.42 2024-09-25 03:03:08 +00:00
コマリン親衛隊
ec06c35929 Merge pull request #56 from tuna2134/fix-coreml
Fix the CoreML build failure
2024-09-24 06:42:45 +09:00
コマリン親衛隊
1373aef4b2 Merge pull request #57 from tuna2134/renovate/thiserror-1.x-lockfile
fix(deps): update rust crate thiserror to v1.0.64
2024-09-23 07:43:50 +09:00
renovate[bot]
e2e49fd0e8 fix(deps): update rust crate thiserror to v1.0.64 2024-09-22 19:16:03 +00:00
tuna2134
0cf9f87cc9 fix build 2024-09-22 14:26:15 +00:00
コマリン親衛隊
5e500b2c42 Support arm64 2024-09-22 19:12:29 +09:00
コマリン親衛隊
136375e5b6 Merge pull request #48 from tuna2134/renovate/pyo3-0.x-lockfile
fix(deps): update rust crate pyo3 to v0.22.3
2024-09-22 18:56:40 +09:00
tuna2134
aade119ddb add stripe 2024-09-22 08:05:48 +00:00
renovate[bot]
0bb3c5b8ea Update Rust crate pyo3 to v0.22.3 2024-09-16 09:25:40 +00:00
40 changed files with 2214 additions and 466 deletions

.github/FUNDING.yml (new file, 3 lines)

@@ -0,0 +1,3 @@
# These are supported funding model platforms
github: [tuna2134]


@@ -1,8 +1,13 @@
## 概要
(ここに本PRの説明をしてください。)
<!--
ここに本PRの説明をしてください。
-->
## 関連issue
(ここに該当するissueの番号を書いてください。)
<!--
ここに該当するissueの番号を書いてください。
#nの前にfixesを置くとプルリクが閉じた時に自動的に該当issueもクローズします、
-->
## 確認
- [ ] 動作確認しましたか?


@@ -79,8 +79,6 @@ jobs:
strategy:
matrix:
platform:
- runner: macos-12
target: x86_64
- runner: macos-14
target: aarch64
steps:
@@ -130,3 +128,34 @@ jobs:
with:
command: upload
args: --non-interactive --skip-existing wheels-*/*
push-docker:
runs-on: ${{ matrix.platform }}
if: "startsWith(github.ref, 'refs/tags/')"
permissions:
contents: read
packages: write
strategy:
matrix:
tag: [cpu, cuda]
platform: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: |
ghcr.io/${{ github.repository }}:${{ matrix.tag }}
file: docker/${{ matrix.tag }}.Dockerfile


@@ -1,40 +0,0 @@
name: Push to github container register
on:
release:
types: [created]
workflow_dispatch:
jobs:
push-docker:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
matrix:
tag: [cpu, cuda]
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: |
ghcr.io/${{ github.repository }}:${{ matrix.tag }}
file: docker/${{ matrix.tag }}.Dockerfile
platforms: ${{ matrix.platform }}

.gitignore (3 lines changed)

@@ -4,4 +4,5 @@ models/
venv/
.env
output.wav
node_modules
node_modules
dist/

Cargo.lock (generated, 599 lines changed): file diff suppressed because it is too large


@@ -1,14 +1,15 @@
[workspace]
resolver = "2"
members = ["sbv2_api", "sbv2_core", "sbv2_bindings"]
members = ["sbv2_api", "sbv2_core", "sbv2_bindings", "sbv2_wasm"]
[workspace.dependencies]
anyhow = "1.0.86"
dotenvy = "0.15.7"
env_logger = "0.11.5"
ndarray = "0.16.1"
once_cell = "1.19.0"
[profile.release]
lto = true
debug = false
strip = true
strip = true


@@ -0,0 +1,180 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# 音声合成プログラム\n",
"\n",
"このノートブックでは、`sbv2_bindings` パッケージを使用して音声合成を行います。必要なモデルをダウンロードし、ユーザーが入力したテキストから音声を生成します。音声合成が終わったら、再度テキストの入力を求め、ユーザーが終了するまで繰り返します。"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 必要なパッケージのインストール\n",
"!pip install sbv2_bindings\n",
"\n",
"# 必要なモジュールのインポート\n",
"import os\n",
"import urllib.request\n",
"import time\n",
"from sbv2_bindings import TTSModel"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## モデルのダウンロード\n",
"\n",
"モデルファイルとトークナイザーをダウンロードします。ユーザーが独自のモデルを使用したい場合は、該当するURLまたはローカルパスを指定してください。"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# モデルの URL またはローカルパスの指定\n",
"user_sbv2_model_url = \"\" # カスタムモデルのURLがあればここに指定\n",
"user_sbv2_model_path = \"\" # カスタムモデルのローカルパスがあればここに指定\n",
"\n",
"# モデル用のディレクトリを作成\n",
"model_dir = 'models'\n",
"os.makedirs(model_dir, exist_ok=True)\n",
"\n",
"# ダウンロードするファイルの URL\n",
"file_urls = [\n",
" \"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/tokenizer.json\",\n",
" \"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/deberta.onnx\",\n",
"]\n",
"\n",
"# モデルのパス決定\n",
"if user_sbv2_model_path:\n",
" sbv2_model_path = user_sbv2_model_path # ローカルモデルのパスを使用\n",
"elif user_sbv2_model_url:\n",
" sbv2_model_filename = os.path.basename(user_sbv2_model_url)\n",
" sbv2_model_path = os.path.join(model_dir, sbv2_model_filename)\n",
" file_urls.append(user_sbv2_model_url)\n",
"else:\n",
" # デフォルトのモデルを使用\n",
" sbv2_model_filename = \"tsukuyomi.sbv2\"\n",
" sbv2_model_path = os.path.join(model_dir, sbv2_model_filename)\n",
" file_urls.append(\"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/tsukuyomi.sbv2\")\n",
"\n",
"# ファイルをダウンロード\n",
"for url in file_urls:\n",
" file_name = os.path.join(model_dir, os.path.basename(url))\n",
" if not os.path.exists(file_name):\n",
" print(f\"{file_name} をダウンロードしています...\")\n",
" urllib.request.urlretrieve(url, file_name)\n",
" else:\n",
" print(f\"{file_name} は既に存在します。\")\n",
"\n",
"# ダウンロードまたは使用するファイルを確認\n",
"print(\"\\n使用するファイル:\")\n",
"for file in os.listdir(model_dir):\n",
" print(file)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## モデルの読み込みと音声合成\n",
"\n",
"モデルを読み込み、ユーザーが入力したテキストから音声を生成します。話者名は使用する `.sbv2` ファイル名から自動的に取得します。音声合成が終わったら、再度テキストの入力を求め、ユーザーが終了するまで繰り返します。"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 音声合成の実行\n",
"def main():\n",
" try:\n",
" print(\"\\nモデルを読み込んでいます...\")\n",
" model = TTSModel.from_path(\n",
" os.path.join(model_dir, \"deberta.onnx\"),\n",
" os.path.join(model_dir, \"tokenizer.json\")\n",
" )\n",
" print(\"モデルの読み込みが完了しました!\")\n",
" except Exception as e:\n",
" print(f\"モデルの読み込みに失敗しました: {e}\")\n",
" return\n",
"\n",
" # 話者名を取得(.sbv2 ファイル名の拡張子を除いた部分)\n",
" speaker_name = os.path.splitext(os.path.basename(sbv2_model_path))[0]\n",
" \n",
" # 指定されたモデルのパスを使用\n",
" try:\n",
" model.load_sbv2file_from_path(speaker_name, sbv2_model_path)\n",
" print(f\"話者 '{speaker_name}' のセットアップが完了しました!\")\n",
" except Exception as e:\n",
" print(f\"SBV2ファイルの読み込みに失敗しました: {e}\")\n",
" return\n",
"\n",
" # 音声合成を繰り返し実行\n",
" while True:\n",
" # 合成したいテキストをユーザーから入力\n",
" user_input = input(\"\\n音声合成したいテキストを入力してください終了するには 'exit' と入力): \")\n",
" \n",
" if user_input.strip().lower() == 'exit':\n",
" print(\"音声合成を終了します。\")\n",
" break\n",
"\n",
" # 出力ファイル名\n",
" output_file = \"output.wav\"\n",
"\n",
" # 音声合成を実行\n",
" try:\n",
" print(\"\\n音声合成を開始します...\")\n",
" start_time = time.time()\n",
"\n",
" audio_data = model.synthesize(user_input, speaker_name, 0, 0.0, 1)\n",
"\n",
" with open(output_file, \"wb\") as f:\n",
" f.write(audio_data)\n",
"\n",
" end_time = time.time()\n",
" elapsed_time = end_time - start_time\n",
"\n",
" print(f\"\\n音声が '{output_file}' に保存されました。\")\n",
" print(f\"音声合成にかかった時間: {elapsed_time:.2f} 秒\")\n",
" except Exception as e:\n",
" print(f\"音声合成に失敗しました: {e}\")\n",
"\n",
"if __name__ == \"__main__\":\n",
" main()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.x"
}
},
"nbformat": 4,
"nbformat_minor": 4
}


@@ -1,5 +1,8 @@
# SBV2-API
## 注意:本バージョンはアルファ版です。
安定版を利用したい場合は[こちら](https://github.com/tuna2134/sbv2-api/tree/v0.1.x)をご覧ください。
## プログラミングに詳しくない方向け
[こちら](https://github.com/tuna2134/sbv2-gui?tab=readme-ov-file)を参照してください。
@@ -27,7 +30,8 @@ JP-Extra しか対応していません。(基本的に対応する予定もあ
- [x] GPU 対応(DirectML)
- [x] GPU 対応(CoreML)
- [ ] WASM 変換(依存ライブラリの関係により現在は不可)
- [ ] arm64のdockerサポート
- [x] arm64のdockerサポート
- [ ] MeCabを利用する
## 構造説明


@@ -1 +1,14 @@
10,000年前までコロナが流行っていました
悪徳貴族として名高いヴェレット家の長男――オウガ・ヴェレットは転生者である。
ブラック企業に勤め、過労死した彼には一つの夢があった。
「可愛いハーレム作って、美味い物を食べる。領民の税金で楽して好き放題な生活を送ってみせる!」
素晴らしき異世界ライフを夢見た彼は実現へ向けて、努力を始めた。
ハーレムを築くためにいじめられてる平民の子を助けて恩を売ってやったり。
労働力を手に入れるために多くの孤児を雇って教育したり。
反乱を起きても鎮圧できるように魔法学院へ通って魔法を極める。
「クックック……! 順調、順調! 未来は明るいなぁ!」
――オウガはまだ知らない。
楽な生活を送るためにしてきたことが評価され、世間から『聖者』様として呼ばれる未来を。


@@ -94,7 +94,7 @@ model = get_net_g(
)
def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio):
def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio, noise_scale, noise_scale_w):
return model.infer(
x,
x_len,
@@ -105,6 +105,8 @@ def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio):
style,
sdp_ratio=sdp_ratio,
length_scale=length_scale,
noise_scale=noise_scale,
noise_scale_w=noise_scale_w,
)
@@ -122,6 +124,8 @@ torch.onnx.export(
style_vec_tensor,
torch.tensor(1.0),
torch.tensor(0.0),
torch.tensor(0.6777),
torch.tensor(0.8),
),
f"../models/model_{out_name}.onnx",
verbose=True,
@@ -144,6 +148,8 @@ torch.onnx.export(
"style_vec",
"length_scale",
"sdp_ratio",
"noise_scale",
"noise_scale_w"
],
output_names=["output"],
)


@@ -1,17 +1,19 @@
[package]
name = "sbv2_api"
version = "0.1.0"
version = "0.2.0-alpha4"
edition = "2021"
[dependencies]
anyhow.workspace = true
axum = "0.7.5"
axum = "0.8.0"
dotenvy.workspace = true
env_logger.workspace = true
log = "0.4.22"
sbv2_core = { version = "0.1.3", path = "../sbv2_core" }
sbv2_core = { version = "0.2.0-alpha2", path = "../sbv2_core", features = ["aivmx"] }
serde = { version = "1.0.210", features = ["derive"] }
tokio = { version = "1.40.0", features = ["full"] }
utoipa = { version = "5.0.0", features = ["axum_extras"] }
utoipa-scalar = { version = "0.3.0", features = ["axum"] }
[features]
coreml = ["sbv2_core/coreml"]
@@ -19,4 +21,4 @@ cuda = ["sbv2_core/cuda"]
cuda_tf32 = ["sbv2_core/cuda_tf32"]
dynamic = ["sbv2_core/dynamic"]
directml = ["sbv2_core/directml"]
tensorrt = ["sbv2_core/tensorrt"]
tensorrt = ["sbv2_core/tensorrt"]

sbv2_api/build.rs (new file, 5 lines)

@@ -0,0 +1,5 @@
fn main() {
if cfg!(feature = "coreml") {
println!("cargo:rustc-link-arg=-fapple-link-rtlib");
}
}


@@ -11,10 +11,23 @@ use std::env;
use std::sync::Arc;
use tokio::fs;
use tokio::sync::Mutex;
use utoipa::{OpenApi, ToSchema};
use utoipa_scalar::{Scalar, Servable};
mod error;
use crate::error::AppResult;
#[derive(OpenApi)]
#[openapi(paths(models, synthesize), components(schemas(SynthesizeRequest)))]
struct ApiDoc;
#[utoipa::path(
get,
path = "/models",
responses(
(status = 200, description = "Return model list", body = Vec<String>),
)
)]
async fn models(State(state): State<AppState>) -> AppResult<impl IntoResponse> {
Ok(Json(state.tts_model.lock().await.models()))
}
@@ -27,7 +40,15 @@ fn length_default() -> f32 {
1.0
}
#[derive(Deserialize)]
fn style_id_default() -> i32 {
0
}
fn speaker_id_default() -> i64 {
0
}
#[derive(Deserialize, ToSchema)]
struct SynthesizeRequest {
text: String,
ident: String,
@@ -35,8 +56,20 @@ struct SynthesizeRequest {
sdp_ratio: f32,
#[serde(default = "length_default")]
length_scale: f32,
#[serde(default = "style_id_default")]
style_id: i32,
#[serde(default = "speaker_id_default")]
speaker_id: i64,
}
#[utoipa::path(
post,
path = "/synthesize",
request_body = SynthesizeRequest,
responses(
(status = 200, description = "Return audio/wav", body = Vec<u8>, content_type = "audio/wav")
)
)]
async fn synthesize(
State(state): State<AppState>,
Json(SynthesizeRequest {
@@ -44,15 +77,18 @@ async fn synthesize(
ident,
sdp_ratio,
length_scale,
style_id,
speaker_id,
}): Json<SynthesizeRequest>,
) -> AppResult<impl IntoResponse> {
log::debug!("processing request: text={text}, ident={ident}, sdp_ratio={sdp_ratio}, length_scale={length_scale}");
let buffer = {
let tts_model = state.tts_model.lock().await;
let mut tts_model = state.tts_model.lock().await;
tts_model.easy_synthesize(
&ident,
&text,
0,
style_id,
speaker_id,
SynthesizeOptions {
sdp_ratio,
length_scale,
@@ -73,6 +109,9 @@ impl AppState {
let mut tts_model = TTSModelHolder::new(
&fs::read(env::var("BERT_MODEL_PATH")?).await?,
&fs::read(env::var("TOKENIZER_PATH")?).await?,
env::var("HOLDER_MAX_LOADED_MODElS")
.ok()
.and_then(|x| x.parse().ok()),
)?;
let models = env::var("MODELS_PATH").unwrap_or("models".to_string());
let mut f = fs::read_dir(&models).await?;
@@ -101,6 +140,20 @@ impl AppState {
log::warn!("Error loading {entry}: {e}");
};
log::info!("Loaded: {entry}");
} else if name.ends_with(".aivmx") {
let entry = &name[..name.len() - 6];
log::info!("Try loading: {entry}");
let aivmx_bytes = match fs::read(format!("{models}/{entry}.aivmx")).await {
Ok(b) => b,
Err(e) => {
log::warn!("Error loading aivmx bytes from file {entry}: {e}");
continue;
}
};
if let Err(e) = tts_model.load_aivmx(entry, aivmx_bytes) {
log::error!("Error loading {entry}: {e}");
}
log::info!("Loaded: {entry}");
}
}
for entry in entries {
@@ -139,7 +192,8 @@ async fn main() -> anyhow::Result<()> {
.route("/", get(|| async { "Hello, World!" }))
.route("/synthesize", post(synthesize))
.route("/models", get(models))
.with_state(AppState::new().await?);
.with_state(AppState::new().await?)
.merge(Scalar::with_url("/docs", ApiDoc::openapi()));
let addr = env::var("ADDR").unwrap_or("0.0.0.0:3000".to_string());
let listener = tokio::net::TcpListener::bind(&addr).await?;
log::info!("Listening on {addr}");


@@ -1,6 +1,6 @@
[package]
name = "sbv2_bindings"
version = "0.1.1"
version = "0.2.0-alpha4"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -11,5 +11,5 @@ crate-type = ["cdylib"]
[dependencies]
anyhow.workspace = true
ndarray.workspace = true
pyo3 = { version = "0.22.0", features = ["anyhow"] }
sbv2_core = { version = "0.1.4", path = "../sbv2_core" }
pyo3 = { version = "0.23.0", features = ["anyhow"] }
sbv2_core = { version = "0.2.0-alpha2", path = "../sbv2_core" }


@@ -11,5 +11,7 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy",
]
dynamic = ["version"]
[tool.maturin]
features = ["pyo3/extension-module"]
strip = true


@@ -1,6 +1,6 @@
use pyo3::prelude::*;
use pyo3::types::PyBytes;
use sbv2_core::tts::{TTSModelHolder, SynthesizeOptions};
use sbv2_core::tts::{SynthesizeOptions, TTSModelHolder};
use crate::style::StyleVector;
@@ -23,10 +23,15 @@ pub struct TTSModel {
#[pymethods]
impl TTSModel {
#[pyo3(signature = (bert_model_bytes, tokenizer_bytes, max_loaded_models=None))]
#[new]
fn new(bert_model_bytes: Vec<u8>, tokenizer_bytes: Vec<u8>) -> anyhow::Result<Self> {
fn new(
bert_model_bytes: Vec<u8>,
tokenizer_bytes: Vec<u8>,
max_loaded_models: Option<usize>,
) -> anyhow::Result<Self> {
Ok(Self {
model: TTSModelHolder::new(bert_model_bytes, tokenizer_bytes)?,
model: TTSModelHolder::new(bert_model_bytes, tokenizer_bytes, max_loaded_models)?,
})
}
@@ -38,10 +43,21 @@ impl TTSModel {
/// BERTモデルのパス
/// tokenizer_path : str
/// トークナイザーのパス
/// max_loaded_models: int | None
/// 同時にVRAMに存在するモデルの数
#[pyo3(signature = (bert_model_path, tokenizer_path, max_loaded_models=None))]
#[staticmethod]
fn from_path(bert_model_path: String, tokenizer_path: String) -> anyhow::Result<Self> {
fn from_path(
bert_model_path: String,
tokenizer_path: String,
max_loaded_models: Option<usize>,
) -> anyhow::Result<Self> {
Ok(Self {
model: TTSModelHolder::new(fs::read(bert_model_path)?, fs::read(tokenizer_path)?)?,
model: TTSModelHolder::new(
fs::read(bert_model_path)?,
fs::read(tokenizer_path)?,
max_loaded_models,
)?,
})
}
@@ -121,11 +137,12 @@ impl TTSModel {
/// voice_data : bytes
/// 音声データ
fn synthesize<'p>(
&'p self,
&'p mut self,
py: Python<'p>,
text: String,
ident: String,
style_id: i32,
speaker_id: i64,
sdp_ratio: f32,
length_scale: f32,
) -> anyhow::Result<Bound<PyBytes>> {
@@ -133,13 +150,14 @@ impl TTSModel {
ident.as_str(),
&text,
style_id,
speaker_id,
SynthesizeOptions {
sdp_ratio,
length_scale,
..Default::default()
},
)?;
Ok(PyBytes::new_bound(py, &data))
Ok(PyBytes::new(py, &data))
}
fn unload(&mut self, ident: String) -> bool {


@@ -1,7 +1,7 @@
[package]
name = "sbv2_core"
description = "Style-Bert-VITSの推論ライブラリ"
version = "0.1.4"
version = "0.2.0-alpha4"
edition = "2021"
license = "MIT"
readme = "../README.md"
@@ -10,26 +10,33 @@ documentation = "https://docs.rs/sbv2_core"
[dependencies]
anyhow.workspace = true
base64 = { version = "0.22.1", optional = true }
dotenvy.workspace = true
env_logger.workspace = true
hound = "3.5.1"
jpreprocess = { version = "0.10.0", features = ["naist-jdic"] }
ndarray.workspace = true
npyz = { version = "0.8.3", optional = true }
num_cpus = "1.16.0"
once_cell = "1.19.0"
ort = { git = "https://github.com/pykeio/ort.git", version = "2.0.0-rc.6" }
once_cell.workspace = true
ort = { git = "https://github.com/pykeio/ort.git", version = "2.0.0-rc.8", optional = true }
regex = "1.10.6"
serde = { version = "1.0.210", features = ["derive"] }
serde_json = "1.0.128"
tar = "0.4.41"
thiserror = "1.0.63"
tokenizers = "0.20.0"
tokenizers = { version = "0.21.0", default-features = false }
zstd = "0.13.2"
[features]
cuda = ["ort/cuda"]
cuda_tf32 = []
dynamic = ["ort/load-dynamic"]
directml = ["ort/directml"]
tensorrt = ["ort/tensorrt"]
coreml = ["ort/coreml"]
cuda = ["ort/cuda", "std"]
cuda_tf32 = ["std", "cuda"]
std = ["dep:ort", "tokenizers/progressbar", "tokenizers/onig", "tokenizers/esaxx_fast"]
dynamic = ["ort/load-dynamic", "std"]
directml = ["ort/directml", "std"]
tensorrt = ["ort/tensorrt", "std"]
coreml = ["ort/coreml", "std"]
default = ["std"]
no_std = ["tokenizers/unstable_wasm"]
aivmx = ["npyz", "base64"]
base64 = ["dep:base64"]


@@ -1,5 +1,5 @@
use crate::error::Result;
use ndarray::Array2;
use ndarray::{Array2, Ix2};
use ort::Session;
pub fn predict(
@@ -14,10 +14,10 @@ pub fn predict(
}?
)?;
let output = outputs.get("output").unwrap();
let output = outputs["output"]
.try_extract_tensor::<f32>()?
.into_dimensionality::<Ix2>()?
.to_owned();
let content = output.try_extract_tensor::<f32>()?.to_owned();
let (data, _) = content.clone().into_raw_vec_and_offset();
Ok(Array2::from_shape_vec((content.shape()[0], content.shape()[1]), data).unwrap())
Ok(output)
}


@@ -6,6 +6,7 @@ pub enum Error {
TokenizerError(#[from] tokenizers::Error),
#[error("JPreprocess error: {0}")]
JPreprocessError(#[from] jpreprocess::error::JPreprocessError),
#[cfg(feature = "std")]
#[error("ONNX error: {0}")]
OrtError(#[from] ort::Error),
#[error("NDArray error: {0}")]
@@ -20,6 +21,11 @@ pub enum Error {
HoundError(#[from] hound::Error),
#[error("model not found error")]
ModelNotFoundError(String),
#[cfg(feature = "base64")]
#[error("base64 error")]
Base64Error(#[from] base64::DecodeError),
#[error("other")]
OtherError(String),
}
pub type Result<T> = std::result::Result<T, Error>;


@@ -19,21 +19,6 @@ fn initialize_jtalk() -> Result<JPreprocessType> {
Ok(jpreprocess)
}
static JTALK_G2P_G_A1_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"/A:([0-9\-]+)\+").unwrap());
static JTALK_G2P_G_A2_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\+(\d+)\+").unwrap());
static JTALK_G2P_G_A3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\+(\d+)/").unwrap());
static JTALK_G2P_G_E3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"!(\d+)_").unwrap());
static JTALK_G2P_G_F1_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"/F:(\d+)_").unwrap());
static JTALK_G2P_G_P3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\-(.*?)\+").unwrap());
fn numeric_feature_by_regex(regex: &Regex, text: &str) -> i32 {
if let Some(mat) = regex.captures(text) {
mat[1].parse::<i32>().unwrap()
} else {
-50
}
}
macro_rules! hash_set {
($($elem:expr),* $(,)?) => {{
let mut set = HashSet::new();
@@ -239,7 +224,10 @@ impl JTalkProcess {
}
fn kata_to_phoneme_list(mut text: String) -> Result<Vec<String>> {
if PUNCTUATIONS.contains(&text.as_str()) {
let chars: HashSet<String> = text.chars().map(|x| x.to_string()).collect();
if chars.is_subset(&HashSet::from_iter(
PUNCTUATIONS.iter().map(|x| x.to_string()),
)) {
return Ok(text.chars().map(|x| x.to_string()).collect());
}
if !KATAKANA_PATTERN.is_match(&text) {
@@ -351,11 +339,7 @@ impl JTalkProcess {
let mut phones: Vec<String> = Vec::new();
for (i, label) in labels.iter().enumerate() {
let mut p3 = {
let label_text = label.to_string();
let mattched = JTALK_G2P_G_P3_PATTERN.captures(&label_text).unwrap();
mattched[1].to_string()
};
let mut p3 = label.phoneme.c.clone().unwrap();
if "AIUEO".contains(&p3) {
// 文字をlowerする
p3 = p3.to_lowercase();
@@ -365,10 +349,10 @@ impl JTalkProcess {
if i == 0 {
phones.push("^".to_string());
} else if i == labels.len() - 1 {
let e3 = numeric_feature_by_regex(&JTALK_G2P_G_E3_PATTERN, &label.to_string());
if e3 == 0 {
let e3 = label.accent_phrase_prev.clone().unwrap().is_interrogative;
if e3 {
phones.push("$".to_string());
} else if e3 == 1 {
} else {
phones.push("?".to_string());
}
}
@@ -380,14 +364,33 @@ impl JTalkProcess {
phones.push(p3.clone());
}
let a1 = numeric_feature_by_regex(&JTALK_G2P_G_A1_PATTERN, &label.to_string());
let a2 = numeric_feature_by_regex(&JTALK_G2P_G_A2_PATTERN, &label.to_string());
let a3 = numeric_feature_by_regex(&JTALK_G2P_G_A3_PATTERN, &label.to_string());
let a1 = if let Some(mora) = &label.mora {
mora.relative_accent_position as i32
} else {
-50
};
let a2 = if let Some(mora) = &label.mora {
mora.position_forward as i32
} else {
-50
};
let a3 = if let Some(mora) = &label.mora {
mora.position_backward as i32
} else {
-50
};
let f1 = numeric_feature_by_regex(&JTALK_G2P_G_F1_PATTERN, &label.to_string());
let f1 = if let Some(accent_phrase) = &label.accent_phrase_curr {
accent_phrase.mora_count as i32
} else {
-50
};
let a2_next =
numeric_feature_by_regex(&JTALK_G2P_G_A2_PATTERN, &labels[i + 1].to_string());
let a2_next = if let Some(mora) = &labels[i + 1].mora {
mora.position_forward as i32
} else {
-50
};
if a3 == 1 && a2_next == 1 && "aeiouAEIOUNcl".contains(&p3) {
phones.push("#".to_string());


@@ -1,11 +1,16 @@
#[cfg(feature = "std")]
pub mod bert;
pub mod error;
pub mod jtalk;
#[cfg(feature = "std")]
pub mod model;
pub mod mora;
pub mod nlp;
pub mod norm;
pub mod sbv2file;
pub mod style;
pub mod tokenizer;
#[cfg(feature = "std")]
pub mod tts;
pub mod tts_util;
pub mod utils;


@@ -1,9 +1,9 @@
use std::env;
use std::fs;
use sbv2_core::tts;
use std::env;
fn main() -> anyhow::Result<()> {
#[cfg(feature = "std")]
fn main_inner() -> anyhow::Result<()> {
use sbv2_core::tts;
dotenvy::dotenv_override().ok();
env_logger::init();
let text = fs::read_to_string("content.txt")?;
@@ -11,11 +11,33 @@ fn main() -> anyhow::Result<()> {
let mut tts_holder = tts::TTSModelHolder::new(
&fs::read(env::var("BERT_MODEL_PATH")?)?,
&fs::read(env::var("TOKENIZER_PATH")?)?,
env::var("HOLDER_MAX_LOADED_MODElS")
.ok()
.and_then(|x| x.parse().ok()),
)?;
tts_holder.load_sbv2file(ident, fs::read(env::var("MODEL_PATH")?)?)?;
#[cfg(not(feature = "aivmx"))]
{
tts_holder.load_sbv2file(ident, fs::read(env::var("MODEL_PATH")?)?)?;
}
#[cfg(feature = "aivmx")]
{
tts_holder.load_aivmx(ident, fs::read(env::var("MODEL_PATH")?)?)?;
}
let audio = tts_holder.easy_synthesize(ident, &text, 0, tts::SynthesizeOptions::default())?;
let audio =
tts_holder.easy_synthesize(ident, &text, 0, 0, tts::SynthesizeOptions::default())?;
fs::write("output.wav", audio)?;
Ok(())
}
#[cfg(not(feature = "std"))]
fn main_inner() -> anyhow::Result<()> {
Ok(())
}
fn main() {
if let Err(e) = main_inner() {
println!("Error: {e}");
}
}


@@ -1,5 +1,5 @@
use crate::error::Result;
use ndarray::{array, Array1, Array2, Array3, Axis};
use ndarray::{array, Array1, Array2, Array3, Axis, Ix3};
use ort::{GraphOptimizationLevel, Session};
#[allow(clippy::vec_init_then_push, unused_variables)]
@@ -52,11 +52,14 @@ pub fn synthesize(
session: &Session,
bert_ori: Array2<f32>,
x_tst: Array1<i64>,
sid: Array1<i64>,
tones: Array1<i64>,
lang_ids: Array1<i64>,
style_vector: Array1<f32>,
sdp_ratio: f32,
length_scale: f32,
noise_scale: f32,
noise_scale_w: f32,
) -> Result<Array3<f32>> {
let bert = bert_ori.insert_axis(Axis(0));
let x_tst_lengths: Array1<i64> = array![x_tst.shape()[0] as i64];
@@ -67,27 +70,21 @@ pub fn synthesize(
let outputs = session.run(ort::inputs! {
"x_tst" => x_tst,
"x_tst_lengths" => x_tst_lengths,
"sid" => array![0_i64],
"sid" => sid,
"tones" => tones,
"language" => lang_ids,
"bert" => bert,
"style_vec" => style_vector,
"sdp_ratio" => array![sdp_ratio],
"length_scale" => array![length_scale],
"noise_scale" => array![noise_scale],
"noise_scale_w" => array![noise_scale_w]
}?)?;
let audio_array = outputs
.get("output")
.unwrap()
let audio_array = outputs["output"]
.try_extract_tensor::<f32>()?
.into_dimensionality::<Ix3>()?
.to_owned();
Ok(Array3::from_shape_vec(
(
audio_array.shape()[0],
audio_array.shape()[1],
audio_array.shape()[2],
),
audio_array.into_raw_vec_and_offset().0,
)?)
Ok(audio_array)
}

sbv2_core/src/sbv2file.rs (new file, 37 lines)

@@ -0,0 +1,37 @@
use std::io::{Cursor, Read};
use tar::Archive;
use zstd::decode_all;
use crate::error::{Error, Result};
/// Parse a .sbv2 file binary
///
/// # Examples
///
/// ```rs
/// parse_sbv2file("tsukuyomi", std::fs::read("tsukuyomi.sbv2")?)?;
/// ```
pub fn parse_sbv2file<P: AsRef<[u8]>>(sbv2_bytes: P) -> Result<(Vec<u8>, Vec<u8>)> {
let mut arc = Archive::new(Cursor::new(decode_all(Cursor::new(sbv2_bytes.as_ref()))?));
let mut vits2 = None;
let mut style_vectors = None;
let mut et = arc.entries()?;
while let Some(Ok(mut e)) = et.next() {
let pth = String::from_utf8_lossy(&e.path_bytes()).to_string();
let mut b = Vec::with_capacity(e.size() as usize);
e.read_to_end(&mut b)?;
match pth.as_str() {
"model.onnx" => vits2 = Some(b),
"style_vectors.json" => style_vectors = Some(b),
_ => continue,
}
}
if style_vectors.is_none() {
return Err(Error::ModelNotFoundError("style_vectors".to_string()));
}
if vits2.is_none() {
return Err(Error::ModelNotFoundError("vits2".to_string()));
}
Ok((style_vectors.unwrap(), vits2.unwrap()))
}


@@ -1,5 +1,5 @@
use crate::error::Result;
use tokenizers::Tokenizer;
pub use tokenizers::Tokenizer;
pub fn get_tokenizer<P: AsRef<[u8]>>(p: P) -> Result<Tokenizer> {
let tokenizer = Tokenizer::from_bytes(p)?;


@@ -1,12 +1,14 @@
use crate::error::{Error, Result};
use crate::{bert, jtalk, model, nlp, norm, style, tokenizer, utils};
use hound::{SampleFormat, WavSpec, WavWriter};
use ndarray::{concatenate, s, Array, Array1, Array2, Array3, Axis};
use crate::{jtalk, model, style, tokenizer, tts_util};
#[cfg(feature = "aivmx")]
use base64::prelude::{Engine as _, BASE64_STANDARD};
#[cfg(feature = "aivmx")]
use ndarray::ShapeBuilder;
use ndarray::{concatenate, Array1, Array2, Array3, Axis};
use ort::Session;
use std::io::{Cursor, Read};
use tar::Archive;
#[cfg(feature = "aivmx")]
use std::io::Cursor;
use tokenizers::Tokenizer;
use zstd::decode_all;
#[derive(PartialEq, Eq, Clone)]
pub struct TTSIdent(String);
@@ -28,9 +30,10 @@ where
}
pub struct TTSModel {
vits2: Session,
vits2: Option<Session>,
style_vectors: Array2<f32>,
ident: TTSIdent,
bytes: Option<Vec<u8>>,
}
/// High-level Style-Bert-VITS2's API
@@ -39,6 +42,7 @@ pub struct TTSModelHolder {
bert: Session,
models: Vec<TTSModel>,
jtalk: jtalk::JTalk,
max_loaded_models: Option<usize>,
}
impl TTSModelHolder {
@@ -47,9 +51,13 @@ impl TTSModelHolder {
/// # Examples
///
/// ```rs
/// let mut tts_holder = TTSModelHolder::new(std::fs::read("deberta.onnx")?, std::fs::read("tokenizer.json")?)?;
/// let mut tts_holder = TTSModelHolder::new(std::fs::read("deberta.onnx")?, std::fs::read("tokenizer.json")?, None)?;
/// ```
pub fn new<P: AsRef<[u8]>>(bert_model_bytes: P, tokenizer_bytes: P) -> Result<Self> {
pub fn new<P: AsRef<[u8]>>(
bert_model_bytes: P,
tokenizer_bytes: P,
max_loaded_models: Option<usize>,
) -> Result<Self> {
let bert = model::load_model(bert_model_bytes, true)?;
let jtalk = jtalk::JTalk::new()?;
let tokenizer = tokenizer::get_tokenizer(tokenizer_bytes)?;
@@ -58,6 +66,7 @@ impl TTSModelHolder {
models: vec![],
jtalk,
tokenizer,
max_loaded_models,
})
}
@@ -66,6 +75,53 @@ impl TTSModelHolder {
self.models.iter().map(|m| m.ident.to_string()).collect()
}
#[cfg(feature = "aivmx")]
pub fn load_aivmx<I: Into<TTSIdent>, P: AsRef<[u8]>>(
&mut self,
ident: I,
aivmx_bytes: P,
) -> Result<()> {
let ident = ident.into();
if self.find_model(ident.clone()).is_err() {
let mut load = true;
if let Some(max) = self.max_loaded_models {
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
load = false;
}
}
let model = model::load_model(&aivmx_bytes, false)?;
let metadata = model.metadata()?;
if let Some(aivm_style_vectors) = metadata.custom("aivm_style_vectors")? {
let aivm_style_vectors = BASE64_STANDARD.decode(aivm_style_vectors)?;
let style_vectors = Cursor::new(&aivm_style_vectors);
let reader = npyz::NpyFile::new(style_vectors)?;
let style_vectors = {
let shape = reader.shape().to_vec();
let order = reader.order();
let data = reader.into_vec::<f32>()?;
let shape = match shape[..] {
[i1, i2] => [i1 as usize, i2 as usize],
_ => panic!("expected 2D array"),
};
let true_shape = shape.set_f(order == npyz::Order::Fortran);
ndarray::Array2::from_shape_vec(true_shape, data)?
};
drop(metadata);
self.models.push(TTSModel {
vits2: if load { Some(model) } else { None },
bytes: if self.max_loaded_models.is_some() {
Some(aivmx_bytes.as_ref().to_vec())
} else {
None
},
ident,
style_vectors,
})
}
}
Ok(())
}
/// Load a .sbv2 file binary
///
/// # Examples
@@ -78,27 +134,8 @@ impl TTSModelHolder {
ident: I,
sbv2_bytes: P,
) -> Result<()> {
let mut arc = Archive::new(Cursor::new(decode_all(Cursor::new(sbv2_bytes.as_ref()))?));
let mut vits2 = None;
let mut style_vectors = None;
let mut et = arc.entries()?;
while let Some(Ok(mut e)) = et.next() {
let pth = String::from_utf8_lossy(&e.path_bytes()).to_string();
let mut b = Vec::with_capacity(e.size() as usize);
e.read_to_end(&mut b)?;
match pth.as_str() {
"model.onnx" => vits2 = Some(b),
"style_vectors.json" => style_vectors = Some(b),
_ => continue,
}
}
if style_vectors.is_none() {
return Err(Error::ModelNotFoundError("style_vectors".to_string()));
}
if vits2.is_none() {
return Err(Error::ModelNotFoundError("vits2".to_string()));
}
self.load(ident, style_vectors.unwrap(), vits2.unwrap())?;
let (style_vectors, vits2) = crate::sbv2file::parse_sbv2file(sbv2_bytes)?;
self.load(ident, style_vectors, vits2)?;
Ok(())
}
@@ -117,10 +154,25 @@ impl TTSModelHolder {
) -> Result<()> {
let ident = ident.into();
if self.find_model(ident.clone()).is_err() {
let mut load = true;
if let Some(max) = self.max_loaded_models {
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
load = false;
}
}
self.models.push(TTSModel {
vits2: model::load_model(vits2_bytes, false)?,
vits2: if load {
Some(model::load_model(&vits2_bytes, false)?)
} else {
None
},
style_vectors: style::load_style(style_vectors_bytes)?,
ident,
bytes: if self.max_loaded_models.is_some() {
Some(vits2_bytes.as_ref().to_vec())
} else {
None
},
})
}
Ok(())
@@ -151,69 +203,14 @@ impl TTSModelHolder {
&self,
text: &str,
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
let text = self.jtalk.num2word(text)?;
let normalized_text = norm::normalize_text(&text);
let process = self.jtalk.process_text(&normalized_text)?;
let (phones, tones, mut word2ph) = process.g2p()?;
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
let phones = utils::intersperse(&phones, 0);
let tones = utils::intersperse(&tones, 0);
let lang_ids = utils::intersperse(&lang_ids, 0);
for item in &mut word2ph {
*item *= 2;
}
word2ph[0] += 1;
let text = {
let (seq_text, _) = process.text_to_seq_kata()?;
seq_text.join("")
};
let (token_ids, attention_masks) = tokenizer::tokenize(&text, &self.tokenizer)?;
let bert_content = bert::predict(&self.bert, token_ids, attention_masks)?;
assert!(
word2ph.len() == text.chars().count() + 2,
"{} {}",
word2ph.len(),
normalized_text.chars().count()
);
let mut phone_level_feature = vec![];
for (i, reps) in word2ph.iter().enumerate() {
let repeat_feature = {
let (reps_rows, reps_cols) = (*reps, 1);
let arr_len = bert_content.slice(s![i, ..]).len();
let mut results: Array2<f32> =
Array::zeros((reps_rows as usize, arr_len * reps_cols));
for j in 0..reps_rows {
for k in 0..reps_cols {
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
view.assign(&bert_content.slice(s![i, ..]));
}
}
results
};
phone_level_feature.push(repeat_feature);
}
let phone_level_feature = concatenate(
Axis(0),
&phone_level_feature
.iter()
.map(|x| x.view())
.collect::<Vec<_>>(),
)?;
let bert_ori = phone_level_feature.t();
Ok((
bert_ori.to_owned(),
phones.into(),
tones.into(),
lang_ids.into(),
))
crate::tts_util::parse_text_blocking(
text,
&self.jtalk,
&self.tokenizer,
|token_ids, attention_masks| {
crate::bert::predict(&self.bert, token_ids, attention_masks)
},
)
}
fn find_model<I: Into<TTSIdent>>(&self, ident: I) -> Result<&TTSModel> {
@@ -223,6 +220,42 @@ impl TTSModelHolder {
.find(|m| m.ident == ident)
.ok_or(Error::ModelNotFoundError(ident.to_string()))
}
fn find_and_load_model<I: Into<TTSIdent>>(&mut self, ident: I) -> Result<bool> {
let ident = ident.into();
let (bytes, style_vectors) = {
let model = self
.models
.iter()
.find(|m| m.ident == ident)
.ok_or(Error::ModelNotFoundError(ident.to_string()))?;
if model.vits2.is_some() {
return Ok(true);
}
(model.bytes.clone().unwrap(), model.style_vectors.clone())
};
self.unload(ident.clone());
let s = model::load_model(&bytes, false)?;
if let Some(max) = self.max_loaded_models {
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
self.unload(self.models.first().unwrap().ident.clone());
}
}
self.models.push(TTSModel {
bytes: Some(bytes.to_vec()),
vits2: Some(s),
style_vectors,
ident: ident.clone(),
});
let model = self
.models
.iter()
.find(|m| m.ident == ident)
.ok_or(Error::ModelNotFoundError(ident.to_string()))?;
if model.vits2.is_some() {
return Ok(true);
}
Err(Error::ModelNotFoundError(ident.to_string()))
}
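
When max_loaded_models is set, every model keeps its raw bytes but only up to that many keep a live vits2 session; find_and_load_model reloads a model on demand and evicts the oldest loaded one once the cap is reached. A simplified, self-contained sketch of that policy (Session, load_model, Holder and ensure_loaded are illustrative stand-ins, not the crate's API, and the real code also re-pushes the reloaded model to the end of the list):

/// Stand-ins for the crate's types: the real code holds an ONNX session
/// and creates it with model::load_model(&bytes, ..).
struct Session;
fn load_model(_bytes: &[u8]) -> Session {
    Session
}

struct Slot {
    ident: String,
    bytes: Vec<u8>,           // kept so an unloaded model can be reloaded later
    session: Option<Session>, // None while the model is evicted
}

struct Holder {
    slots: Vec<Slot>,
    max_loaded: Option<usize>,
}

impl Holder {
    /// Ensure `ident` has a live session, evicting the oldest loaded model
    /// once max_loaded is reached (same policy as find_and_load_model above).
    fn ensure_loaded(&mut self, ident: &str) {
        let already_loaded = self
            .slots
            .iter()
            .any(|s| s.ident == ident && s.session.is_some());
        if !already_loaded {
            if let Some(max) = self.max_loaded {
                if self.slots.iter().filter(|s| s.session.is_some()).count() >= max {
                    // Evict the first (oldest) loaded model but keep its bytes.
                    if let Some(oldest) = self.slots.iter_mut().find(|s| s.session.is_some()) {
                        oldest.session = None;
                    }
                }
            }
        }
        if let Some(slot) = self.slots.iter_mut().find(|s| s.ident == ident) {
            if slot.session.is_none() {
                slot.session = Some(load_model(&slot.bytes));
            }
        }
    }
}

Unlike this sketch, the real method returns Result and surfaces ModelNotFoundError when the ident is unknown.
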
/// Get style vector by style id and weight
///
@@ -245,12 +278,19 @@ impl TTSModelHolder {
/// let audio = tts_holder.easy_synthesize("tsukuyomi", "こんにちは", 0, SynthesizeOptions::default())?;
/// ```
pub fn easy_synthesize<I: Into<TTSIdent> + Copy>(
&self,
&mut self,
ident: I,
text: &str,
style_id: i32,
speaker_id: i64,
options: SynthesizeOptions,
) -> Result<Vec<u8>> {
self.find_and_load_model(ident)?;
let vits2 = &self
.find_model(ident)?
.vits2
.as_ref()
.ok_or(Error::ModelNotFoundError(ident.into().to_string()))?;
let style_vector = self.get_style_vector(ident, style_id, options.style_weight)?;
let audio_array = if options.split_sentences {
let texts: Vec<&str> = text.split('\n').collect();
@@ -261,14 +301,17 @@ impl TTSModelHolder {
}
let (bert_ori, phones, tones, lang_ids) = self.parse_text(t)?;
let audio = model::synthesize(
&self.find_model(ident)?.vits2,
vits2,
bert_ori.to_owned(),
phones,
Array1::from_vec(vec![speaker_id]),
tones,
lang_ids,
style_vector.clone(),
options.sdp_ratio,
options.length_scale,
0.677,
0.8,
)?;
audios.push(audio.clone());
if i != texts.len() - 1 {
@@ -282,65 +325,20 @@ impl TTSModelHolder {
} else {
let (bert_ori, phones, tones, lang_ids) = self.parse_text(text)?;
model::synthesize(
&self.find_model(ident)?.vits2,
vits2,
bert_ori.to_owned(),
phones,
Array1::from_vec(vec![speaker_id]),
tones,
lang_ids,
style_vector,
options.sdp_ratio,
options.length_scale,
0.677,
0.8,
)?
};
Self::array_to_vec(audio_array)
}
fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
let spec = WavSpec {
channels: 1,
sample_rate: 44100,
bits_per_sample: 32,
sample_format: SampleFormat::Float,
};
let mut cursor = Cursor::new(Vec::new());
let mut writer = WavWriter::new(&mut cursor, spec)?;
for i in 0..audio_array.shape()[0] {
let output = audio_array.slice(s![i, 0, ..]).to_vec();
for sample in output {
writer.write_sample(sample)?;
}
}
writer.finalize()?;
Ok(cursor.into_inner())
}
/// Synthesize text to audio
///
/// # Note
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
#[allow(clippy::too_many_arguments)]
pub fn synthesize<I: Into<TTSIdent>>(
&self,
ident: I,
bert_ori: Array2<f32>,
phones: Array1<i64>,
tones: Array1<i64>,
lang_ids: Array1<i64>,
style_vector: Array1<f32>,
sdp_ratio: f32,
length_scale: f32,
) -> Result<Vec<u8>> {
let audio_array = model::synthesize(
&self.find_model(ident)?.vits2,
bert_ori.to_owned(),
phones,
tones,
lang_ids,
style_vector,
sdp_ratio,
length_scale,
)?;
Self::array_to_vec(audio_array)
tts_util::array_to_vec(audio_array)
}
}

180
sbv2_core/src/tts_util.rs Normal file

@@ -0,0 +1,180 @@
use std::io::Cursor;
use crate::error::Result;
use crate::{jtalk, nlp, norm, tokenizer, utils};
use hound::{SampleFormat, WavSpec, WavWriter};
use ndarray::{concatenate, s, Array, Array1, Array2, Array3, Axis};
use tokenizers::Tokenizer;
/// Parse text and return the inputs for synthesis
///
/// # Note
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
#[allow(clippy::type_complexity)]
pub async fn parse_text(
text: &str,
jtalk: &jtalk::JTalk,
tokenizer: &Tokenizer,
bert_predict: impl FnOnce(
Vec<i64>,
Vec<i64>,
) -> std::pin::Pin<
Box<dyn std::future::Future<Output = Result<ndarray::Array2<f32>>>>,
>,
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
let text = jtalk.num2word(text)?;
let normalized_text = norm::normalize_text(&text);
let process = jtalk.process_text(&normalized_text)?;
let (phones, tones, mut word2ph) = process.g2p()?;
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
let phones = utils::intersperse(&phones, 0);
let tones = utils::intersperse(&tones, 0);
let lang_ids = utils::intersperse(&lang_ids, 0);
for item in &mut word2ph {
*item *= 2;
}
word2ph[0] += 1;
let text = {
let (seq_text, _) = process.text_to_seq_kata()?;
seq_text.join("")
};
let (token_ids, attention_masks) = tokenizer::tokenize(&text, tokenizer)?;
let bert_content = bert_predict(token_ids, attention_masks).await?;
assert!(
word2ph.len() == text.chars().count() + 2,
"{} {}",
word2ph.len(),
normalized_text.chars().count()
);
let mut phone_level_feature = vec![];
for (i, reps) in word2ph.iter().enumerate() {
let repeat_feature = {
let (reps_rows, reps_cols) = (*reps, 1);
let arr_len = bert_content.slice(s![i, ..]).len();
let mut results: Array2<f32> = Array::zeros((reps_rows as usize, arr_len * reps_cols));
for j in 0..reps_rows {
for k in 0..reps_cols {
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
view.assign(&bert_content.slice(s![i, ..]));
}
}
results
};
phone_level_feature.push(repeat_feature);
}
let phone_level_feature = concatenate(
Axis(0),
&phone_level_feature
.iter()
.map(|x| x.view())
.collect::<Vec<_>>(),
)?;
let bert_ori = phone_level_feature.t();
Ok((
bert_ori.to_owned(),
phones.into(),
tones.into(),
lang_ids.into(),
))
}
/// Parse text and return the inputs for synthesis
///
/// # Note
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
#[allow(clippy::type_complexity)]
pub fn parse_text_blocking(
text: &str,
jtalk: &jtalk::JTalk,
tokenizer: &Tokenizer,
bert_predict: impl FnOnce(Vec<i64>, Vec<i64>) -> Result<ndarray::Array2<f32>>,
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
let text = jtalk.num2word(text)?;
let normalized_text = norm::normalize_text(&text);
let process = jtalk.process_text(&normalized_text)?;
let (phones, tones, mut word2ph) = process.g2p()?;
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
let phones = utils::intersperse(&phones, 0);
let tones = utils::intersperse(&tones, 0);
let lang_ids = utils::intersperse(&lang_ids, 0);
for item in &mut word2ph {
*item *= 2;
}
word2ph[0] += 1;
let text = {
let (seq_text, _) = process.text_to_seq_kata()?;
seq_text.join("")
};
let (token_ids, attention_masks) = tokenizer::tokenize(&text, tokenizer)?;
let bert_content = bert_predict(token_ids, attention_masks)?;
assert!(
word2ph.len() == text.chars().count() + 2,
"{} {}",
word2ph.len(),
normalized_text.chars().count()
);
let mut phone_level_feature = vec![];
for (i, reps) in word2ph.iter().enumerate() {
let repeat_feature = {
let (reps_rows, reps_cols) = (*reps, 1);
let arr_len = bert_content.slice(s![i, ..]).len();
let mut results: Array2<f32> = Array::zeros((reps_rows as usize, arr_len * reps_cols));
for j in 0..reps_rows {
for k in 0..reps_cols {
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
view.assign(&bert_content.slice(s![i, ..]));
}
}
results
};
phone_level_feature.push(repeat_feature);
}
let phone_level_feature = concatenate(
Axis(0),
&phone_level_feature
.iter()
.map(|x| x.view())
.collect::<Vec<_>>(),
)?;
let bert_ori = phone_level_feature.t();
Ok((
bert_ori.to_owned(),
phones.into(),
tones.into(),
lang_ids.into(),
))
}
pub fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
let spec = WavSpec {
channels: 1,
sample_rate: 44100,
bits_per_sample: 32,
sample_format: SampleFormat::Float,
};
let mut cursor = Cursor::new(Vec::new());
let mut writer = WavWriter::new(&mut cursor, spec)?;
for i in 0..audio_array.shape()[0] {
let output = audio_array.slice(s![i, 0, ..]).to_vec();
for sample in output {
writer.write_sample(sample)?;
}
}
writer.finalize()?;
Ok(cursor.into_inner())
}
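
array_to_vec always emits a mono, 44.1 kHz, 32-bit float WAV. A self-contained hound sketch (a hypothetical test tone, not part of the crate) that writes a file with the same header:

use std::io::Cursor;

use hound::{SampleFormat, WavSpec, WavWriter};

// Write one second of a 440 Hz sine using the same WavSpec that
// array_to_vec uses for synthesized audio: mono, 44.1 kHz, f32 samples.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let spec = WavSpec {
        channels: 1,
        sample_rate: 44100,
        bits_per_sample: 32,
        sample_format: SampleFormat::Float,
    };
    let mut cursor = Cursor::new(Vec::new());
    let mut writer = WavWriter::new(&mut cursor, spec)?;
    for n in 0..44_100u32 {
        let t = n as f32 / 44_100.0;
        writer.write_sample((std::f32::consts::TAU * 440.0 * t).sin() * 0.2)?;
    }
    writer.finalize()?;
    std::fs::write("tone.wav", cursor.into_inner())?;
    Ok(())
}

Because the spec uses SampleFormat::Float with 32 bits per sample, each f32 is written as-is, with no integer scaling.
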

19
sbv2_wasm/Cargo.toml Normal file

@@ -0,0 +1,19 @@
[package]
name = "sbv2_wasm"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
wasm-bindgen = "0.2.93"
sbv2_core = { path = "../sbv2_core", default-features = false, features = ["no_std"] }
once_cell.workspace = true
js-sys = "0.3.70"
ndarray.workspace = true
wasm-bindgen-futures = "0.4.43"
[profile.release]
lto = true
opt-level = "s"

2
sbv2_wasm/README.md Normal file

@@ -0,0 +1,2 @@
# StyleBertVITS2 wasm
Refer to https://github.com/tuna2134/sbv2-api for details.

31
sbv2_wasm/biome.json Normal file

@@ -0,0 +1,31 @@
{
"$schema": "https://biomejs.dev/schemas/1.9.2/schema.json",
"vcs": {
"enabled": false,
"clientKind": "git",
"useIgnoreFile": false
},
"files": {
"ignoreUnknown": false,
"ignore": []
},
"formatter": {
"enabled": true,
"indentStyle": "tab",
"ignore": ["dist/", "pkg/"]
},
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
},
"javascript": {
"formatter": {
"quoteStyle": "double"
}
}
}

4
sbv2_wasm/build.sh Executable file

@@ -0,0 +1,4 @@
wasm-pack build --target web sbv2_wasm
wasm-opt -O3 -o sbv2_wasm/pkg/sbv2_wasm_bg.wasm sbv2_wasm/pkg/sbv2_wasm_bg.wasm
mkdir -p sbv2_wasm/dist
cp sbv2_wasm/pkg/sbv2_wasm_bg.wasm sbv2_wasm/dist/sbv2_wasm_bg.wasm

51
sbv2_wasm/example.html Normal file

@@ -0,0 +1,51 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Style Bert VITS2 Web</title>
<script type="importmap">
{
"imports": {
"onnxruntime-web": "https://cdn.jsdelivr.net/npm/onnxruntime-web@1.19.2/dist/ort.all.min.mjs",
"sbv2": "https://cdn.jsdelivr.net/npm/sbv2@0.1.1+esm"
}
}
</script>
<script type="module" async defer>
import { ModelHolder } from "sbv2";
await ModelHolder.globalInit(
await (
await fetch("https://esm.sh/sbv2@0.1.1/dist/sbv2_wasm_bg.wasm", { cache: "force-cache" })
).arrayBuffer(),
);
const holder = await ModelHolder.create(
await (
await fetch("/models/tokenizer.json", { cache: "force-cache" })
).text(),
await (
await fetch("/models/deberta.onnx", { cache: "force-cache" })
).arrayBuffer(),
);
if (typeof window.onready == "function") {
window.onready(holder);
}
</script>
<script type="module" async defer>
window.onready = async function (holder) {
await holder.load(
"amitaro",
await (await fetch("/models/amitaro.sbv2")).arrayBuffer(),
);
const wave = await holder.synthesize("amitaro", "おはよう");
console.log(wave);
};
</script>
</head>
<body>
<div id="root"></div>
</body>
</html>

11
sbv2_wasm/example.js Normal file

@@ -0,0 +1,11 @@
import { ModelHolder } from "./dist/index.js";
import fs from "node:fs/promises";
ModelHolder.globalInit(await fs.readFile("./dist/sbv2_wasm_bg.wasm"));
const holder = await ModelHolder.create(
(await fs.readFile("../models/tokenizer.json")).toString("utf-8"),
await fs.readFile("../models/deberta.onnx"),
);
await holder.load("tsukuyomi", await fs.readFile("../models/iroha2.sbv2"));
await fs.writeFile("out.wav", await holder.synthesize("tsukuyomi", "おはよう"));
holder.unload("tsukuyomi");

25
sbv2_wasm/package.json Normal file

@@ -0,0 +1,25 @@
{
"name": "sbv2",
"version": "0.1.1",
"description": "Style Bert VITS2 wasm",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc && esbuild src-js/index.ts --outfile=dist/index.js --minify --format=esm --bundle --external:onnxruntime-web",
"format": "biome format --write ."
},
"keywords": [],
"author": "tuna2134",
"license": "MIT",
"devDependencies": {
"@biomejs/biome": "^1.9.2",
"@types/node": "^22.7.4",
"esbuild": "^0.24.0",
"typescript": "^5.6.2"
},
"dependencies": {
"onnxruntime-web": "^1.19.2"
},
"files": ["dist/*", "package.json", "README.md"]
}

494
sbv2_wasm/pnpm-lock.yaml generated Normal file

@@ -0,0 +1,494 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
onnxruntime-web:
specifier: ^1.19.2
version: 1.20.0
devDependencies:
'@biomejs/biome':
specifier: ^1.9.2
version: 1.9.4
'@types/node':
specifier: ^22.7.4
version: 22.8.0
esbuild:
specifier: ^0.24.0
version: 0.24.0
typescript:
specifier: ^5.6.2
version: 5.6.3
packages:
'@biomejs/biome@1.9.4':
resolution: {integrity: sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==}
engines: {node: '>=14.21.3'}
hasBin: true
'@biomejs/cli-darwin-arm64@1.9.4':
resolution: {integrity: sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [darwin]
'@biomejs/cli-darwin-x64@1.9.4':
resolution: {integrity: sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [darwin]
'@biomejs/cli-linux-arm64-musl@1.9.4':
resolution: {integrity: sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [linux]
'@biomejs/cli-linux-arm64@1.9.4':
resolution: {integrity: sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [linux]
'@biomejs/cli-linux-x64-musl@1.9.4':
resolution: {integrity: sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [linux]
'@biomejs/cli-linux-x64@1.9.4':
resolution: {integrity: sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [linux]
'@biomejs/cli-win32-arm64@1.9.4':
resolution: {integrity: sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [win32]
'@biomejs/cli-win32-x64@1.9.4':
resolution: {integrity: sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [win32]
'@esbuild/aix-ppc64@0.24.0':
resolution: {integrity: sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.24.0':
resolution: {integrity: sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.24.0':
resolution: {integrity: sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.24.0':
resolution: {integrity: sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.24.0':
resolution: {integrity: sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.24.0':
resolution: {integrity: sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.24.0':
resolution: {integrity: sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.24.0':
resolution: {integrity: sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.24.0':
resolution: {integrity: sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.24.0':
resolution: {integrity: sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.24.0':
resolution: {integrity: sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.24.0':
resolution: {integrity: sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.24.0':
resolution: {integrity: sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.24.0':
resolution: {integrity: sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.24.0':
resolution: {integrity: sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.24.0':
resolution: {integrity: sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.24.0':
resolution: {integrity: sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-x64@0.24.0':
resolution: {integrity: sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.24.0':
resolution: {integrity: sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.24.0':
resolution: {integrity: sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/sunos-x64@0.24.0':
resolution: {integrity: sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.24.0':
resolution: {integrity: sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.24.0':
resolution: {integrity: sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.24.0':
resolution: {integrity: sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@protobufjs/aspromise@1.1.2':
resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
'@protobufjs/base64@1.1.2':
resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}
'@protobufjs/codegen@2.0.4':
resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}
'@protobufjs/eventemitter@1.1.0':
resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}
'@protobufjs/fetch@1.1.0':
resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}
'@protobufjs/float@1.0.2':
resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}
'@protobufjs/inquire@1.1.0':
resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}
'@protobufjs/path@1.1.2':
resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}
'@protobufjs/pool@1.1.0':
resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}
'@protobufjs/utf8@1.1.0':
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
'@types/node@22.8.0':
resolution: {integrity: sha512-84rafSBHC/z1i1E3p0cJwKA+CfYDNSXX9WSZBRopjIzLET8oNt6ht2tei4C7izwDeEiLLfdeSVBv1egOH916hg==}
esbuild@0.24.0:
resolution: {integrity: sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==}
engines: {node: '>=18'}
hasBin: true
flatbuffers@1.12.0:
resolution: {integrity: sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==}
guid-typescript@1.0.9:
resolution: {integrity: sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==}
long@5.2.3:
resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==}
onnxruntime-common@1.20.0:
resolution: {integrity: sha512-9ehS4ul5fBszIcHhfxuDgk45lO+Fqrxmrgwk1Pxb1JRvbQiCB/v9Royv95SRCWHktLMviqNjBsEd/biJhd39cg==}
onnxruntime-web@1.20.0:
resolution: {integrity: sha512-IoUf8dqHFJLV4DUSz+Ok+xxyN6cQk57gb20m6PZE5gag3QXuvegYMq9dG8t/QF4JjTKIwvfvnr16ouzCCB9IMA==}
platform@1.3.6:
resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==}
protobufjs@7.4.0:
resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==}
engines: {node: '>=12.0.0'}
typescript@5.6.3:
resolution: {integrity: sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.19.8:
resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==}
snapshots:
'@biomejs/biome@1.9.4':
optionalDependencies:
'@biomejs/cli-darwin-arm64': 1.9.4
'@biomejs/cli-darwin-x64': 1.9.4
'@biomejs/cli-linux-arm64': 1.9.4
'@biomejs/cli-linux-arm64-musl': 1.9.4
'@biomejs/cli-linux-x64': 1.9.4
'@biomejs/cli-linux-x64-musl': 1.9.4
'@biomejs/cli-win32-arm64': 1.9.4
'@biomejs/cli-win32-x64': 1.9.4
'@biomejs/cli-darwin-arm64@1.9.4':
optional: true
'@biomejs/cli-darwin-x64@1.9.4':
optional: true
'@biomejs/cli-linux-arm64-musl@1.9.4':
optional: true
'@biomejs/cli-linux-arm64@1.9.4':
optional: true
'@biomejs/cli-linux-x64-musl@1.9.4':
optional: true
'@biomejs/cli-linux-x64@1.9.4':
optional: true
'@biomejs/cli-win32-arm64@1.9.4':
optional: true
'@biomejs/cli-win32-x64@1.9.4':
optional: true
'@esbuild/aix-ppc64@0.24.0':
optional: true
'@esbuild/android-arm64@0.24.0':
optional: true
'@esbuild/android-arm@0.24.0':
optional: true
'@esbuild/android-x64@0.24.0':
optional: true
'@esbuild/darwin-arm64@0.24.0':
optional: true
'@esbuild/darwin-x64@0.24.0':
optional: true
'@esbuild/freebsd-arm64@0.24.0':
optional: true
'@esbuild/freebsd-x64@0.24.0':
optional: true
'@esbuild/linux-arm64@0.24.0':
optional: true
'@esbuild/linux-arm@0.24.0':
optional: true
'@esbuild/linux-ia32@0.24.0':
optional: true
'@esbuild/linux-loong64@0.24.0':
optional: true
'@esbuild/linux-mips64el@0.24.0':
optional: true
'@esbuild/linux-ppc64@0.24.0':
optional: true
'@esbuild/linux-riscv64@0.24.0':
optional: true
'@esbuild/linux-s390x@0.24.0':
optional: true
'@esbuild/linux-x64@0.24.0':
optional: true
'@esbuild/netbsd-x64@0.24.0':
optional: true
'@esbuild/openbsd-arm64@0.24.0':
optional: true
'@esbuild/openbsd-x64@0.24.0':
optional: true
'@esbuild/sunos-x64@0.24.0':
optional: true
'@esbuild/win32-arm64@0.24.0':
optional: true
'@esbuild/win32-ia32@0.24.0':
optional: true
'@esbuild/win32-x64@0.24.0':
optional: true
'@protobufjs/aspromise@1.1.2': {}
'@protobufjs/base64@1.1.2': {}
'@protobufjs/codegen@2.0.4': {}
'@protobufjs/eventemitter@1.1.0': {}
'@protobufjs/fetch@1.1.0':
dependencies:
'@protobufjs/aspromise': 1.1.2
'@protobufjs/inquire': 1.1.0
'@protobufjs/float@1.0.2': {}
'@protobufjs/inquire@1.1.0': {}
'@protobufjs/path@1.1.2': {}
'@protobufjs/pool@1.1.0': {}
'@protobufjs/utf8@1.1.0': {}
'@types/node@22.8.0':
dependencies:
undici-types: 6.19.8
esbuild@0.24.0:
optionalDependencies:
'@esbuild/aix-ppc64': 0.24.0
'@esbuild/android-arm': 0.24.0
'@esbuild/android-arm64': 0.24.0
'@esbuild/android-x64': 0.24.0
'@esbuild/darwin-arm64': 0.24.0
'@esbuild/darwin-x64': 0.24.0
'@esbuild/freebsd-arm64': 0.24.0
'@esbuild/freebsd-x64': 0.24.0
'@esbuild/linux-arm': 0.24.0
'@esbuild/linux-arm64': 0.24.0
'@esbuild/linux-ia32': 0.24.0
'@esbuild/linux-loong64': 0.24.0
'@esbuild/linux-mips64el': 0.24.0
'@esbuild/linux-ppc64': 0.24.0
'@esbuild/linux-riscv64': 0.24.0
'@esbuild/linux-s390x': 0.24.0
'@esbuild/linux-x64': 0.24.0
'@esbuild/netbsd-x64': 0.24.0
'@esbuild/openbsd-arm64': 0.24.0
'@esbuild/openbsd-x64': 0.24.0
'@esbuild/sunos-x64': 0.24.0
'@esbuild/win32-arm64': 0.24.0
'@esbuild/win32-ia32': 0.24.0
'@esbuild/win32-x64': 0.24.0
flatbuffers@1.12.0: {}
guid-typescript@1.0.9: {}
long@5.2.3: {}
onnxruntime-common@1.20.0: {}
onnxruntime-web@1.20.0:
dependencies:
flatbuffers: 1.12.0
guid-typescript: 1.0.9
long: 5.2.3
onnxruntime-common: 1.20.0
platform: 1.3.6
protobufjs: 7.4.0
platform@1.3.6: {}
protobufjs@7.4.0:
dependencies:
'@protobufjs/aspromise': 1.1.2
'@protobufjs/base64': 1.1.2
'@protobufjs/codegen': 2.0.4
'@protobufjs/eventemitter': 1.1.0
'@protobufjs/fetch': 1.1.0
'@protobufjs/float': 1.0.2
'@protobufjs/inquire': 1.1.0
'@protobufjs/path': 1.1.2
'@protobufjs/pool': 1.1.0
'@protobufjs/utf8': 1.1.0
'@types/node': 22.8.0
long: 5.2.3
typescript@5.6.3: {}
undici-types@6.19.8: {}

106
sbv2_wasm/src-js/index.ts Normal file

@@ -0,0 +1,106 @@
import * as wasm from "../pkg/sbv2_wasm.js";
import { InferenceSession, Tensor } from "onnxruntime-web";
export class ModelHolder {
private models: Map<string, [InferenceSession, wasm.StyleVectorWrap]> =
new Map();
constructor(
private tok: wasm.TokenizerWrap,
private deberta: InferenceSession,
) {}
public static async globalInit(buf: ArrayBufferLike) {
await wasm.default(buf);
}
public static async create(tok: string, deberta: ArrayBufferLike) {
return new ModelHolder(
wasm.load_tokenizer(tok),
await InferenceSession.create(deberta, {
executionProviders: ["webnn", "webgpu", "wasm", "cpu"],
graphOptimizationLevel: "all",
}),
);
}
public async synthesize(
name: string,
text: string,
style_id: number = 0,
style_weight: number = 1.0,
sdp_ratio: number = 0.4,
speed: number = 1.0,
) {
const mod = this.models.get(name);
if (!mod) throw new Error(`No model named ${name}`);
const [vits2, style] = mod;
return wasm.synthesize(
text,
this.tok,
async (a: BigInt64Array, b: BigInt64Array) => {
try {
const res = (
await this.deberta.run({
input_ids: new Tensor("int64", a, [1, a.length]),
attention_mask: new Tensor("int64", b, [1, b.length]),
})
)["output"];
return [new Uint32Array(res.dims), await res.getData(true)];
} catch (e) {
console.warn(e);
throw e;
}
},
async (
[a_shape, a_array]: any,
b_d: any,
c_d: any,
d_d: any,
e_d: any,
f: number,
g: number,
) => {
try {
const a = new Tensor("float32", a_array, [1, ...a_shape]);
const b = new Tensor("int64", b_d, [1, b_d.length]);
const c = new Tensor("int64", c_d, [1, c_d.length]);
const d = new Tensor("int64", d_d, [1, d_d.length]);
const e = new Tensor("float32", e_d, [1, e_d.length]);
const res = (
await vits2.run({
x_tst: b,
x_tst_lengths: new Tensor("int64", [b_d.length]),
sid: new Tensor("int64", [0]),
tones: c,
language: d,
bert: a,
style_vec: e,
sdp_ratio: new Tensor("float32", [f]),
length_scale: new Tensor("float32", [g]),
})
).output;
return [new Uint32Array(res.dims), await res.getData(true)];
} catch (e) {
console.warn(e);
throw e;
}
},
sdp_ratio,
1.0 / speed,
style_id,
style_weight,
style,
);
}
public async load(name: string, b: Uint8Array) {
const [style, vits2_b] = wasm.load_sbv2file(b);
const vits2 = await InferenceSession.create(vits2_b as Uint8Array, {
executionProviders: ["webnn", "webgpu", "wasm", "cpu"],
graphOptimizationLevel: "all",
});
this.models.set(name, [vits2, style]);
}
public async unload(name: string) {
return this.models.delete(name);
}
public modelList() {
return this.models.keys();
}
}

102
sbv2_wasm/src/array_helper.rs Normal file

@@ -0,0 +1,102 @@
pub fn vec8_to_array8(v: Vec<u8>) -> js_sys::Uint8Array {
let arr = js_sys::Uint8Array::new_with_length(v.len() as u32);
arr.copy_from(&v);
arr
}
pub fn vec_f32_to_array_f32(v: Vec<f32>) -> js_sys::Float32Array {
let arr = js_sys::Float32Array::new_with_length(v.len() as u32);
arr.copy_from(&v);
arr
}
pub fn array8_to_vec8(buf: js_sys::Uint8Array) -> Vec<u8> {
let mut body = vec![0; buf.length() as usize];
buf.copy_to(&mut body[..]);
body
}
pub fn vec64_to_array64(v: Vec<i64>) -> js_sys::BigInt64Array {
let arr = js_sys::BigInt64Array::new_with_length(v.len() as u32);
arr.copy_from(&v);
arr
}
pub fn vec_to_array(v: Vec<wasm_bindgen::JsValue>) -> js_sys::Array {
let arr = js_sys::Array::new_with_length(v.len() as u32);
for (i, v) in v.into_iter().enumerate() {
arr.set(i as u32, v);
}
arr
}
struct A {
shape: Vec<u32>,
data: Vec<f32>,
}
impl TryFrom<wasm_bindgen::JsValue> for A {
type Error = sbv2_core::error::Error;
fn try_from(value: wasm_bindgen::JsValue) -> Result<Self, Self::Error> {
let value: js_sys::Array = value.into();
let mut shape = vec![];
let mut data = vec![];
for (i, v) in value.iter().enumerate() {
match i {
0 => {
let v: js_sys::Uint32Array = v.into();
shape = vec![0; v.length() as usize];
v.copy_to(&mut shape);
}
1 => {
let v: js_sys::Float32Array = v.into();
data = vec![0.0; v.length() as usize];
v.copy_to(&mut data);
}
_ => {}
};
}
Ok(A { shape, data })
}
}
pub fn array_to_array2_f32(
a: wasm_bindgen::JsValue,
) -> sbv2_core::error::Result<ndarray::Array2<f32>> {
let a = A::try_from(a)?;
if a.shape.len() != 2 {
return Err(sbv2_core::error::Error::OtherError(
"Length mismatch".to_string(),
));
}
let shape = [a.shape[0] as usize, a.shape[1] as usize];
let arr = ndarray::Array2::from_shape_vec(shape, a.data.to_vec())
.map_err(|e| sbv2_core::error::Error::OtherError(e.to_string()))?;
Ok(arr)
}
pub fn array_to_array3_f32(
a: wasm_bindgen::JsValue,
) -> sbv2_core::error::Result<ndarray::Array3<f32>> {
let a = A::try_from(a)?;
if a.shape.len() != 3 {
return Err(sbv2_core::error::Error::OtherError(
"Length mismatch".to_string(),
));
}
let shape = [
a.shape[0] as usize,
a.shape[1] as usize,
a.shape[2] as usize,
];
let arr = ndarray::Array3::from_shape_vec(shape, a.data.to_vec())
.map_err(|e| sbv2_core::error::Error::OtherError(e.to_string()))?;
Ok(arr)
}
pub fn array2_f32_to_array(a: ndarray::Array2<f32>) -> js_sys::Array {
let shape: Vec<wasm_bindgen::JsValue> = a.shape().iter().map(|f| (*f as u32).into()).collect();
let typed_array = js_sys::Float32Array::new_with_length(a.len() as u32);
typed_array.copy_from(&a.into_flat().to_vec());
vec_to_array(vec![vec_to_array(shape).into(), typed_array.into()])
}

123
sbv2_wasm/src/lib.rs Normal file

@@ -0,0 +1,123 @@
use once_cell::sync::Lazy;
use sbv2_core::*;
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture;
mod array_helper;
static JTALK: Lazy<jtalk::JTalk> = Lazy::new(|| jtalk::JTalk::new().unwrap());
#[wasm_bindgen]
pub struct TokenizerWrap {
tokenizer: tokenizer::Tokenizer,
}
#[wasm_bindgen]
pub fn load_tokenizer(s: js_sys::JsString) -> Result<TokenizerWrap, JsError> {
if let Some(s) = s.as_string() {
Ok(TokenizerWrap {
tokenizer: tokenizer::Tokenizer::from_bytes(s.as_bytes())
.map_err(|e| JsError::new(&e.to_string()))?,
})
} else {
Err(JsError::new("invalid utf8"))
}
}
#[wasm_bindgen]
pub struct StyleVectorWrap {
style_vector: ndarray::Array2<f32>,
}
#[wasm_bindgen]
pub fn load_sbv2file(buf: js_sys::Uint8Array) -> Result<js_sys::Array, JsError> {
let (style_vectors, vits2) = sbv2file::parse_sbv2file(array_helper::array8_to_vec8(buf))?;
let buf = array_helper::vec8_to_array8(vits2);
Ok(array_helper::vec_to_array(vec![
StyleVectorWrap {
style_vector: style::load_style(style_vectors)?,
}
.into(),
buf.into(),
]))
}
#[allow(clippy::too_many_arguments)]
#[wasm_bindgen]
pub async fn synthesize(
text: &str,
tokenizer: &TokenizerWrap,
bert_predict_fn: js_sys::Function,
synthesize_fn: js_sys::Function,
sdp_ratio: f32,
length_scale: f32,
style_id: i32,
style_weight: f32,
style_vectors: &StyleVectorWrap,
) -> Result<js_sys::Uint8Array, JsError> {
let synthesize_wrap = |bert_ori: ndarray::Array2<f32>,
x_tst: ndarray::Array1<i64>,
tones: ndarray::Array1<i64>,
lang_ids: ndarray::Array1<i64>,
style_vector: ndarray::Array1<f32>,
sdp_ratio: f32,
length_scale: f32| async move {
let arr = array_helper::vec_to_array(vec![
array_helper::array2_f32_to_array(bert_ori).into(),
array_helper::vec64_to_array64(x_tst.to_vec()).into(),
array_helper::vec64_to_array64(tones.to_vec()).into(),
array_helper::vec64_to_array64(lang_ids.to_vec()).into(),
array_helper::vec_f32_to_array_f32(style_vector.to_vec()).into(),
sdp_ratio.into(),
length_scale.into(),
]);
let res = synthesize_fn
.apply(&js_sys::Object::new().into(), &arr)
.map_err(|e| {
error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
})?;
let res = JsFuture::from(Into::<js_sys::Promise>::into(res))
.await
.map_err(|e| {
sbv2_core::error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
})?;
array_helper::array_to_array3_f32(res)
};
let (bert_ori, phones, tones, lang_ids) = tts_util::parse_text(
text,
&JTALK,
&tokenizer.tokenizer,
|token_ids: Vec<i64>, attention_masks: Vec<i64>| {
Box::pin(async move {
let arr = array_helper::vec_to_array(vec![
array_helper::vec64_to_array64(token_ids).into(),
array_helper::vec64_to_array64(attention_masks).into(),
]);
let res = bert_predict_fn
.apply(&js_sys::Object::new().into(), &arr)
.map_err(|e| {
error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
})?;
let res = JsFuture::from(Into::<js_sys::Promise>::into(res))
.await
.map_err(|e| {
sbv2_core::error::Error::OtherError(
e.as_string().unwrap_or("unknown".to_string()),
)
})?;
array_helper::array_to_array2_f32(res)
})
},
)
.await?;
let audio = synthesize_wrap(
bert_ori.to_owned(),
phones,
tones,
lang_ids,
style::get_style_vector(&style_vectors.style_vector, style_id, style_weight)?,
sdp_ratio,
length_scale,
)
.await?;
Ok(array_helper::vec8_to_array8(tts_util::array_to_vec(audio)?))
}

15
sbv2_wasm/tsconfig.json Normal file

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"rootDir": "./src-js",
"outDir": "./dist",
"moduleResolution": "node",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"declaration": true,
"emitDeclarationOnly": true
}
}