Mirror of https://github.com/neodyland/sbv2-api.git (synced 2025-12-23 07:59:56 +00:00)

Compare commits: py-v0.1.1 ... commit-99a (296 commits)
Commits (SHA1):

99a4b130af, d430a6cb51, 61aae68d2d, abb40d4d2d, adb699efe7, 00fa8025d7, 38c5471dcc, 28e116e67d,
5127d48260, f6e9a52b13, 9b7de85c46, 45a221af23, 97541d6a28, 640ef16c4b, 2b5bc27db7, 4d00fcd0bc,
6fc0a47a78, 80e5ddee5b, 143d05c068, 14d604091b, 6fc97b1f33, 6c5ea9adce, e262694702, 554b82a504,
0a911105a3, e5a4774e1a, f036417046, 3e0c24e0ec, 17c1a3467a, db954ff710, 3d2f36a0bf, 6a8b64208c,
f33791cf67, 96eb51cf04, 0f11b9a192, eefd5b723c, c86a79cce5, fd7ba84eef, 6afa667f2e, 70b5852d1b,
208ac216b5, 5f0c836a66, baebe4efd6, 024751cb71, 12269d9b86, d13b0f6952, ba40e89411, 52f7a1779b,
e6932aeeae, 881b431e7b, 755876605e, 9a5da399be, c1c1bbe69a, 9654ca4781, 5fed7e8f41, 8ae456b20b,
908e384cfa, 932af5cbfb, 635dacb653, cb884297c7, b9aa8d1b7c, 5b364a3c10, a11e57d175, 2ddc6e57eb,
ddfaf7f28b, 506ee4d883, a1198d6380, c2430fc794, 2bfca72f41, 95b84ca55b, 033dd99fb6, 15aef30867,
adca252272, 5c74773754, 2e5edcfb32, 184080bec8, d83fcb9f2c, 7095e0ea89, aa07496a08, bf276f51e7,
cc664fae2d, 71ec658772, 5ea2dcff0f, a9ea47dc51, dff939091c, 8a28a4e7a5, 21f845a799, 69015bdf81,
c6e5b73128, 4ff9a38a80, 20cc0573b5, 4b932d568d, 6237cd0fec, 35fabdf681, f09343c97f, f2570d89d0,
ac2a09d6af, c6eaf9cb9f, f2395096ca, 3f6f4ccb6f, 67eba8ee6c, 0aa1bc8733, d1970d99be, fddb35e592,
e26715c809, 26aa4b7df0, de18846280, 38c2e69648, 593dbaf19d, bf44b07be1, 102a8eb065, 68edb3187f,
4a81a06faf, caf541ef65, 05c3846b7b, 1b2054c4b8, a7fbfa2017, db09b73b32, 843ef36148, aa7fc2e3b0,
fc4a79c111, 4db7f49fa5, edee0710aa, 9bcbd496e5, 90b3ba2e40, 9ceec03bd0, 5e9df65656, 2eda2fe9ca,
9c9119a107, 2c1a1dffc0, ed7bf53b89, 4375df2689, 789cef74ce, 5b403a2255, 62653ec1c3, 83076227e7,
f90904a337, 4e0c8591cd, 997b562682, fbd62315d0, 060af0c187, b76738f467, 8598167114, 001f61bb6a,
9b9962ed29, b414d22a3b, 248363ae4a, c4b61a36db, 35d16d88a8, fe48d6a034, bca4b2053f, 3330242cd8,
f10f71f29b, 7bd39b7182, 2d557fb0ee, 14d631eeaa, 380daf479c, cb814a9952, 795caf626c, fb32357f31,
e4010b3b83, 17244a9ede, 61b04fd3d7, 4e57a22a40, 8e10057882, 0222b9a189, 5e96d5aef7, 234120f510,
08f7ab88ec, 005c67c9b6, cb08b5b582, 105b3ce8de, 78a5016abc, 7e6bd4ad0a, e1c6cd04b7, a15efdff09,
21823721d0, aad978be4b, 6dd2cbd991, d7b76cc207, ae0ccb29d2, 4bcde2e4b4, 2356c896f6, d5445abeee,
673ec0067d, 74f657cb33, 08be778cc5, 6da2f5a0bb, 107190765f, df726e6f7b, e5b1ccc36b, 40cb604c57,
9152c80c76, 574092562e, 2e931adce7, e36c395db1, cfe88629ab, 30a98f0968, 92ae4bc300, b6a9bea7ea,
8c88dd7c87, 61760b8d7d, 5bbc247a89, b6f36def58, 664176a11b, 432b68590c, 6283cfedfe, df9c5d792d,
d1cc8de976, c7d911220b, e73514e5d3, 45a671cf52, c4005808bd, c312fb0ce4, 4b4ce82654, 3ff226659b,
86d0e60eec, d337d7caf8, cbd12a369b, 4a09b50a59, 1c5863441c, 42c5e32a5a, 76bdd8f025, 8e14e0b942,
378f7d7095, b63a3ccf78, 5238640144, da3a61a5e7, 74043c636f, 7663a754a6, cb2e52fb18, ac3945748a,
1e2cde365f, eecf6d90f7, e154fbf493, f5de643a21, 4b661e3b5f, 055c08b5d0, cdbcbde04c, cfd30764d0,
3708d9fec3, 065a7b9215, dc88251d41, 1550ce6ee4, c1bebea69b, af5a550b8f, febfd0d84f, 55698f4a61,
b0155f5ffa, 0e9c7b6522, b0d8be32b6, f76f5e6d1c, e8cc450693, 6f0fcd491c, 5cf4149024, 65303173a8,
30e4cde3ed, 596eec654d, ee292315e1, 731c751455, 497bdd79ea, b887fae47b, ca0b8553e4, 29b14895bb,
c2910ad9e8, 5c092e8cbb, d380e549c4, 395f5b0004, f5609035b7, 1e9f25dcb1, 321ca4e749, bb23bd145b,
30e79d0df6, 04c21aa97c, 6f388052ae, 04af3abad5, 414e42db50, b8b0198ca8, a99fd39834, 886ab78eeb,
c85f474dbf, 6d160d7ae8, ee927d65cb, 6e7d641ecb, eb249aad81, f79a67138f, 09945e2c1c, 821b4c7fb3,
ec06c35929, 1373aef4b2, e2e49fd0e8, 0cf9f87cc9, 5e500b2c42, 136375e5b6, aade119ddb, 0bb3c5b8ea
@@ -3,4 +3,5 @@ MODEL_PATH=models/tsukuyomi.sbv2
MODELS_PATH=models
TOKENIZER_PATH=models/tokenizer.json
ADDR=localhost:3000
RUST_LOG=warn
RUST_LOG=warn
HOLDER_MAX_LOADED_MODElS=20
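For orientation, here is a minimal sketch of how these settings are consumed at runtime; it simply mirrors the env::var lookups that appear in the Rust sources later in this comparison (BERT_MODEL_PATH and TOKENIZER_PATH are further keys used there). The Settings struct and the main function are illustrative only, not part of the repository; dotenvy and anyhow are assumed to be available as dependencies, as they are in the workspace.

```rust
use std::env;

// Illustrative only: collects the environment keys used by the binaries shown
// further down in this comparison, with the same fallbacks they apply.
struct Settings {
    bert_model_path: String,
    tokenizer_path: String,
    models_path: String,
    addr: String,
    max_loaded_models: Option<usize>,
}

fn main() -> anyhow::Result<()> {
    // Load .env first, as the binaries below do via dotenvy.
    dotenvy::dotenv_override().ok();
    let settings = Settings {
        bert_model_path: env::var("BERT_MODEL_PATH")?,
        tokenizer_path: env::var("TOKENIZER_PATH")?,
        models_path: env::var("MODELS_PATH").unwrap_or("models".to_string()),
        addr: env::var("ADDR").unwrap_or("0.0.0.0:3000".to_string()),
        // Note the lowercase "l": the code really reads HOLDER_MAX_LOADED_MODElS.
        max_loaded_models: env::var("HOLDER_MAX_LOADED_MODElS")
            .ok()
            .and_then(|x| x.parse().ok()),
    };
    println!(
        "bert: {}, tokenizer: {}, models dir: {}, addr: {}, max loaded: {:?}",
        settings.bert_model_path,
        settings.tokenizer_path,
        settings.models_path,
        settings.addr,
        settings.max_loaded_models
    );
    Ok(())
}
```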
.github/FUNDING.yml (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# These are supported funding model platforms

github: [tuna2134]
.github/pull_request_template.md (vendored, 9 lines changed)
@@ -1,8 +1,13 @@
## 概要
(ここに本PRの説明をしてください。)
<!--
ここに本PRの説明をしてください。
-->

## 関連issue
(ここに該当するissueの番号を書いてください。)
<!--
ここに該当するissueの番号を書いてください。
#nの前にfixesを置くとプルリクが閉じた時に自動的に該当issueもクローズします、
-->

## 確認
- [ ] 動作確認しましたか?
.github/workflows/CI.Dockerfile (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
FROM ubuntu:latest
RUN apt update && apt install openssl libssl-dev curl pkg-config software-properties-common -y && add-apt-repository ppa:deadsnakes/ppa && apt update && apt install python3.7 python3.8 python3.9 python3.10 python3.11 python3.12 python3.13 python3-pip python3 -y
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN mkdir -p /root/.cache/sbv2 && curl https://huggingface.co/neody/sbv2-api-assets/resolve/main/dic/all.bin -o /root/.cache/sbv2/all.bin -L
.github/workflows/CI.yml (vendored, 138 lines changed)
@@ -1,39 +1,35 @@
|
||||
# This file is autogenerated by maturin v1.7.1
|
||||
# To update, run
|
||||
#
|
||||
# maturin generate-ci github
|
||||
#
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
tags:
|
||||
- '*'
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
contents: write
|
||||
id-token: write
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
python-linux:
|
||||
runs-on: ${{ matrix.platform.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- runner: ubuntu-latest
|
||||
target: x86_64
|
||||
- runner: ubuntu-latest
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
- run: docker build . -f .github/workflows/CI.Dockerfile --tag ci
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
@@ -41,14 +37,15 @@ jobs:
|
||||
args: --release --out dist --find-interpreter
|
||||
sccache: 'true'
|
||||
manylinux: auto
|
||||
working-directory: sbv2_bindings
|
||||
container: ci
|
||||
working-directory: ./crates/sbv2_bindings
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-linux-${{ matrix.platform.target }}
|
||||
path: sbv2_bindings/dist
|
||||
path: ./crates/sbv2_bindings/dist
|
||||
|
||||
windows:
|
||||
python-windows:
|
||||
runs-on: ${{ matrix.platform.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -67,20 +64,18 @@ jobs:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --out dist --find-interpreter
|
||||
sccache: 'true'
|
||||
working-directory: sbv2_bindings
|
||||
working-directory: ./crates/sbv2_bindings
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-windows-${{ matrix.platform.target }}
|
||||
path: sbv2_bindings/dist
|
||||
path: ./crates/sbv2_bindings/dist
|
||||
|
||||
macos:
|
||||
python-macos:
|
||||
runs-on: ${{ matrix.platform.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- runner: macos-12
|
||||
target: x86_64
|
||||
- runner: macos-14
|
||||
target: aarch64
|
||||
steps:
|
||||
@@ -94,14 +89,14 @@ jobs:
|
||||
target: ${{ matrix.platform.target }}
|
||||
args: --release --out dist --find-interpreter
|
||||
sccache: 'true'
|
||||
working-directory: sbv2_bindings
|
||||
working-directory: ./crates/sbv2_bindings
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-macos-${{ matrix.platform.target }}
|
||||
path: sbv2_bindings/dist
|
||||
path: ./crates/sbv2_bindings/dist
|
||||
|
||||
sdist:
|
||||
python-sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -110,23 +105,116 @@ jobs:
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
working-directory: sbv2_bindings
|
||||
working-directory: ./crates/sbv2_bindings
|
||||
- name: Upload sdist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: sbv2_bindings/dist
|
||||
path: ./crates/sbv2_bindings/dist
|
||||
python-wheel:
|
||||
name: Wheel Upload
|
||||
runs-on: ubuntu-latest
|
||||
needs: [python-linux, python-windows, python-macos, python-sdist]
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: gh run download ${{ github.run_id }} -p wheels-*
|
||||
- name: release
|
||||
run: |
|
||||
gh release create commit-${GITHUB_SHA:0:8} --prerelease wheels-*/*
|
||||
|
||||
release:
|
||||
python-release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
if: "startsWith(github.ref, 'refs/tags/')"
|
||||
needs: [linux, windows, macos, sdist]
|
||||
needs: [python-linux, python-windows, python-macos, python-sdist]
|
||||
environment: release
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/checkout@v4
|
||||
- run: gh run download ${{ github.run_id }} -p wheels-*
|
||||
- name: Publish to PyPI
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: upload
|
||||
args: --non-interactive --skip-existing wheels-*/*
|
||||
|
||||
docker:
|
||||
runs-on: ${{ matrix.machine.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
machine:
|
||||
- platform: amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
tag: [cpu, cuda]
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.machine.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
ghcr.io/${{ github.repository }}
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
file: ./scripts/docker/${{ matrix.tag }}.Dockerfile
|
||||
push: true
|
||||
tags: |
|
||||
ghcr.io/${{ github.repository }}:latest-${{ matrix.tag }}-${{ matrix.machine.platform }}
|
||||
|
||||
docker-merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Merge
|
||||
run: |
|
||||
docker buildx imagetools create -t ghcr.io/${{ github.repository }}:cuda \
|
||||
ghcr.io/${{ github.repository }}:latest-cuda-amd64 \
|
||||
ghcr.io/${{ github.repository }}:latest-cuda-arm64
|
||||
docker buildx imagetools create -t ghcr.io/${{ github.repository }}:cpu \
|
||||
ghcr.io/${{ github.repository }}:latest-cpu-amd64 \
|
||||
ghcr.io/${{ github.repository }}:latest-cpu-arm64
|
||||
|
||||
.github/workflows/build.yml (vendored, file deleted, 40 lines)
@@ -1,40 +0,0 @@
name: Push to github container register

on:
  release:
    types: [created]
  workflow_dispatch:

jobs:
  push-docker:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    strategy:
      matrix:
        tag: [cpu, cuda]
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: |
            ghcr.io/${{ github.repository }}:${{ matrix.tag }}
          file: docker/${{ matrix.tag }}.Dockerfile
          platforms: ${{ matrix.platform }}
.gitignore (vendored, 9 lines changed)
@@ -1,7 +1,10 @@
target
target/
models/
!models/.gitkeep
venv/
.env
output.wav
node_modules
*.wav
node_modules/
dist/
*.csv
*.bin
Cargo.lock (generated, 1903 lines changed)
File diff suppressed because it is too large.
Cargo.toml (21 lines changed)
@@ -1,14 +1,25 @@
[workspace]
resolver = "2"
members = ["sbv2_api", "sbv2_core", "sbv2_bindings"]
members = ["./crates/sbv2_api", "./crates/sbv2_core", "./crates/sbv2_bindings", "./crates/sbv2_wasm"]

[workspace.package]
version = "0.2.0-alpha6"
edition = "2021"
description = "Style-Bert-VITSの推論ライブラリ"
license = "MIT"
readme = "./README.md"
repository = "https://github.com/tuna2134/sbv2-api"
documentation = "https://docs.rs/sbv2_core"

[workspace.dependencies]
anyhow = "1.0.86"
anyhow = "1.0.96"
dotenvy = "0.15.7"
env_logger = "0.11.5"
env_logger = "0.11.6"
ndarray = "0.16.1"
once_cell = "1.20.3"

[profile.release]
strip = true
opt-level = "z"
lto = true
debug = false
strip = true
codegen-units = 1
README.md (33 lines changed)
@@ -1,5 +1,11 @@
# SBV2-API

## 注意:本バージョンはアルファ版です。
安定版を利用したい場合は[こちら](https://github.com/tuna2134/sbv2-api/tree/v0.1.x)をご覧ください。

## 注意: オプションの辞書はLGPLです。
オプションの辞書を使用する場合、バイナリの内部の辞書部分について、LGPLが適用されます。

## プログラミングに詳しくない方向け

[こちら](https://github.com/tuna2134/sbv2-gui?tab=readme-ov-file)を参照してください。
@@ -14,7 +20,7 @@ JP-Extra しか対応していません。(基本的に対応する予定もあ

## 変換方法

[こちら](https://github.com/tuna2134/sbv2-api/tree/main/convert)を参照してください。
[こちら](https://github.com/tuna2134/sbv2-api/tree/main/scripts/convert)を参照してください。

## Todo

@@ -26,22 +32,27 @@ JP-Extra しか対応していません。(基本的に対応する予定もあ
- [x] GPU 対応(CUDA)
- [x] GPU 対応(DirectML)
- [x] GPU 対応(CoreML)
- [ ] WASM 変換(依存ライブラリの関係により現在は不可)
- [ ] arm64のdockerサポート
- [x] WASM 変換
- [x] arm64のdockerサポート
- [x] aivis形式のサポート
- [ ] MeCabを利用する

## 構造説明

- `sbv2_api` - 推論用 REST API
- `sbv2_core` - 推論コア部分
- `docker` - docker ビルドスクリプト
- `convert` - onnx, sbv2フォーマットへの変換スクリプト
- `crates/sbv2_api` - 推論用 REST API
- `crates/sbv2_core` - 推論コア部分
- `scripts/docker` - docker ビルドスクリプト
- `scripts/convert` - onnx, sbv2フォーマットへの変換スクリプト

## プログラミングある程度できる人向けREST API起動方法

### models をインストール

https://huggingface.co/googlefan/sbv2_onnx_models/tree/main
の`tokenizer.json`,`debert.onnx`,`tsukuyomi.sbv2`を models フォルダに配置
https://huggingface.co/neody/sbv2-api-assets/tree/main/deberta
から`tokenizer.json`,`debert.onnx`
https://huggingface.co/neody/sbv2-api-assets/tree/main/model
から`tsukuyomi.sbv2`
を models フォルダに配置

### .env ファイルの作成

@@ -110,8 +121,10 @@ curl http://localhost:3000/models
- `ADDR` `localhost:3000`などのようにサーバー起動アドレスをコントロールできます。
- `MODELS_PATH` sbv2モデルの存在するフォルダを指定できます。
- `RUST_LOG` おなじみlog levelです。
- `HOLDER_MAX_LOADED_MODElS` RAMにロードされるモデルの最大数を指定します。

## 謝辞

- [litagin02/Style-Bert-VITS2](https://github.com/litagin02/Style-Bert-VITS2) - このコードの書くにあたり、ベースとなる部分を参考にさせていただきました。
- [litagin02/Style-Bert-VITS2](https://github.com/litagin02/Style-Bert-VITS2) - このコードを書くにあたり、ベースとなる部分を参考にさせていただきました。
- [Googlefan](https://github.com/Googlefan256) - 彼にモデルを ONNX ヘ変換および効率化をする方法を教わりました。
- [Aivis Project](https://github.com/Aivis-Project/AivisSpeech-Engine) - 辞書部分

@@ -1 +0,0 @@
10,000年前までコロナが流行っていました
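The README above shows `curl http://localhost:3000/models` for listing models; the other endpoint, POST /synthesize, accepts a JSON body whose fields come from the SynthesizeRequest struct that appears later in this comparison. Below is a hedged client-side sketch of such a body; the concrete values are placeholders, and the use of serde_json (already among the crates' dependencies) is an assumption for the example only.

```rust
// Illustrative request body for POST /synthesize. Field names are taken from
// the SynthesizeRequest struct shown later in this diff; values are examples.
fn example_synthesize_body() -> serde_json::Value {
    serde_json::json!({
        "text": "こんにちは",      // text to synthesize
        "ident": "tsukuyomi",      // identifier of a model loaded from MODELS_PATH
        "sdp_ratio": 0.0,
        "length_scale": 1.0,       // optional; serde default of 1.0 in the handler
        "style_id": 0,             // optional; defaults to 0
        "speaker_id": 0            // optional; defaults to 0
    })
}
```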
crates/sbv2_api/Cargo.toml (new file, 29 lines)
@@ -0,0 +1,29 @@
[package]
name = "sbv2_api"
version.workspace = true
edition.workspace = true
description.workspace = true
readme.workspace = true
repository.workspace = true
documentation.workspace = true
license.workspace = true

[dependencies]
anyhow.workspace = true
axum = "0.8.0"
dotenvy.workspace = true
env_logger.workspace = true
log = "0.4.22"
sbv2_core = { version = "0.2.0-alpha6", path = "../sbv2_core", features = ["aivmx"] }
serde = { version = "1.0.210", features = ["derive"] }
tokio = { version = "1.40.0", features = ["full"] }
utoipa = { version = "5.0.0", features = ["axum_extras"] }
utoipa-scalar = { version = "0.3.0", features = ["axum"] }

[features]
coreml = ["sbv2_core/coreml"]
cuda = ["sbv2_core/cuda"]
cuda_tf32 = ["sbv2_core/cuda_tf32"]
dynamic = ["sbv2_core/dynamic"]
directml = ["sbv2_core/directml"]
tensorrt = ["sbv2_core/tensorrt"]
crates/sbv2_api/build.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
fn main() {
    if cfg!(feature = "coreml") {
        println!("cargo:rustc-link-arg=-fapple-link-rtlib");
    }
}
@@ -11,10 +11,23 @@ use std::env;
|
||||
use std::sync::Arc;
|
||||
use tokio::fs;
|
||||
use tokio::sync::Mutex;
|
||||
use utoipa::{OpenApi, ToSchema};
|
||||
use utoipa_scalar::{Scalar, Servable};
|
||||
|
||||
mod error;
|
||||
use crate::error::AppResult;
|
||||
|
||||
#[derive(OpenApi)]
|
||||
#[openapi(paths(models, synthesize), components(schemas(SynthesizeRequest)))]
|
||||
struct ApiDoc;
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/models",
|
||||
responses(
|
||||
(status = 200, description = "Return model list", body = Vec<String>),
|
||||
)
|
||||
)]
|
||||
async fn models(State(state): State<AppState>) -> AppResult<impl IntoResponse> {
|
||||
Ok(Json(state.tts_model.lock().await.models()))
|
||||
}
|
||||
@@ -27,7 +40,15 @@ fn length_default() -> f32 {
|
||||
1.0
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
fn style_id_default() -> i32 {
|
||||
0
|
||||
}
|
||||
|
||||
fn speaker_id_default() -> i64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
struct SynthesizeRequest {
|
||||
text: String,
|
||||
ident: String,
|
||||
@@ -35,8 +56,20 @@ struct SynthesizeRequest {
|
||||
sdp_ratio: f32,
|
||||
#[serde(default = "length_default")]
|
||||
length_scale: f32,
|
||||
#[serde(default = "style_id_default")]
|
||||
style_id: i32,
|
||||
#[serde(default = "speaker_id_default")]
|
||||
speaker_id: i64,
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/synthesize",
|
||||
request_body = SynthesizeRequest,
|
||||
responses(
|
||||
(status = 200, description = "Return audio/wav", body = Vec<u8>, content_type = "audio/wav")
|
||||
)
|
||||
)]
|
||||
async fn synthesize(
|
||||
State(state): State<AppState>,
|
||||
Json(SynthesizeRequest {
|
||||
@@ -44,15 +77,18 @@ async fn synthesize(
|
||||
ident,
|
||||
sdp_ratio,
|
||||
length_scale,
|
||||
style_id,
|
||||
speaker_id,
|
||||
}): Json<SynthesizeRequest>,
|
||||
) -> AppResult<impl IntoResponse> {
|
||||
log::debug!("processing request: text={text}, ident={ident}, sdp_ratio={sdp_ratio}, length_scale={length_scale}");
|
||||
let buffer = {
|
||||
let tts_model = state.tts_model.lock().await;
|
||||
let mut tts_model = state.tts_model.lock().await;
|
||||
tts_model.easy_synthesize(
|
||||
&ident,
|
||||
&text,
|
||||
0,
|
||||
style_id,
|
||||
speaker_id,
|
||||
SynthesizeOptions {
|
||||
sdp_ratio,
|
||||
length_scale,
|
||||
@@ -73,6 +109,9 @@ impl AppState {
|
||||
let mut tts_model = TTSModelHolder::new(
|
||||
&fs::read(env::var("BERT_MODEL_PATH")?).await?,
|
||||
&fs::read(env::var("TOKENIZER_PATH")?).await?,
|
||||
env::var("HOLDER_MAX_LOADED_MODElS")
|
||||
.ok()
|
||||
.and_then(|x| x.parse().ok()),
|
||||
)?;
|
||||
let models = env::var("MODELS_PATH").unwrap_or("models".to_string());
|
||||
let mut f = fs::read_dir(&models).await?;
|
||||
@@ -101,6 +140,20 @@ impl AppState {
|
||||
log::warn!("Error loading {entry}: {e}");
|
||||
};
|
||||
log::info!("Loaded: {entry}");
|
||||
} else if name.ends_with(".aivmx") {
|
||||
let entry = &name[..name.len() - 6];
|
||||
log::info!("Try loading: {entry}");
|
||||
let aivmx_bytes = match fs::read(format!("{models}/{entry}.aivmx")).await {
|
||||
Ok(b) => b,
|
||||
Err(e) => {
|
||||
log::warn!("Error loading aivmx bytes from file {entry}: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if let Err(e) = tts_model.load_aivmx(entry, aivmx_bytes) {
|
||||
log::error!("Error loading {entry}: {e}");
|
||||
}
|
||||
log::info!("Loaded: {entry}");
|
||||
}
|
||||
}
|
||||
for entry in entries {
|
||||
@@ -139,7 +192,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/", get(|| async { "Hello, World!" }))
|
||||
.route("/synthesize", post(synthesize))
|
||||
.route("/models", get(models))
|
||||
.with_state(AppState::new().await?);
|
||||
.with_state(AppState::new().await?)
|
||||
.merge(Scalar::with_url("/docs", ApiDoc::openapi()));
|
||||
let addr = env::var("ADDR").unwrap_or("0.0.0.0:3000".to_string());
|
||||
let listener = tokio::net::TcpListener::bind(&addr).await?;
|
||||
log::info!("Listening on {addr}");
|
||||
crates/sbv2_bindings/Cargo.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
[package]
name = "sbv2_bindings"
version.workspace = true
edition.workspace = true
description.workspace = true
readme.workspace = true
repository.workspace = true
documentation.workspace = true
license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "sbv2_bindings"
crate-type = ["cdylib"]

[dependencies]
anyhow.workspace = true
ndarray.workspace = true
pyo3 = { version = "0.23.0", features = ["anyhow"] }
sbv2_core = { path = "../sbv2_core", features = ["std"], default-features = false }

[features]
agpl_dict = ["sbv2_core/agpl_dict"]
default = ["agpl_dict"]
@@ -11,5 +11,7 @@ classifiers = [
    "Programming Language :: Python :: Implementation :: PyPy",
]
dynamic = ["version"]

[tool.maturin]
features = ["pyo3/extension-module"]
strip = true
@@ -1,6 +1,6 @@
|
||||
use pyo3::prelude::*;
|
||||
use pyo3::types::PyBytes;
|
||||
use sbv2_core::tts::{TTSModelHolder, SynthesizeOptions};
|
||||
use sbv2_core::tts::{SynthesizeOptions, TTSModelHolder};
|
||||
|
||||
use crate::style::StyleVector;
|
||||
|
||||
@@ -23,10 +23,15 @@ pub struct TTSModel {
|
||||
|
||||
#[pymethods]
|
||||
impl TTSModel {
|
||||
#[pyo3(signature = (bert_model_bytes, tokenizer_bytes, max_loaded_models=None))]
|
||||
#[new]
|
||||
fn new(bert_model_bytes: Vec<u8>, tokenizer_bytes: Vec<u8>) -> anyhow::Result<Self> {
|
||||
fn new(
|
||||
bert_model_bytes: Vec<u8>,
|
||||
tokenizer_bytes: Vec<u8>,
|
||||
max_loaded_models: Option<usize>,
|
||||
) -> anyhow::Result<Self> {
|
||||
Ok(Self {
|
||||
model: TTSModelHolder::new(bert_model_bytes, tokenizer_bytes)?,
|
||||
model: TTSModelHolder::new(bert_model_bytes, tokenizer_bytes, max_loaded_models)?,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -38,10 +43,21 @@ impl TTSModel {
|
||||
/// BERTモデルのパス
|
||||
/// tokenizer_path : str
|
||||
/// トークナイザーのパス
|
||||
/// max_loaded_models: int | None
|
||||
/// 同時にVRAMに存在するモデルの数
|
||||
#[pyo3(signature = (bert_model_path, tokenizer_path, max_loaded_models=None))]
|
||||
#[staticmethod]
|
||||
fn from_path(bert_model_path: String, tokenizer_path: String) -> anyhow::Result<Self> {
|
||||
fn from_path(
|
||||
bert_model_path: String,
|
||||
tokenizer_path: String,
|
||||
max_loaded_models: Option<usize>,
|
||||
) -> anyhow::Result<Self> {
|
||||
Ok(Self {
|
||||
model: TTSModelHolder::new(fs::read(bert_model_path)?, fs::read(tokenizer_path)?)?,
|
||||
model: TTSModelHolder::new(
|
||||
fs::read(bert_model_path)?,
|
||||
fs::read(tokenizer_path)?,
|
||||
max_loaded_models,
|
||||
)?,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -91,7 +107,7 @@ impl TTSModel {
|
||||
/// style_vector : StyleVector
|
||||
/// スタイルベクトル
|
||||
fn get_style_vector(
|
||||
&self,
|
||||
&mut self,
|
||||
ident: String,
|
||||
style_id: i32,
|
||||
weight: f32,
|
||||
@@ -121,25 +137,27 @@ impl TTSModel {
|
||||
/// voice_data : bytes
|
||||
/// 音声データ
|
||||
fn synthesize<'p>(
|
||||
&'p self,
|
||||
&'p mut self,
|
||||
py: Python<'p>,
|
||||
text: String,
|
||||
ident: String,
|
||||
style_id: i32,
|
||||
speaker_id: i64,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32,
|
||||
) -> anyhow::Result<Bound<PyBytes>> {
|
||||
) -> anyhow::Result<Bound<'p, PyBytes>> {
|
||||
let data = self.model.easy_synthesize(
|
||||
ident.as_str(),
|
||||
&text,
|
||||
style_id,
|
||||
speaker_id,
|
||||
SynthesizeOptions {
|
||||
sdp_ratio,
|
||||
length_scale,
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
Ok(PyBytes::new_bound(py, &data))
|
||||
Ok(PyBytes::new(py, &data))
|
||||
}
|
||||
|
||||
fn unload(&mut self, ident: String) -> bool {
|
||||
crates/sbv2_core/Cargo.toml (new file, 47 lines)
@@ -0,0 +1,47 @@
[package]
name = "sbv2_core"
version.workspace = true
edition.workspace = true
description.workspace = true
readme.workspace = true
repository.workspace = true
documentation.workspace = true
license.workspace = true

[dependencies]
anyhow.workspace = true
base64 = { version = "0.22.1", optional = true }
dotenvy.workspace = true
env_logger.workspace = true
hound = "3.5.1"
jpreprocess = { version = "0.12.0", features = ["naist-jdic"] }
ndarray.workspace = true
npyz = { version = "0.8.3", optional = true }
num_cpus = "1.16.0"
once_cell.workspace = true
ort = { git = "https://github.com/pykeio/ort.git", version = "2.0.0-rc.9", optional = true }
regex = "1.10.6"
serde = { version = "1.0.210", features = ["derive"] }
serde_json = "1.0.128"
tar = "0.4.41"
thiserror = "2.0.11"
tokenizers = { version = "0.21.0", default-features = false }
zstd = "0.13.2"

[features]
cuda = ["ort/cuda", "std"]
cuda_tf32 = ["std", "cuda"]
agpl_dict = []
std = ["dep:ort", "tokenizers/progressbar", "tokenizers/onig", "tokenizers/esaxx_fast"]
dynamic = ["ort/load-dynamic", "std"]
directml = ["ort/directml", "std"]
tensorrt = ["ort/tensorrt", "std"]
coreml = ["ort/coreml", "std"]
default = ["std", "agpl_dict"]
no_std = ["tokenizers/unstable_wasm"]
aivmx = ["npyz", "base64"]
base64 = ["dep:base64"]

[build-dependencies]
dirs = "6.0.0"
ureq = "3.0.6"
crates/sbv2_core/build.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use dirs::home_dir;
use std::env;
use std::fs;
use std::io::copy;
use std::path::PathBuf;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let static_path = home_dir().unwrap().join(".cache/sbv2/all.bin");
    let out_path = PathBuf::from(&env::var("OUT_DIR").unwrap()).join("all.bin");
    println!("cargo:rerun-if-changed=build.rs");
    if static_path.exists() {
        if fs::hard_link(&static_path, &out_path).is_err() {
            fs::copy(static_path, out_path).unwrap();
        };
    } else {
        println!("cargo:warning=Downloading dictionary file...");
        let mut response =
            ureq::get("https://huggingface.co/neody/sbv2-api-assets/resolve/main/dic/all.bin")
                .call()?;
        let mut response = response.body_mut().as_reader();
        let mut file = fs::File::create(&out_path)?;
        copy(&mut response, &mut file)?;
    }
    Ok(())
}
crates/sbv2_core/src/bert.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
use crate::error::Result;
use ndarray::{Array2, Ix2};
use ort::session::Session;
use ort::value::TensorRef;

pub fn predict(
    session: &mut Session,
    token_ids: Vec<i64>,
    attention_masks: Vec<i64>,
) -> Result<Array2<f32>> {
    let outputs = session.run(
        ort::inputs! {
            "input_ids" => TensorRef::from_array_view((vec![1, token_ids.len() as i64], token_ids.as_slice()))?,
            "attention_mask" => TensorRef::from_array_view((vec![1, attention_masks.len() as i64], attention_masks.as_slice()))?,
        }
    )?;

    let output = outputs["output"]
        .try_extract_tensor::<f32>()?
        .into_dimensionality::<Ix2>()?
        .to_owned();

    Ok(output)
}
@@ -6,6 +6,9 @@ pub enum Error {
    TokenizerError(#[from] tokenizers::Error),
    #[error("JPreprocess error: {0}")]
    JPreprocessError(#[from] jpreprocess::error::JPreprocessError),
    #[error("Lindera error: {0}")]
    LinderaError(String),
    #[cfg(feature = "std")]
    #[error("ONNX error: {0}")]
    OrtError(#[from] ort::Error),
    #[error("NDArray error: {0}")]
@@ -20,6 +23,11 @@ pub enum Error {
    HoundError(#[from] hound::Error),
    #[error("model not found error")]
    ModelNotFoundError(String),
    #[cfg(feature = "base64")]
    #[error("base64 error")]
    Base64Error(#[from] base64::DecodeError),
    #[error("other")]
    OtherError(String),
}

pub type Result<T> = std::result::Result<T, Error>;
@@ -1,37 +1,33 @@
|
||||
use crate::error::{Error, Result};
|
||||
use crate::mora::{MORA_KATA_TO_MORA_PHONEMES, VOWELS};
|
||||
use crate::norm::{replace_punctuation, PUNCTUATIONS};
|
||||
use jpreprocess::*;
|
||||
use jpreprocess::{kind, DefaultTokenizer, JPreprocess, SystemDictionaryConfig, UserDictionary};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use std::cmp::Reverse;
|
||||
use std::collections::HashSet;
|
||||
use std::sync::Arc;
|
||||
|
||||
type JPreprocessType = JPreprocess<DefaultFetcher>;
|
||||
type JPreprocessType = JPreprocess<DefaultTokenizer>;
|
||||
|
||||
fn initialize_jtalk() -> Result<JPreprocessType> {
|
||||
let config = JPreprocessConfig {
|
||||
dictionary: SystemDictionaryConfig::Bundled(kind::JPreprocessDictionaryKind::NaistJdic),
|
||||
user_dictionary: None,
|
||||
};
|
||||
let jpreprocess = JPreprocess::from_config(config)?;
|
||||
Ok(jpreprocess)
|
||||
#[cfg(feature = "agpl_dict")]
|
||||
fn agpl_dict() -> Result<Option<UserDictionary>> {
|
||||
Ok(Some(
|
||||
UserDictionary::load(include_bytes!(concat!(env!("OUT_DIR"), "/all.bin")))
|
||||
.map_err(|e| Error::LinderaError(e.to_string()))?,
|
||||
))
|
||||
}
|
||||
|
||||
static JTALK_G2P_G_A1_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"/A:([0-9\-]+)\+").unwrap());
|
||||
static JTALK_G2P_G_A2_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\+(\d+)\+").unwrap());
|
||||
static JTALK_G2P_G_A3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\+(\d+)/").unwrap());
|
||||
static JTALK_G2P_G_E3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"!(\d+)_").unwrap());
|
||||
static JTALK_G2P_G_F1_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"/F:(\d+)_").unwrap());
|
||||
static JTALK_G2P_G_P3_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"\-(.*?)\+").unwrap());
|
||||
#[cfg(not(feature = "agpl_dict"))]
|
||||
fn agpl_dict() -> Result<Option<UserDictionary>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn numeric_feature_by_regex(regex: &Regex, text: &str) -> i32 {
|
||||
if let Some(mat) = regex.captures(text) {
|
||||
mat[1].parse::<i32>().unwrap()
|
||||
} else {
|
||||
-50
|
||||
}
|
||||
fn initialize_jtalk() -> Result<JPreprocessType> {
|
||||
let sdic =
|
||||
SystemDictionaryConfig::Bundled(kind::JPreprocessDictionaryKind::NaistJdic).load()?;
|
||||
let jpreprocess = JPreprocess::with_dictionaries(sdic, agpl_dict()?);
|
||||
Ok(jpreprocess)
|
||||
}
|
||||
|
||||
macro_rules! hash_set {
|
||||
@@ -239,7 +235,10 @@ impl JTalkProcess {
|
||||
}
|
||||
|
||||
fn kata_to_phoneme_list(mut text: String) -> Result<Vec<String>> {
|
||||
if PUNCTUATIONS.contains(&text.as_str()) {
|
||||
let chars: HashSet<String> = text.chars().map(|x| x.to_string()).collect();
|
||||
if chars.is_subset(&HashSet::from_iter(
|
||||
PUNCTUATIONS.iter().map(|x| x.to_string()),
|
||||
)) {
|
||||
return Ok(text.chars().map(|x| x.to_string()).collect());
|
||||
}
|
||||
if !KATAKANA_PATTERN.is_match(&text) {
|
||||
@@ -351,11 +350,7 @@ impl JTalkProcess {
|
||||
|
||||
let mut phones: Vec<String> = Vec::new();
|
||||
for (i, label) in labels.iter().enumerate() {
|
||||
let mut p3 = {
|
||||
let label_text = label.to_string();
|
||||
let mattched = JTALK_G2P_G_P3_PATTERN.captures(&label_text).unwrap();
|
||||
mattched[1].to_string()
|
||||
};
|
||||
let mut p3 = label.phoneme.c.clone().unwrap();
|
||||
if "AIUEO".contains(&p3) {
|
||||
// 文字をlowerする
|
||||
p3 = p3.to_lowercase();
|
||||
@@ -365,10 +360,10 @@ impl JTalkProcess {
|
||||
if i == 0 {
|
||||
phones.push("^".to_string());
|
||||
} else if i == labels.len() - 1 {
|
||||
let e3 = numeric_feature_by_regex(&JTALK_G2P_G_E3_PATTERN, &label.to_string());
|
||||
if e3 == 0 {
|
||||
let e3 = label.accent_phrase_prev.clone().unwrap().is_interrogative;
|
||||
if e3 {
|
||||
phones.push("$".to_string());
|
||||
} else if e3 == 1 {
|
||||
} else {
|
||||
phones.push("?".to_string());
|
||||
}
|
||||
}
|
||||
@@ -380,14 +375,33 @@ impl JTalkProcess {
|
||||
phones.push(p3.clone());
|
||||
}
|
||||
|
||||
let a1 = numeric_feature_by_regex(&JTALK_G2P_G_A1_PATTERN, &label.to_string());
|
||||
let a2 = numeric_feature_by_regex(&JTALK_G2P_G_A2_PATTERN, &label.to_string());
|
||||
let a3 = numeric_feature_by_regex(&JTALK_G2P_G_A3_PATTERN, &label.to_string());
|
||||
let a1 = if let Some(mora) = &label.mora {
|
||||
mora.relative_accent_position as i32
|
||||
} else {
|
||||
-50
|
||||
};
|
||||
let a2 = if let Some(mora) = &label.mora {
|
||||
mora.position_forward as i32
|
||||
} else {
|
||||
-50
|
||||
};
|
||||
let a3 = if let Some(mora) = &label.mora {
|
||||
mora.position_backward as i32
|
||||
} else {
|
||||
-50
|
||||
};
|
||||
|
||||
let f1 = numeric_feature_by_regex(&JTALK_G2P_G_F1_PATTERN, &label.to_string());
|
||||
let f1 = if let Some(accent_phrase) = &label.accent_phrase_curr {
|
||||
accent_phrase.mora_count as i32
|
||||
} else {
|
||||
-50
|
||||
};
|
||||
|
||||
let a2_next =
|
||||
numeric_feature_by_regex(&JTALK_G2P_G_A2_PATTERN, &labels[i + 1].to_string());
|
||||
let a2_next = if let Some(mora) = &labels[i + 1].mora {
|
||||
mora.position_forward as i32
|
||||
} else {
|
||||
-50
|
||||
};
|
||||
|
||||
if a3 == 1 && a2_next == 1 && "aeiouAEIOUNcl".contains(&p3) {
|
||||
phones.push("#".to_string());
|
||||
@@ -1,11 +1,16 @@
#[cfg(feature = "std")]
pub mod bert;
pub mod error;
pub mod jtalk;
#[cfg(feature = "std")]
pub mod model;
pub mod mora;
pub mod nlp;
pub mod norm;
pub mod sbv2file;
pub mod style;
pub mod tokenizer;
#[cfg(feature = "std")]
pub mod tts;
pub mod tts_util;
pub mod utils;
crates/sbv2_core/src/main.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use std::env;
use std::fs;

#[cfg(feature = "std")]
fn main_inner() -> anyhow::Result<()> {
    use sbv2_core::tts;
    dotenvy::dotenv_override().ok();
    env_logger::init();
    let text = "今日の天気は快晴です。";
    let ident = "aaa";
    let mut tts_holder = tts::TTSModelHolder::new(
        &fs::read(env::var("BERT_MODEL_PATH")?)?,
        &fs::read(env::var("TOKENIZER_PATH")?)?,
        env::var("HOLDER_MAX_LOADED_MODElS")
            .ok()
            .and_then(|x| x.parse().ok()),
    )?;
    let mp = env::var("MODEL_PATH")?;
    let b = fs::read(&mp)?;
    #[cfg(not(feature = "aivmx"))]
    {
        tts_holder.load_sbv2file(ident, b)?;
    }
    #[cfg(feature = "aivmx")]
    {
        if mp.ends_with(".sbv2") {
            tts_holder.load_sbv2file(ident, b)?;
        } else {
            tts_holder.load_aivmx(ident, b)?;
        }
    }

    let audio =
        tts_holder.easy_synthesize(ident, &text, 0, 0, tts::SynthesizeOptions::default())?;
    fs::write("output.wav", audio)?;

    Ok(())
}

#[cfg(not(feature = "std"))]
fn main_inner() -> anyhow::Result<()> {
    Ok(())
}

fn main() {
    if let Err(e) = main_inner() {
        println!("Error: {e}");
    }
}
crates/sbv2_core/src/model.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
use crate::error::Result;
|
||||
use ndarray::{array, Array1, Array2, Array3, Axis, Ix3};
|
||||
use ort::session::{builder::GraphOptimizationLevel, Session};
|
||||
|
||||
#[allow(clippy::vec_init_then_push, unused_variables)]
|
||||
pub fn load_model<P: AsRef<[u8]>>(model_file: P, bert: bool) -> Result<Session> {
|
||||
let mut exp = Vec::new();
|
||||
#[cfg(feature = "tensorrt")]
|
||||
{
|
||||
if bert {
|
||||
exp.push(
|
||||
ort::execution_providers::TensorRTExecutionProvider::default()
|
||||
.with_fp16(true)
|
||||
.with_profile_min_shapes("input_ids:1x1,attention_mask:1x1")
|
||||
.with_profile_max_shapes("input_ids:1x100,attention_mask:1x100")
|
||||
.with_profile_opt_shapes("input_ids:1x25,attention_mask:1x25")
|
||||
.build(),
|
||||
);
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "cuda")]
|
||||
{
|
||||
#[allow(unused_mut)]
|
||||
let mut cuda = ort::execution_providers::CUDAExecutionProvider::default()
|
||||
.with_conv_algorithm_search(
|
||||
ort::execution_providers::cuda::CUDAExecutionProviderCuDNNConvAlgoSearch::Default,
|
||||
);
|
||||
#[cfg(feature = "cuda_tf32")]
|
||||
{
|
||||
cuda = cuda.with_tf32(true);
|
||||
}
|
||||
exp.push(cuda.build());
|
||||
}
|
||||
#[cfg(feature = "directml")]
|
||||
{
|
||||
exp.push(ort::execution_providers::DirectMLExecutionProvider::default().build());
|
||||
}
|
||||
#[cfg(feature = "coreml")]
|
||||
{
|
||||
exp.push(ort::execution_providers::CoreMLExecutionProvider::default().build());
|
||||
}
|
||||
exp.push(ort::execution_providers::CPUExecutionProvider::default().build());
|
||||
Ok(Session::builder()?
|
||||
.with_execution_providers(exp)?
|
||||
.with_optimization_level(GraphOptimizationLevel::Level3)?
|
||||
.with_intra_threads(num_cpus::get_physical())?
|
||||
.with_parallel_execution(true)?
|
||||
.with_inter_threads(num_cpus::get_physical())?
|
||||
.commit_from_memory(model_file.as_ref())?)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn synthesize(
|
||||
session: &mut Session,
|
||||
bert_ori: Array2<f32>,
|
||||
x_tst: Array1<i64>,
|
||||
mut spk_ids: Array1<i64>,
|
||||
tones: Array1<i64>,
|
||||
lang_ids: Array1<i64>,
|
||||
style_vector: Array1<f32>,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32,
|
||||
noise_scale: f32,
|
||||
noise_scale_w: f32,
|
||||
) -> Result<Array3<f32>> {
|
||||
let bert_ori = bert_ori.insert_axis(Axis(0));
|
||||
let bert_ori = bert_ori.as_standard_layout();
|
||||
let bert = ort::value::TensorRef::from_array_view(&bert_ori)?;
|
||||
let mut x_tst_lengths = array![x_tst.shape()[0] as i64];
|
||||
let x_tst_lengths = ort::value::TensorRef::from_array_view(&mut x_tst_lengths)?;
|
||||
let mut x_tst = x_tst.insert_axis(Axis(0));
|
||||
let x_tst = ort::value::TensorRef::from_array_view(&mut x_tst)?;
|
||||
let mut lang_ids = lang_ids.insert_axis(Axis(0));
|
||||
let lang_ids = ort::value::TensorRef::from_array_view(&mut lang_ids)?;
|
||||
let mut tones = tones.insert_axis(Axis(0));
|
||||
let tones = ort::value::TensorRef::from_array_view(&mut tones)?;
|
||||
let mut style_vector = style_vector.insert_axis(Axis(0));
|
||||
let style_vector = ort::value::TensorRef::from_array_view(&mut style_vector)?;
|
||||
let sid = ort::value::TensorRef::from_array_view(&mut spk_ids)?;
|
||||
let sdp_ratio = vec![sdp_ratio];
|
||||
let sdp_ratio = ort::value::TensorRef::from_array_view((vec![1_i64], sdp_ratio.as_slice()))?;
|
||||
let length_scale = vec![length_scale];
|
||||
let length_scale =
|
||||
ort::value::TensorRef::from_array_view((vec![1_i64], length_scale.as_slice()))?;
|
||||
let noise_scale = vec![noise_scale];
|
||||
let noise_scale =
|
||||
ort::value::TensorRef::from_array_view((vec![1_i64], noise_scale.as_slice()))?;
|
||||
let noise_scale_w = vec![noise_scale_w];
|
||||
let noise_scale_w =
|
||||
ort::value::TensorRef::from_array_view((vec![1_i64], noise_scale_w.as_slice()))?;
|
||||
let outputs = session.run(ort::inputs! {
|
||||
"x_tst" => x_tst,
|
||||
"x_tst_lengths" => x_tst_lengths,
|
||||
"sid" => sid,
|
||||
"tones" => tones,
|
||||
"language" => lang_ids,
|
||||
"bert" => bert,
|
||||
"style_vec" => style_vector,
|
||||
"sdp_ratio" => sdp_ratio,
|
||||
"length_scale" => length_scale,
|
||||
"noise_scale" => noise_scale,
|
||||
"noise_scale_w" => noise_scale_w,
|
||||
})?;
|
||||
|
||||
let audio_array = outputs["output"]
|
||||
.try_extract_tensor::<f32>()?
|
||||
.into_dimensionality::<Ix3>()?
|
||||
.to_owned();
|
||||
|
||||
Ok(audio_array)
|
||||
}
|
||||
crates/sbv2_core/src/sbv2file.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use std::io::{Cursor, Read};

use tar::Archive;
use zstd::decode_all;

use crate::error::{Error, Result};

/// Parse a .sbv2 file binary
///
/// # Examples
///
/// ```rs
/// parse_sbv2file("tsukuyomi", std::fs::read("tsukuyomi.sbv2")?)?;
/// ```
pub fn parse_sbv2file<P: AsRef<[u8]>>(sbv2_bytes: P) -> Result<(Vec<u8>, Vec<u8>)> {
    let mut arc = Archive::new(Cursor::new(decode_all(Cursor::new(sbv2_bytes.as_ref()))?));
    let mut vits2 = None;
    let mut style_vectors = None;
    let mut et = arc.entries()?;
    while let Some(Ok(mut e)) = et.next() {
        let pth = String::from_utf8_lossy(&e.path_bytes()).to_string();
        let mut b = Vec::with_capacity(e.size() as usize);
        e.read_to_end(&mut b)?;
        match pth.as_str() {
            "model.onnx" => vits2 = Some(b),
            "style_vectors.json" => style_vectors = Some(b),
            _ => continue,
        }
    }
    if style_vectors.is_none() {
        return Err(Error::ModelNotFoundError("style_vectors".to_string()));
    }
    if vits2.is_none() {
        return Err(Error::ModelNotFoundError("vits2".to_string()));
    }
    Ok((style_vectors.unwrap(), vits2.unwrap()))
}
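A small usage sketch for the function above, assuming a binary that depends on sbv2_core; the file path is a placeholder and anyhow is used only for error handling in the example.

```rust
// Sketch: split a .sbv2 archive into its two payloads. parse_sbv2file is the
// function defined above; everything else here is illustrative.
fn main() -> anyhow::Result<()> {
    let bytes = std::fs::read("models/tsukuyomi.sbv2")?;
    let (style_vectors, vits2) = sbv2_core::sbv2file::parse_sbv2file(bytes)?;
    println!(
        "style_vectors.json: {} bytes, model.onnx: {} bytes",
        style_vectors.len(),
        vits2.len()
    );
    Ok(())
}
```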
@@ -1,5 +1,5 @@
use crate::error::Result;
use tokenizers::Tokenizer;
pub use tokenizers::Tokenizer;

pub fn get_tokenizer<P: AsRef<[u8]>>(p: P) -> Result<Tokenizer> {
    let tokenizer = Tokenizer::from_bytes(p)?;
@@ -1,12 +1,14 @@
|
||||
use crate::error::{Error, Result};
|
||||
use crate::{bert, jtalk, model, nlp, norm, style, tokenizer, utils};
|
||||
use hound::{SampleFormat, WavSpec, WavWriter};
|
||||
use ndarray::{concatenate, s, Array, Array1, Array2, Array3, Axis};
|
||||
use ort::Session;
|
||||
use std::io::{Cursor, Read};
|
||||
use tar::Archive;
|
||||
use crate::{jtalk, model, style, tokenizer, tts_util};
|
||||
#[cfg(feature = "aivmx")]
|
||||
use base64::prelude::{Engine as _, BASE64_STANDARD};
|
||||
#[cfg(feature = "aivmx")]
|
||||
use ndarray::ShapeBuilder;
|
||||
use ndarray::{concatenate, Array1, Array2, Array3, Axis};
|
||||
use ort::session::Session;
|
||||
#[cfg(feature = "aivmx")]
|
||||
use std::io::Cursor;
|
||||
use tokenizers::Tokenizer;
|
||||
use zstd::decode_all;
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub struct TTSIdent(String);
|
||||
@@ -28,9 +30,10 @@ where
|
||||
}
|
||||
|
||||
pub struct TTSModel {
|
||||
vits2: Session,
|
||||
vits2: Option<Session>,
|
||||
style_vectors: Array2<f32>,
|
||||
ident: TTSIdent,
|
||||
bytes: Option<Vec<u8>>,
|
||||
}
|
||||
|
||||
/// High-level Style-Bert-VITS2's API
|
||||
@@ -39,6 +42,7 @@ pub struct TTSModelHolder {
|
||||
bert: Session,
|
||||
models: Vec<TTSModel>,
|
||||
jtalk: jtalk::JTalk,
|
||||
max_loaded_models: Option<usize>,
|
||||
}
|
||||
|
||||
impl TTSModelHolder {
|
||||
@@ -47,9 +51,13 @@ impl TTSModelHolder {
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rs
|
||||
/// let mut tts_holder = TTSModelHolder::new(std::fs::read("deberta.onnx")?, std::fs::read("tokenizer.json")?)?;
|
||||
/// let mut tts_holder = TTSModelHolder::new(std::fs::read("deberta.onnx")?, std::fs::read("tokenizer.json")?, None)?;
|
||||
/// ```
|
||||
pub fn new<P: AsRef<[u8]>>(bert_model_bytes: P, tokenizer_bytes: P) -> Result<Self> {
|
||||
pub fn new<P: AsRef<[u8]>>(
|
||||
bert_model_bytes: P,
|
||||
tokenizer_bytes: P,
|
||||
max_loaded_models: Option<usize>,
|
||||
) -> Result<Self> {
|
||||
let bert = model::load_model(bert_model_bytes, true)?;
|
||||
let jtalk = jtalk::JTalk::new()?;
|
||||
let tokenizer = tokenizer::get_tokenizer(tokenizer_bytes)?;
|
||||
@@ -58,6 +66,7 @@ impl TTSModelHolder {
|
||||
models: vec![],
|
||||
jtalk,
|
||||
tokenizer,
|
||||
max_loaded_models,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -66,6 +75,53 @@ impl TTSModelHolder {
|
||||
self.models.iter().map(|m| m.ident.to_string()).collect()
|
||||
}
|
||||
|
||||
#[cfg(feature = "aivmx")]
|
||||
pub fn load_aivmx<I: Into<TTSIdent>, P: AsRef<[u8]>>(
|
||||
&mut self,
|
||||
ident: I,
|
||||
aivmx_bytes: P,
|
||||
) -> Result<()> {
|
||||
let ident = ident.into();
|
||||
if self.find_model(ident.clone()).is_err() {
|
||||
let mut load = true;
|
||||
if let Some(max) = self.max_loaded_models {
|
||||
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
|
||||
load = false;
|
||||
}
|
||||
}
|
||||
let model = model::load_model(&aivmx_bytes, false)?;
|
||||
let metadata = model.metadata()?;
|
||||
if let Some(aivm_style_vectors) = metadata.custom("aivm_style_vectors")? {
|
||||
let aivm_style_vectors = BASE64_STANDARD.decode(aivm_style_vectors)?;
|
||||
let style_vectors = Cursor::new(&aivm_style_vectors);
|
||||
let reader = npyz::NpyFile::new(style_vectors)?;
|
||||
let style_vectors = {
|
||||
let shape = reader.shape().to_vec();
|
||||
let order = reader.order();
|
||||
let data = reader.into_vec::<f32>()?;
|
||||
let shape = match shape[..] {
|
||||
[i1, i2] => [i1 as usize, i2 as usize],
|
||||
_ => panic!("expected 2D array"),
|
||||
};
|
||||
let true_shape = shape.set_f(order == npyz::Order::Fortran);
|
||||
ndarray::Array2::from_shape_vec(true_shape, data)?
|
||||
};
|
||||
drop(metadata);
|
||||
self.models.push(TTSModel {
|
||||
vits2: if load { Some(model) } else { None },
|
||||
bytes: if self.max_loaded_models.is_some() {
|
||||
Some(aivmx_bytes.as_ref().to_vec())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
ident,
|
||||
style_vectors,
|
||||
})
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load a .sbv2 file binary
|
||||
///
|
||||
/// # Examples
|
||||
@@ -78,27 +134,8 @@ impl TTSModelHolder {
|
||||
ident: I,
|
||||
sbv2_bytes: P,
|
||||
) -> Result<()> {
|
||||
let mut arc = Archive::new(Cursor::new(decode_all(Cursor::new(sbv2_bytes.as_ref()))?));
|
||||
let mut vits2 = None;
|
||||
let mut style_vectors = None;
|
||||
let mut et = arc.entries()?;
|
||||
while let Some(Ok(mut e)) = et.next() {
|
||||
let pth = String::from_utf8_lossy(&e.path_bytes()).to_string();
|
||||
let mut b = Vec::with_capacity(e.size() as usize);
|
||||
e.read_to_end(&mut b)?;
|
||||
match pth.as_str() {
|
||||
"model.onnx" => vits2 = Some(b),
|
||||
"style_vectors.json" => style_vectors = Some(b),
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
if style_vectors.is_none() {
|
||||
return Err(Error::ModelNotFoundError("style_vectors".to_string()));
|
||||
}
|
||||
if vits2.is_none() {
|
||||
return Err(Error::ModelNotFoundError("vits2".to_string()));
|
||||
}
|
||||
self.load(ident, style_vectors.unwrap(), vits2.unwrap())?;
|
||||
let (style_vectors, vits2) = crate::sbv2file::parse_sbv2file(sbv2_bytes)?;
|
||||
self.load(ident, style_vectors, vits2)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -117,10 +154,25 @@ impl TTSModelHolder {
|
||||
) -> Result<()> {
|
||||
let ident = ident.into();
|
||||
if self.find_model(ident.clone()).is_err() {
|
||||
let mut load = true;
|
||||
if let Some(max) = self.max_loaded_models {
|
||||
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
|
||||
load = false;
|
||||
}
|
||||
}
|
||||
self.models.push(TTSModel {
|
||||
vits2: model::load_model(vits2_bytes, false)?,
|
||||
vits2: if load {
|
||||
Some(model::load_model(&vits2_bytes, false)?)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
style_vectors: style::load_style(style_vectors_bytes)?,
|
||||
ident,
|
||||
bytes: if self.max_loaded_models.is_some() {
|
||||
Some(vits2_bytes.as_ref().to_vec())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
})
|
||||
}
|
||||
Ok(())
|
||||
@@ -148,88 +200,69 @@ impl TTSModelHolder {
|
||||
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn parse_text(
|
||||
&self,
|
||||
&mut self,
|
||||
text: &str,
|
||||
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
|
||||
let text = self.jtalk.num2word(text)?;
|
||||
let normalized_text = norm::normalize_text(&text);
|
||||
|
||||
let process = self.jtalk.process_text(&normalized_text)?;
|
||||
let (phones, tones, mut word2ph) = process.g2p()?;
|
||||
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
|
||||
|
||||
let phones = utils::intersperse(&phones, 0);
|
||||
let tones = utils::intersperse(&tones, 0);
|
||||
let lang_ids = utils::intersperse(&lang_ids, 0);
|
||||
for item in &mut word2ph {
|
||||
*item *= 2;
|
||||
}
|
||||
word2ph[0] += 1;
|
||||
|
||||
let text = {
|
||||
let (seq_text, _) = process.text_to_seq_kata()?;
|
||||
seq_text.join("")
|
||||
};
|
||||
let (token_ids, attention_masks) = tokenizer::tokenize(&text, &self.tokenizer)?;
|
||||
|
||||
let bert_content = bert::predict(&self.bert, token_ids, attention_masks)?;
|
||||
|
||||
assert!(
|
||||
word2ph.len() == text.chars().count() + 2,
|
||||
"{} {}",
|
||||
word2ph.len(),
|
||||
normalized_text.chars().count()
|
||||
);
|
||||
|
||||
let mut phone_level_feature = vec![];
|
||||
for (i, reps) in word2ph.iter().enumerate() {
|
||||
let repeat_feature = {
|
||||
let (reps_rows, reps_cols) = (*reps, 1);
|
||||
let arr_len = bert_content.slice(s![i, ..]).len();
|
||||
|
||||
let mut results: Array2<f32> =
|
||||
Array::zeros((reps_rows as usize, arr_len * reps_cols));
|
||||
|
||||
for j in 0..reps_rows {
|
||||
for k in 0..reps_cols {
|
||||
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
|
||||
view.assign(&bert_content.slice(s![i, ..]));
|
||||
}
|
||||
}
|
||||
results
|
||||
};
|
||||
phone_level_feature.push(repeat_feature);
|
||||
}
|
||||
let phone_level_feature = concatenate(
|
||||
Axis(0),
|
||||
&phone_level_feature
|
||||
.iter()
|
||||
.map(|x| x.view())
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
let bert_ori = phone_level_feature.t();
|
||||
Ok((
|
||||
bert_ori.to_owned(),
|
||||
phones.into(),
|
||||
tones.into(),
|
||||
lang_ids.into(),
|
||||
))
|
||||
crate::tts_util::parse_text_blocking(
|
||||
text,
|
||||
&self.jtalk,
|
||||
&self.tokenizer,
|
||||
|token_ids, attention_masks| {
|
||||
crate::bert::predict(&mut self.bert, token_ids, attention_masks)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn find_model<I: Into<TTSIdent>>(&self, ident: I) -> Result<&TTSModel> {
|
||||
fn find_model<I: Into<TTSIdent>>(&mut self, ident: I) -> Result<&mut TTSModel> {
|
||||
let ident = ident.into();
|
||||
self.models
|
||||
.iter()
|
||||
.iter_mut()
|
||||
.find(|m| m.ident == ident)
|
||||
.ok_or(Error::ModelNotFoundError(ident.to_string()))
|
||||
}
|
||||
fn find_and_load_model<I: Into<TTSIdent>>(&mut self, ident: I) -> Result<bool> {
|
||||
let ident = ident.into();
|
||||
let (bytes, style_vectors) = {
|
||||
let model = self
|
||||
.models
|
||||
.iter()
|
||||
.find(|m| m.ident == ident)
|
||||
.ok_or(Error::ModelNotFoundError(ident.to_string()))?;
|
||||
if model.vits2.is_some() {
|
||||
return Ok(true);
|
||||
}
|
||||
(model.bytes.clone().unwrap(), model.style_vectors.clone())
|
||||
};
|
||||
self.unload(ident.clone());
|
||||
let s = model::load_model(&bytes, false)?;
|
||||
if let Some(max) = self.max_loaded_models {
|
||||
if self.models.iter().filter(|x| x.vits2.is_some()).count() >= max {
|
||||
self.unload(self.models.first().unwrap().ident.clone());
|
||||
}
|
||||
}
|
||||
self.models.push(TTSModel {
|
||||
bytes: Some(bytes.to_vec()),
|
||||
vits2: Some(s),
|
||||
style_vectors,
|
||||
ident: ident.clone(),
|
||||
});
|
||||
let model = self
|
||||
.models
|
||||
.iter()
|
||||
.find(|m| m.ident == ident)
|
||||
.ok_or(Error::ModelNotFoundError(ident.to_string()))?;
|
||||
if model.vits2.is_some() {
|
||||
return Ok(true);
|
||||
}
|
||||
Err(Error::ModelNotFoundError(ident.to_string()))
|
||||
}
|
||||
|
||||
/// Get style vector by style id and weight
|
||||
///
|
||||
/// # Note
|
||||
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
|
||||
pub fn get_style_vector<I: Into<TTSIdent>>(
|
||||
&self,
|
||||
&mut self,
|
||||
ident: I,
|
||||
style_id: i32,
|
||||
weight: f32,
|
||||
@@ -245,12 +278,14 @@ impl TTSModelHolder {
|
||||
/// let audio = tts_holder.easy_synthesize("tsukuyomi", "こんにちは", 0, SynthesizeOptions::default())?;
|
||||
/// ```
|
||||
pub fn easy_synthesize<I: Into<TTSIdent> + Copy>(
|
||||
&self,
|
||||
&mut self,
|
||||
ident: I,
|
||||
text: &str,
|
||||
style_id: i32,
|
||||
speaker_id: i64,
|
||||
options: SynthesizeOptions,
|
||||
) -> Result<Vec<u8>> {
|
||||
self.find_and_load_model(ident)?;
|
||||
let style_vector = self.get_style_vector(ident, style_id, options.style_weight)?;
|
||||
let audio_array = if options.split_sentences {
|
||||
let texts: Vec<&str> = text.split('\n').collect();
|
||||
@@ -260,15 +295,24 @@ impl TTSModelHolder {
|
||||
continue;
|
||||
}
|
||||
let (bert_ori, phones, tones, lang_ids) = self.parse_text(t)?;
|
||||
|
||||
let vits2 = self
|
||||
.find_model(ident)?
|
||||
.vits2
|
||||
.as_mut()
|
||||
.ok_or(Error::ModelNotFoundError(ident.into().to_string()))?;
|
||||
let audio = model::synthesize(
|
||||
&self.find_model(ident)?.vits2,
|
||||
vits2,
|
||||
bert_ori.to_owned(),
|
||||
phones,
|
||||
Array1::from_vec(vec![speaker_id]),
|
||||
tones,
|
||||
lang_ids,
|
||||
style_vector.clone(),
|
||||
options.sdp_ratio,
|
||||
options.length_scale,
|
||||
0.677,
|
||||
0.8,
|
||||
)?;
|
||||
audios.push(audio.clone());
|
||||
if i != texts.len() - 1 {
|
||||
@@ -281,66 +325,27 @@ impl TTSModelHolder {
|
||||
)?
|
||||
} else {
|
||||
let (bert_ori, phones, tones, lang_ids) = self.parse_text(text)?;
|
||||
|
||||
let vits2 = self
|
||||
.find_model(ident)?
|
||||
.vits2
|
||||
.as_mut()
|
||||
.ok_or(Error::ModelNotFoundError(ident.into().to_string()))?;
|
||||
model::synthesize(
|
||||
&self.find_model(ident)?.vits2,
|
||||
vits2,
|
||||
bert_ori.to_owned(),
|
||||
phones,
|
||||
Array1::from_vec(vec![speaker_id]),
|
||||
tones,
|
||||
lang_ids,
|
||||
style_vector,
|
||||
options.sdp_ratio,
|
||||
options.length_scale,
|
||||
0.677,
|
||||
0.8,
|
||||
)?
|
||||
};
|
||||
Self::array_to_vec(audio_array)
|
||||
}
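A hedged usage sketch of the updated `easy_synthesize`: the holder now has to be mutable (models are loaded lazily on first use) and the call takes a `speaker_id` between `style_id` and the options. The two-argument `TTSModelHolder::new` and the env-var names are copied from the example binary elsewhere in this diff and may have shifted further in this refactor.

```rust
use std::fs;

use sbv2_core::tts::{SynthesizeOptions, TTSModelHolder};

fn main() -> anyhow::Result<()> {
    // Constructor as in the example binary in this diff; it may have gained
    // extra parameters (e.g. max_loaded_models) that are not shown here.
    let mut holder = TTSModelHolder::new(
        &fs::read(std::env::var("BERT_MODEL_PATH")?)?,
        &fs::read(std::env::var("TOKENIZER_PATH")?)?,
    )?;
    holder.load_sbv2file("tsukuyomi", fs::read(std::env::var("MODEL_PATH")?)?)?;

    // With `split_sentences` enabled, each non-empty line of the input is
    // synthesized on its own and the resulting chunks are joined.
    let audio = holder.easy_synthesize(
        "tsukuyomi",
        "おはよう\nこんにちは",
        0,                            // style_id
        0,                            // speaker_id (new in this change)
        SynthesizeOptions::default(),
    )?;
    fs::write("output.wav", audio)?;
    Ok(())
}
```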
|
||||
|
||||
fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
|
||||
let spec = WavSpec {
|
||||
channels: 1,
|
||||
sample_rate: 44100,
|
||||
bits_per_sample: 32,
|
||||
sample_format: SampleFormat::Float,
|
||||
};
|
||||
let mut cursor = Cursor::new(Vec::new());
|
||||
let mut writer = WavWriter::new(&mut cursor, spec)?;
|
||||
for i in 0..audio_array.shape()[0] {
|
||||
let output = audio_array.slice(s![i, 0, ..]).to_vec();
|
||||
for sample in output {
|
||||
writer.write_sample(sample)?;
|
||||
}
|
||||
}
|
||||
writer.finalize()?;
|
||||
Ok(cursor.into_inner())
|
||||
}
|
||||
|
||||
/// Synthesize text to audio
|
||||
///
|
||||
/// # Note
|
||||
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn synthesize<I: Into<TTSIdent>>(
|
||||
&self,
|
||||
ident: I,
|
||||
bert_ori: Array2<f32>,
|
||||
phones: Array1<i64>,
|
||||
tones: Array1<i64>,
|
||||
lang_ids: Array1<i64>,
|
||||
style_vector: Array1<f32>,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32,
|
||||
) -> Result<Vec<u8>> {
|
||||
let audio_array = model::synthesize(
|
||||
&self.find_model(ident)?.vits2,
|
||||
bert_ori.to_owned(),
|
||||
phones,
|
||||
tones,
|
||||
lang_ids,
|
||||
style_vector,
|
||||
sdp_ratio,
|
||||
length_scale,
|
||||
)?;
|
||||
Self::array_to_vec(audio_array)
|
||||
tts_util::array_to_vec(audio_array)
|
||||
}
|
||||
}
|
||||
|
||||
180 crates/sbv2_core/src/tts_util.rs (Normal file)
@@ -0,0 +1,180 @@
|
||||
use std::io::Cursor;
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::{jtalk, nlp, norm, tokenizer, utils};
|
||||
use hound::{SampleFormat, WavSpec, WavWriter};
|
||||
use ndarray::{concatenate, s, Array, Array1, Array2, Array3, Axis};
|
||||
use tokenizers::Tokenizer;
|
||||
/// Parse text and return the input for synthesize
|
||||
///
|
||||
/// # Note
|
||||
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub async fn parse_text(
|
||||
text: &str,
|
||||
jtalk: &jtalk::JTalk,
|
||||
tokenizer: &Tokenizer,
|
||||
bert_predict: impl FnOnce(
|
||||
Vec<i64>,
|
||||
Vec<i64>,
|
||||
) -> std::pin::Pin<
|
||||
Box<dyn std::future::Future<Output = Result<ndarray::Array2<f32>>>>,
|
||||
>,
|
||||
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
|
||||
let text = jtalk.num2word(text)?;
|
||||
let normalized_text = norm::normalize_text(&text);
|
||||
|
||||
let process = jtalk.process_text(&normalized_text)?;
|
||||
let (phones, tones, mut word2ph) = process.g2p()?;
|
||||
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
|
||||
|
||||
let phones = utils::intersperse(&phones, 0);
|
||||
let tones = utils::intersperse(&tones, 0);
|
||||
let lang_ids = utils::intersperse(&lang_ids, 0);
|
||||
for item in &mut word2ph {
|
||||
*item *= 2;
|
||||
}
|
||||
word2ph[0] += 1;
|
||||
|
||||
let text = {
|
||||
let (seq_text, _) = process.text_to_seq_kata()?;
|
||||
seq_text.join("")
|
||||
};
|
||||
let (token_ids, attention_masks) = tokenizer::tokenize(&text, tokenizer)?;
|
||||
|
||||
let bert_content = bert_predict(token_ids, attention_masks).await?;
|
||||
|
||||
assert!(
|
||||
word2ph.len() == text.chars().count() + 2,
|
||||
"{} {}",
|
||||
word2ph.len(),
|
||||
normalized_text.chars().count()
|
||||
);
|
||||
|
||||
let mut phone_level_feature = vec![];
|
||||
for (i, reps) in word2ph.iter().enumerate() {
|
||||
let repeat_feature = {
|
||||
let (reps_rows, reps_cols) = (*reps, 1);
|
||||
let arr_len = bert_content.slice(s![i, ..]).len();
|
||||
|
||||
let mut results: Array2<f32> = Array::zeros((reps_rows as usize, arr_len * reps_cols));
|
||||
|
||||
for j in 0..reps_rows {
|
||||
for k in 0..reps_cols {
|
||||
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
|
||||
view.assign(&bert_content.slice(s![i, ..]));
|
||||
}
|
||||
}
|
||||
results
|
||||
};
|
||||
phone_level_feature.push(repeat_feature);
|
||||
}
|
||||
let phone_level_feature = concatenate(
|
||||
Axis(0),
|
||||
&phone_level_feature
|
||||
.iter()
|
||||
.map(|x| x.view())
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
let bert_ori = phone_level_feature.t();
|
||||
Ok((
|
||||
bert_ori.to_owned(),
|
||||
phones.into(),
|
||||
tones.into(),
|
||||
lang_ids.into(),
|
||||
))
|
||||
}
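The async `parse_text` takes the BERT forward pass as a caller-supplied closure returning a pinned, boxed (non-`Send`) future, which is what lets the wasm crate below hand in a JS callback. A minimal standalone sketch of that closure shape, with a dummy output type in place of `ndarray::Array2<f32>` and `futures::executor::block_on` just to drive it:

```rust
use std::future::Future;
use std::pin::Pin;

// Stand-in for Result<ndarray::Array2<f32>>: one dummy feature per token.
type BertFuture = Pin<Box<dyn Future<Output = Result<Vec<f32>, String>>>>;

fn make_bert_predict() -> impl FnOnce(Vec<i64>, Vec<i64>) -> BertFuture {
    |token_ids: Vec<i64>, attention_masks: Vec<i64>| -> BertFuture {
        Box::pin(async move {
            // A real implementation would run DeBERTa here (ONNX session,
            // or a JS function in the wasm crate); this just echoes tokens.
            let _ = attention_masks;
            Ok(token_ids.iter().map(|&t| t as f32).collect())
        })
    }
}

fn main() {
    let predict = make_bert_predict();
    // Any executor works; `futures::executor::block_on` needs the futures crate.
    let features = futures::executor::block_on(predict(vec![1, 2, 3], vec![1, 1, 1])).unwrap();
    assert_eq!(features.len(), 3);
}
```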
|
||||
|
||||
/// Parse text and return the input for synthesize
|
||||
///
|
||||
/// # Note
|
||||
/// This function is for low-level usage, use `easy_synthesize` for high-level usage.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn parse_text_blocking(
|
||||
text: &str,
|
||||
jtalk: &jtalk::JTalk,
|
||||
tokenizer: &Tokenizer,
|
||||
bert_predict: impl FnOnce(Vec<i64>, Vec<i64>) -> Result<ndarray::Array2<f32>>,
|
||||
) -> Result<(Array2<f32>, Array1<i64>, Array1<i64>, Array1<i64>)> {
|
||||
let text = jtalk.num2word(text)?;
|
||||
let normalized_text = norm::normalize_text(&text);
|
||||
|
||||
let process = jtalk.process_text(&normalized_text)?;
|
||||
let (phones, tones, mut word2ph) = process.g2p()?;
|
||||
let (phones, tones, lang_ids) = nlp::cleaned_text_to_sequence(phones, tones);
|
||||
|
||||
let phones = utils::intersperse(&phones, 0);
|
||||
let tones = utils::intersperse(&tones, 0);
|
||||
let lang_ids = utils::intersperse(&lang_ids, 0);
|
||||
for item in &mut word2ph {
|
||||
*item *= 2;
|
||||
}
|
||||
word2ph[0] += 1;
|
||||
|
||||
let text = {
|
||||
let (seq_text, _) = process.text_to_seq_kata()?;
|
||||
seq_text.join("")
|
||||
};
|
||||
let (token_ids, attention_masks) = tokenizer::tokenize(&text, tokenizer)?;
|
||||
|
||||
let bert_content = bert_predict(token_ids, attention_masks)?;
|
||||
|
||||
assert!(
|
||||
word2ph.len() == text.chars().count() + 2,
|
||||
"{} {}",
|
||||
word2ph.len(),
|
||||
normalized_text.chars().count()
|
||||
);
|
||||
|
||||
let mut phone_level_feature = vec![];
|
||||
for (i, reps) in word2ph.iter().enumerate() {
|
||||
let repeat_feature = {
|
||||
let (reps_rows, reps_cols) = (*reps, 1);
|
||||
let arr_len = bert_content.slice(s![i, ..]).len();
|
||||
|
||||
let mut results: Array2<f32> = Array::zeros((reps_rows as usize, arr_len * reps_cols));
|
||||
|
||||
for j in 0..reps_rows {
|
||||
for k in 0..reps_cols {
|
||||
let mut view = results.slice_mut(s![j, k * arr_len..(k + 1) * arr_len]);
|
||||
view.assign(&bert_content.slice(s![i, ..]));
|
||||
}
|
||||
}
|
||||
results
|
||||
};
|
||||
phone_level_feature.push(repeat_feature);
|
||||
}
|
||||
let phone_level_feature = concatenate(
|
||||
Axis(0),
|
||||
&phone_level_feature
|
||||
.iter()
|
||||
.map(|x| x.view())
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
let bert_ori = phone_level_feature.t();
|
||||
Ok((
|
||||
bert_ori.to_owned(),
|
||||
phones.into(),
|
||||
tones.into(),
|
||||
lang_ids.into(),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn array_to_vec(audio_array: Array3<f32>) -> Result<Vec<u8>> {
|
||||
let spec = WavSpec {
|
||||
channels: 1,
|
||||
sample_rate: 44100,
|
||||
bits_per_sample: 32,
|
||||
sample_format: SampleFormat::Float,
|
||||
};
|
||||
let mut cursor = Cursor::new(Vec::new());
|
||||
let mut writer = WavWriter::new(&mut cursor, spec)?;
|
||||
for i in 0..audio_array.shape()[0] {
|
||||
let output = audio_array.slice(s![i, 0, ..]).to_vec();
|
||||
for sample in output {
|
||||
writer.write_sample(sample)?;
|
||||
}
|
||||
}
|
||||
writer.finalize()?;
|
||||
Ok(cursor.into_inner())
|
||||
}
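`array_to_vec` serializes the model output as an in-memory WAV: mono, 44100 Hz, 32-bit float samples. A small round trip with `hound` (the same crate used above) confirms that layout without needing any `sbv2_core` types:

```rust
use std::io::Cursor;

use hound::{SampleFormat, WavReader, WavSpec, WavWriter};

fn main() -> Result<(), hound::Error> {
    // Same spec as array_to_vec above.
    let spec = WavSpec {
        channels: 1,
        sample_rate: 44100,
        bits_per_sample: 32,
        sample_format: SampleFormat::Float,
    };
    let mut cursor = Cursor::new(Vec::new());
    {
        let mut writer = WavWriter::new(&mut cursor, spec)?;
        for sample in [0.0f32, 0.25, -0.25] {
            writer.write_sample(sample)?;
        }
        writer.finalize()?;
    }
    let bytes = cursor.into_inner();

    // Reading the header back shows the container easy_synthesize returns.
    let reader = WavReader::new(Cursor::new(&bytes))?;
    assert_eq!(reader.spec().channels, 1);
    assert_eq!(reader.spec().sample_rate, 44100);
    assert_eq!(reader.spec().bits_per_sample, 32);
    assert_eq!(reader.len(), 3); // number of samples written
    Ok(())
}
```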
|
||||
20 crates/sbv2_wasm/Cargo.toml (Normal file)
@@ -0,0 +1,20 @@
[package]
name = "sbv2_wasm"
version.workspace = true
edition.workspace = true
description.workspace = true
readme.workspace = true
repository.workspace = true
documentation.workspace = true
license.workspace = true

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
wasm-bindgen = "0.2.93"
sbv2_core = { path = "../sbv2_core", default-features = false, features = ["no_std"] }
once_cell.workspace = true
js-sys = "0.3.70"
ndarray.workspace = true
wasm-bindgen-futures = "0.4.43"
2 crates/sbv2_wasm/README.md (Normal file)
@@ -0,0 +1,2 @@
# StyleBertVITS2 wasm
refer to https://github.com/tuna2134/sbv2-api
31 crates/sbv2_wasm/biome.json (Normal file)
@@ -0,0 +1,31 @@
{
"$schema": "https://biomejs.dev/schemas/1.9.2/schema.json",
"vcs": {
"enabled": false,
"clientKind": "git",
"useIgnoreFile": false
},
"files": {
"ignoreUnknown": false,
"ignore": []
},
"formatter": {
"enabled": true,
"indentStyle": "tab",
"ignore": ["dist/", "pkg/"]
},
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
},
"javascript": {
"formatter": {
"quoteStyle": "double"
}
}
}
8 crates/sbv2_wasm/build.sh (Executable file)
@@ -0,0 +1,8 @@
#!/bin/sh
wasm-pack build --target web ./crates/sbv2_wasm --release
wasm-opt -O3 -o ./crates/sbv2_wasm/pkg/sbv2_wasm_bg.wasm ./crates/sbv2_wasm/pkg/sbv2_wasm_bg.wasm
wasm-strip ./crates/sbv2_wasm/pkg/sbv2_wasm_bg.wasm
mkdir -p ./crates/sbv2_wasm/dist
cp ./crates/sbv2_wasm/pkg/sbv2_wasm_bg.wasm ./crates/sbv2_wasm/dist/sbv2_wasm_bg.wasm
cd ./crates/sbv2_wasm
pnpm build
14 crates/sbv2_wasm/example.js (Normal file)
@@ -0,0 +1,14 @@
import { ModelHolder } from "./dist/index.js";
import fs from "node:fs/promises";

ModelHolder.globalInit(await fs.readFile("./dist/sbv2_wasm_bg.wasm"));
const holder = await ModelHolder.create(
(await fs.readFile("../../models/tokenizer.json")).toString("utf-8"),
await fs.readFile("../../models/deberta.onnx"),
);
await holder.load(
"tsukuyomi",
await fs.readFile("../../models/tsukuyomi.sbv2"),
);
await fs.writeFile("out.wav", await holder.synthesize("tsukuyomi", "おはよう"));
holder.unload("tsukuyomi");
25 crates/sbv2_wasm/package.json (Normal file)
@@ -0,0 +1,25 @@
{
"name": "sbv2",
"version": "0.2.0-alpha6",
"description": "Style Bert VITS2 wasm",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc && esbuild src-js/index.ts --outfile=dist/index.js --minify --format=esm --bundle --external:onnxruntime-web",
"format": "biome format --write ."
},
"keywords": [],
"author": "tuna2134",
"license": "MIT",
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/node": "^22.13.5",
"esbuild": "^0.25.0",
"typescript": "^5.7.3"
},
"dependencies": {
"onnxruntime-web": "^1.20.1"
},
"files": ["dist/*", "package.json", "README.md", "pkg/*.ts", "pkg/*.js"]
}
504 crates/sbv2_wasm/pnpm-lock.yaml (generated, Normal file)
@@ -0,0 +1,504 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
onnxruntime-web:
|
||||
specifier: ^1.20.1
|
||||
version: 1.20.1
|
||||
devDependencies:
|
||||
'@biomejs/biome':
|
||||
specifier: ^1.9.4
|
||||
version: 1.9.4
|
||||
'@types/node':
|
||||
specifier: ^22.13.5
|
||||
version: 22.13.5
|
||||
esbuild:
|
||||
specifier: ^0.25.0
|
||||
version: 0.25.0
|
||||
typescript:
|
||||
specifier: ^5.7.3
|
||||
version: 5.7.3
|
||||
|
||||
packages:
|
||||
|
||||
'@biomejs/biome@1.9.4':
|
||||
resolution: {integrity: sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
hasBin: true
|
||||
|
||||
'@biomejs/cli-darwin-arm64@1.9.4':
|
||||
resolution: {integrity: sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@biomejs/cli-darwin-x64@1.9.4':
|
||||
resolution: {integrity: sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@biomejs/cli-linux-arm64-musl@1.9.4':
|
||||
resolution: {integrity: sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@biomejs/cli-linux-arm64@1.9.4':
|
||||
resolution: {integrity: sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@biomejs/cli-linux-x64-musl@1.9.4':
|
||||
resolution: {integrity: sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@biomejs/cli-linux-x64@1.9.4':
|
||||
resolution: {integrity: sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@biomejs/cli-win32-arm64@1.9.4':
|
||||
resolution: {integrity: sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@biomejs/cli-win32-x64@1.9.4':
|
||||
resolution: {integrity: sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA==}
|
||||
engines: {node: '>=14.21.3'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/aix-ppc64@0.25.0':
|
||||
resolution: {integrity: sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.25.0':
|
||||
resolution: {integrity: sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.25.0':
|
||||
resolution: {integrity: sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.25.0':
|
||||
resolution: {integrity: sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.25.0':
|
||||
resolution: {integrity: sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.25.0':
|
||||
resolution: {integrity: sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.25.0':
|
||||
resolution: {integrity: sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.25.0':
|
||||
resolution: {integrity: sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.25.0':
|
||||
resolution: {integrity: sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.25.0':
|
||||
resolution: {integrity: sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.25.0':
|
||||
resolution: {integrity: sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.25.0':
|
||||
resolution: {integrity: sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.25.0':
|
||||
resolution: {integrity: sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.25.0':
|
||||
resolution: {integrity: sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.25.0':
|
||||
resolution: {integrity: sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/sunos-x64@0.25.0':
|
||||
resolution: {integrity: sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.25.0':
|
||||
resolution: {integrity: sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.25.0':
|
||||
resolution: {integrity: sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.25.0':
|
||||
resolution: {integrity: sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@protobufjs/aspromise@1.1.2':
|
||||
resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
|
||||
|
||||
'@protobufjs/base64@1.1.2':
|
||||
resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}
|
||||
|
||||
'@protobufjs/codegen@2.0.4':
|
||||
resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}
|
||||
|
||||
'@protobufjs/eventemitter@1.1.0':
|
||||
resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}
|
||||
|
||||
'@protobufjs/fetch@1.1.0':
|
||||
resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}
|
||||
|
||||
'@protobufjs/float@1.0.2':
|
||||
resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}
|
||||
|
||||
'@protobufjs/inquire@1.1.0':
|
||||
resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}
|
||||
|
||||
'@protobufjs/path@1.1.2':
|
||||
resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}
|
||||
|
||||
'@protobufjs/pool@1.1.0':
|
||||
resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}
|
||||
|
||||
'@protobufjs/utf8@1.1.0':
|
||||
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
|
||||
|
||||
'@types/node@22.13.5':
|
||||
resolution: {integrity: sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg==}
|
||||
|
||||
esbuild@0.25.0:
|
||||
resolution: {integrity: sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
flatbuffers@1.12.0:
|
||||
resolution: {integrity: sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==}
|
||||
|
||||
guid-typescript@1.0.9:
|
||||
resolution: {integrity: sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==}
|
||||
|
||||
long@5.3.1:
|
||||
resolution: {integrity: sha512-ka87Jz3gcx/I7Hal94xaN2tZEOPoUOEVftkQqZx2EeQRN7LGdfLlI3FvZ+7WDplm+vK2Urx9ULrvSowtdCieng==}
|
||||
|
||||
onnxruntime-common@1.20.1:
|
||||
resolution: {integrity: sha512-YiU0s0IzYYC+gWvqD1HzLc46Du1sXpSiwzKb63PACIJr6LfL27VsXSXQvt68EzD3V0D5Bc0vyJTjmMxp0ylQiw==}
|
||||
|
||||
onnxruntime-web@1.20.1:
|
||||
resolution: {integrity: sha512-TePF6XVpLL1rWVMIl5Y9ACBQcyCNFThZON/jgElNd9Txb73CIEGlklhYR3UEr1cp5r0rbGI6nDwwrs79g7WjoA==}
|
||||
|
||||
platform@1.3.6:
|
||||
resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==}
|
||||
|
||||
protobufjs@7.4.0:
|
||||
resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
typescript@5.7.3:
|
||||
resolution: {integrity: sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.20.0:
|
||||
resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@biomejs/biome@1.9.4':
|
||||
optionalDependencies:
|
||||
'@biomejs/cli-darwin-arm64': 1.9.4
|
||||
'@biomejs/cli-darwin-x64': 1.9.4
|
||||
'@biomejs/cli-linux-arm64': 1.9.4
|
||||
'@biomejs/cli-linux-arm64-musl': 1.9.4
|
||||
'@biomejs/cli-linux-x64': 1.9.4
|
||||
'@biomejs/cli-linux-x64-musl': 1.9.4
|
||||
'@biomejs/cli-win32-arm64': 1.9.4
|
||||
'@biomejs/cli-win32-x64': 1.9.4
|
||||
|
||||
'@biomejs/cli-darwin-arm64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-darwin-x64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-linux-arm64-musl@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-linux-arm64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-linux-x64-musl@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-linux-x64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-win32-arm64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@biomejs/cli-win32-x64@1.9.4':
|
||||
optional: true
|
||||
|
||||
'@esbuild/aix-ppc64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.25.0':
|
||||
optional: true
|
||||
|
||||
'@protobufjs/aspromise@1.1.2': {}
|
||||
|
||||
'@protobufjs/base64@1.1.2': {}
|
||||
|
||||
'@protobufjs/codegen@2.0.4': {}
|
||||
|
||||
'@protobufjs/eventemitter@1.1.0': {}
|
||||
|
||||
'@protobufjs/fetch@1.1.0':
|
||||
dependencies:
|
||||
'@protobufjs/aspromise': 1.1.2
|
||||
'@protobufjs/inquire': 1.1.0
|
||||
|
||||
'@protobufjs/float@1.0.2': {}
|
||||
|
||||
'@protobufjs/inquire@1.1.0': {}
|
||||
|
||||
'@protobufjs/path@1.1.2': {}
|
||||
|
||||
'@protobufjs/pool@1.1.0': {}
|
||||
|
||||
'@protobufjs/utf8@1.1.0': {}
|
||||
|
||||
'@types/node@22.13.5':
|
||||
dependencies:
|
||||
undici-types: 6.20.0
|
||||
|
||||
esbuild@0.25.0:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.25.0
|
||||
'@esbuild/android-arm': 0.25.0
|
||||
'@esbuild/android-arm64': 0.25.0
|
||||
'@esbuild/android-x64': 0.25.0
|
||||
'@esbuild/darwin-arm64': 0.25.0
|
||||
'@esbuild/darwin-x64': 0.25.0
|
||||
'@esbuild/freebsd-arm64': 0.25.0
|
||||
'@esbuild/freebsd-x64': 0.25.0
|
||||
'@esbuild/linux-arm': 0.25.0
|
||||
'@esbuild/linux-arm64': 0.25.0
|
||||
'@esbuild/linux-ia32': 0.25.0
|
||||
'@esbuild/linux-loong64': 0.25.0
|
||||
'@esbuild/linux-mips64el': 0.25.0
|
||||
'@esbuild/linux-ppc64': 0.25.0
|
||||
'@esbuild/linux-riscv64': 0.25.0
|
||||
'@esbuild/linux-s390x': 0.25.0
|
||||
'@esbuild/linux-x64': 0.25.0
|
||||
'@esbuild/netbsd-arm64': 0.25.0
|
||||
'@esbuild/netbsd-x64': 0.25.0
|
||||
'@esbuild/openbsd-arm64': 0.25.0
|
||||
'@esbuild/openbsd-x64': 0.25.0
|
||||
'@esbuild/sunos-x64': 0.25.0
|
||||
'@esbuild/win32-arm64': 0.25.0
|
||||
'@esbuild/win32-ia32': 0.25.0
|
||||
'@esbuild/win32-x64': 0.25.0
|
||||
|
||||
flatbuffers@1.12.0: {}
|
||||
|
||||
guid-typescript@1.0.9: {}
|
||||
|
||||
long@5.3.1: {}
|
||||
|
||||
onnxruntime-common@1.20.1: {}
|
||||
|
||||
onnxruntime-web@1.20.1:
|
||||
dependencies:
|
||||
flatbuffers: 1.12.0
|
||||
guid-typescript: 1.0.9
|
||||
long: 5.3.1
|
||||
onnxruntime-common: 1.20.1
|
||||
platform: 1.3.6
|
||||
protobufjs: 7.4.0
|
||||
|
||||
platform@1.3.6: {}
|
||||
|
||||
protobufjs@7.4.0:
|
||||
dependencies:
|
||||
'@protobufjs/aspromise': 1.1.2
|
||||
'@protobufjs/base64': 1.1.2
|
||||
'@protobufjs/codegen': 2.0.4
|
||||
'@protobufjs/eventemitter': 1.1.0
|
||||
'@protobufjs/fetch': 1.1.0
|
||||
'@protobufjs/float': 1.0.2
|
||||
'@protobufjs/inquire': 1.1.0
|
||||
'@protobufjs/path': 1.1.2
|
||||
'@protobufjs/pool': 1.1.0
|
||||
'@protobufjs/utf8': 1.1.0
|
||||
'@types/node': 22.13.5
|
||||
long: 5.3.1
|
||||
|
||||
typescript@5.7.3: {}
|
||||
|
||||
undici-types@6.20.0: {}
|
||||
108 crates/sbv2_wasm/src-js/index.ts (Normal file)
@@ -0,0 +1,108 @@
|
||||
import * as wasm from "../pkg/sbv2_wasm.js";
|
||||
import { InferenceSession, Tensor } from "onnxruntime-web";
|
||||
|
||||
export class ModelHolder {
|
||||
private models: Map<string, [InferenceSession, wasm.StyleVectorWrap]> =
|
||||
new Map();
|
||||
constructor(
|
||||
private tok: wasm.TokenizerWrap,
|
||||
private deberta: InferenceSession,
|
||||
) {}
|
||||
public static async globalInit(buf: ArrayBufferLike) {
|
||||
await wasm.default(buf);
|
||||
}
|
||||
public static async create(tok: string, deberta: ArrayBufferLike) {
|
||||
return new ModelHolder(
|
||||
wasm.load_tokenizer(tok),
|
||||
await InferenceSession.create(deberta, {
|
||||
executionProviders: ["webnn", "webgpu", "wasm", "cpu"],
|
||||
graphOptimizationLevel: "all",
|
||||
}),
|
||||
);
|
||||
}
|
||||
public async synthesize(
|
||||
name: string,
|
||||
text: string,
|
||||
style_id: number = 0,
|
||||
style_weight: number = 1.0,
|
||||
sdp_ratio: number = 0.4,
|
||||
speed: number = 1.0,
|
||||
) {
|
||||
const mod = this.models.get(name);
|
||||
if (!mod) throw new Error(`No model named ${name}`);
|
||||
const [vits2, style] = mod;
|
||||
return wasm.synthesize(
|
||||
text,
|
||||
this.tok,
|
||||
async (a: BigInt64Array, b: BigInt64Array) => {
|
||||
try {
|
||||
const res = (
|
||||
await this.deberta.run({
|
||||
input_ids: new Tensor("int64", a, [1, a.length]),
|
||||
attention_mask: new Tensor("int64", b, [1, b.length]),
|
||||
})
|
||||
)["output"];
|
||||
return [new Uint32Array(res.dims), await res.getData(true)];
|
||||
} catch (e) {
|
||||
console.warn(e);
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
async (
|
||||
[a_shape, a_array]: any,
|
||||
b_d: any,
|
||||
c_d: any,
|
||||
d_d: any,
|
||||
e_d: any,
|
||||
f: number,
|
||||
g: number,
|
||||
) => {
|
||||
try {
|
||||
const a = new Tensor("float32", a_array, [1, ...a_shape]);
|
||||
const b = new Tensor("int64", b_d, [1, b_d.length]);
|
||||
const c = new Tensor("int64", c_d, [1, c_d.length]);
|
||||
const d = new Tensor("int64", d_d, [1, d_d.length]);
|
||||
const e = new Tensor("float32", e_d, [1, e_d.length]);
|
||||
const res = (
|
||||
await vits2.run({
|
||||
x_tst: b,
|
||||
x_tst_lengths: new Tensor("int64", [b_d.length]),
|
||||
sid: new Tensor("int64", [0]),
|
||||
tones: c,
|
||||
language: d,
|
||||
bert: a,
|
||||
style_vec: e,
|
||||
sdp_ratio: new Tensor("float32", [f]),
|
||||
length_scale: new Tensor("float32", [g]),
|
||||
noise_scale: new Tensor("float32", [0.677]),
|
||||
noise_scale_w: new Tensor("float32", [0.8]),
|
||||
})
|
||||
).output;
|
||||
return [new Uint32Array(res.dims), await res.getData(true)];
|
||||
} catch (e) {
|
||||
console.warn(e);
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
sdp_ratio,
|
||||
1.0 / speed,
|
||||
style_id,
|
||||
style_weight,
|
||||
style,
|
||||
);
|
||||
}
|
||||
public async load(name: string, b: Uint8Array) {
|
||||
const [style, vits2_b] = wasm.load_sbv2file(b);
|
||||
const vits2 = await InferenceSession.create(vits2_b as Uint8Array, {
|
||||
executionProviders: ["webnn", "webgpu", "wasm", "cpu"],
|
||||
graphOptimizationLevel: "all",
|
||||
});
|
||||
this.models.set(name, [vits2, style]);
|
||||
}
|
||||
public async unload(name: string) {
|
||||
return this.models.delete(name);
|
||||
}
|
||||
public modelList() {
|
||||
return this.models.keys();
|
||||
}
|
||||
}
|
||||
102 crates/sbv2_wasm/src/array_helper.rs (Normal file)
@@ -0,0 +1,102 @@
|
||||
pub fn vec8_to_array8(v: Vec<u8>) -> js_sys::Uint8Array {
|
||||
let arr = js_sys::Uint8Array::new_with_length(v.len() as u32);
|
||||
arr.copy_from(&v);
|
||||
arr
|
||||
}
|
||||
|
||||
pub fn vec_f32_to_array_f32(v: Vec<f32>) -> js_sys::Float32Array {
|
||||
let arr = js_sys::Float32Array::new_with_length(v.len() as u32);
|
||||
arr.copy_from(&v);
|
||||
arr
|
||||
}
|
||||
|
||||
pub fn array8_to_vec8(buf: js_sys::Uint8Array) -> Vec<u8> {
|
||||
let mut body = vec![0; buf.length() as usize];
|
||||
buf.copy_to(&mut body[..]);
|
||||
body
|
||||
}
|
||||
|
||||
pub fn vec64_to_array64(v: Vec<i64>) -> js_sys::BigInt64Array {
|
||||
let arr = js_sys::BigInt64Array::new_with_length(v.len() as u32);
|
||||
arr.copy_from(&v);
|
||||
arr
|
||||
}
|
||||
|
||||
pub fn vec_to_array(v: Vec<wasm_bindgen::JsValue>) -> js_sys::Array {
|
||||
let arr = js_sys::Array::new_with_length(v.len() as u32);
|
||||
for (i, v) in v.into_iter().enumerate() {
|
||||
arr.set(i as u32, v);
|
||||
}
|
||||
arr
|
||||
}
|
||||
|
||||
struct A {
|
||||
shape: Vec<u32>,
|
||||
data: Vec<f32>,
|
||||
}
|
||||
|
||||
impl TryFrom<wasm_bindgen::JsValue> for A {
|
||||
type Error = sbv2_core::error::Error;
|
||||
|
||||
fn try_from(value: wasm_bindgen::JsValue) -> Result<Self, Self::Error> {
|
||||
let value: js_sys::Array = value.into();
|
||||
let mut shape = vec![];
|
||||
let mut data = vec![];
|
||||
for (i, v) in value.iter().enumerate() {
|
||||
match i {
|
||||
0 => {
|
||||
let v: js_sys::Uint32Array = v.into();
|
||||
shape = vec![0; v.length() as usize];
|
||||
v.copy_to(&mut shape);
|
||||
}
|
||||
1 => {
|
||||
let v: js_sys::Float32Array = v.into();
|
||||
data = vec![0.0; v.length() as usize];
|
||||
v.copy_to(&mut data);
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
Ok(A { shape, data })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn array_to_array2_f32(
|
||||
a: wasm_bindgen::JsValue,
|
||||
) -> sbv2_core::error::Result<ndarray::Array2<f32>> {
|
||||
let a = A::try_from(a)?;
|
||||
if a.shape.len() != 2 {
|
||||
return Err(sbv2_core::error::Error::OtherError(
|
||||
"Length mismatch".to_string(),
|
||||
));
|
||||
}
|
||||
let shape = [a.shape[0] as usize, a.shape[1] as usize];
|
||||
let arr = ndarray::Array2::from_shape_vec(shape, a.data.to_vec())
|
||||
.map_err(|e| sbv2_core::error::Error::OtherError(e.to_string()))?;
|
||||
Ok(arr)
|
||||
}
|
||||
pub fn array_to_array3_f32(
|
||||
a: wasm_bindgen::JsValue,
|
||||
) -> sbv2_core::error::Result<ndarray::Array3<f32>> {
|
||||
let a = A::try_from(a)?;
|
||||
if a.shape.len() != 3 {
|
||||
return Err(sbv2_core::error::Error::OtherError(
|
||||
"Length mismatch".to_string(),
|
||||
));
|
||||
}
|
||||
let shape = [
|
||||
a.shape[0] as usize,
|
||||
a.shape[1] as usize,
|
||||
a.shape[2] as usize,
|
||||
];
|
||||
let arr = ndarray::Array3::from_shape_vec(shape, a.data.to_vec())
|
||||
.map_err(|e| sbv2_core::error::Error::OtherError(e.to_string()))?;
|
||||
Ok(arr)
|
||||
}
|
||||
|
||||
pub fn array2_f32_to_array(a: ndarray::Array2<f32>) -> js_sys::Array {
|
||||
let shape: Vec<wasm_bindgen::JsValue> = a.shape().iter().map(|f| (*f as u32).into()).collect();
|
||||
let typed_array = js_sys::Float32Array::new_with_length(a.len() as u32);
|
||||
typed_array.copy_from(&a.into_flat().to_vec());
|
||||
vec_to_array(vec![vec_to_array(shape).into(), typed_array.into()])
|
||||
}
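These helpers move tensors across the JS boundary as a `[shape, data]` pair: a `Uint32Array` of dimensions plus a flat `Float32Array`. Stripped of the `js_sys` plumbing, the reconstruction is just `from_shape_vec`; a plain-`ndarray` sketch of the round trip:

```rust
use ndarray::Array2;

fn main() {
    let original =
        Array2::from_shape_vec((2, 3), vec![1.0f32, 2.0, 3.0, 4.0, 5.0, 6.0]).unwrap();

    // What array2_f32_to_array effectively exports:
    let shape: Vec<u32> = original.shape().iter().map(|&d| d as u32).collect();
    let data: Vec<f32> = original.iter().copied().collect();

    // What array_to_array2_f32 effectively does on the way back in:
    let rebuilt =
        Array2::from_shape_vec((shape[0] as usize, shape[1] as usize), data).unwrap();
    assert_eq!(rebuilt, original);
}
```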
|
||||
123 crates/sbv2_wasm/src/lib.rs (Normal file)
@@ -0,0 +1,123 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use sbv2_core::*;
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen_futures::JsFuture;
|
||||
mod array_helper;
|
||||
|
||||
static JTALK: Lazy<jtalk::JTalk> = Lazy::new(|| jtalk::JTalk::new().unwrap());
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub struct TokenizerWrap {
|
||||
tokenizer: tokenizer::Tokenizer,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn load_tokenizer(s: js_sys::JsString) -> Result<TokenizerWrap, JsError> {
|
||||
if let Some(s) = s.as_string() {
|
||||
Ok(TokenizerWrap {
|
||||
tokenizer: tokenizer::Tokenizer::from_bytes(s.as_bytes())
|
||||
.map_err(|e| JsError::new(&e.to_string()))?,
|
||||
})
|
||||
} else {
|
||||
Err(JsError::new("invalid utf8"))
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub struct StyleVectorWrap {
|
||||
style_vector: ndarray::Array2<f32>,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn load_sbv2file(buf: js_sys::Uint8Array) -> Result<js_sys::Array, JsError> {
|
||||
let (style_vectors, vits2) = sbv2file::parse_sbv2file(array_helper::array8_to_vec8(buf))?;
|
||||
let buf = array_helper::vec8_to_array8(vits2);
|
||||
Ok(array_helper::vec_to_array(vec![
|
||||
StyleVectorWrap {
|
||||
style_vector: style::load_style(style_vectors)?,
|
||||
}
|
||||
.into(),
|
||||
buf.into(),
|
||||
]))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
#[wasm_bindgen]
|
||||
pub async fn synthesize(
|
||||
text: &str,
|
||||
tokenizer: &TokenizerWrap,
|
||||
bert_predict_fn: js_sys::Function,
|
||||
synthesize_fn: js_sys::Function,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32,
|
||||
style_id: i32,
|
||||
style_weight: f32,
|
||||
style_vectors: &StyleVectorWrap,
|
||||
) -> Result<js_sys::Uint8Array, JsError> {
|
||||
let synthesize_wrap = |bert_ori: ndarray::Array2<f32>,
|
||||
x_tst: ndarray::Array1<i64>,
|
||||
tones: ndarray::Array1<i64>,
|
||||
lang_ids: ndarray::Array1<i64>,
|
||||
style_vector: ndarray::Array1<f32>,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32| async move {
|
||||
let arr = array_helper::vec_to_array(vec![
|
||||
array_helper::array2_f32_to_array(bert_ori).into(),
|
||||
array_helper::vec64_to_array64(x_tst.to_vec()).into(),
|
||||
array_helper::vec64_to_array64(tones.to_vec()).into(),
|
||||
array_helper::vec64_to_array64(lang_ids.to_vec()).into(),
|
||||
array_helper::vec_f32_to_array_f32(style_vector.to_vec()).into(),
|
||||
sdp_ratio.into(),
|
||||
length_scale.into(),
|
||||
]);
|
||||
let res = synthesize_fn
|
||||
.apply(&js_sys::Object::new().into(), &arr)
|
||||
.map_err(|e| {
|
||||
error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
|
||||
})?;
|
||||
let res = JsFuture::from(Into::<js_sys::Promise>::into(res))
|
||||
.await
|
||||
.map_err(|e| {
|
||||
sbv2_core::error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
|
||||
})?;
|
||||
array_helper::array_to_array3_f32(res)
|
||||
};
|
||||
let (bert_ori, phones, tones, lang_ids) = tts_util::parse_text(
|
||||
text,
|
||||
&JTALK,
|
||||
&tokenizer.tokenizer,
|
||||
|token_ids: Vec<i64>, attention_masks: Vec<i64>| {
|
||||
Box::pin(async move {
|
||||
let arr = array_helper::vec_to_array(vec![
|
||||
array_helper::vec64_to_array64(token_ids).into(),
|
||||
array_helper::vec64_to_array64(attention_masks).into(),
|
||||
]);
|
||||
let res = bert_predict_fn
|
||||
.apply(&js_sys::Object::new().into(), &arr)
|
||||
.map_err(|e| {
|
||||
error::Error::OtherError(e.as_string().unwrap_or("unknown".to_string()))
|
||||
})?;
|
||||
let res = JsFuture::from(Into::<js_sys::Promise>::into(res))
|
||||
.await
|
||||
.map_err(|e| {
|
||||
sbv2_core::error::Error::OtherError(
|
||||
e.as_string().unwrap_or("unknown".to_string()),
|
||||
)
|
||||
})?;
|
||||
array_helper::array_to_array2_f32(res)
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let audio = synthesize_wrap(
|
||||
bert_ori.to_owned(),
|
||||
phones,
|
||||
tones,
|
||||
lang_ids,
|
||||
style::get_style_vector(&style_vectors.style_vector, style_id, style_weight)?,
|
||||
sdp_ratio,
|
||||
length_scale,
|
||||
)
|
||||
.await?;
|
||||
Ok(array_helper::vec8_to_array8(tts_util::array_to_vec(audio)?))
|
||||
}
|
||||
15 crates/sbv2_wasm/tsconfig.json (Normal file)
@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"rootDir": "./src-js",
"outDir": "./dist",
"moduleResolution": "node",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"declaration": true,
"emitDeclarationOnly": true
}
}
@@ -1,22 +0,0 @@
|
||||
[package]
|
||||
name = "sbv2_api"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
axum = "0.7.5"
|
||||
dotenvy.workspace = true
|
||||
env_logger.workspace = true
|
||||
log = "0.4.22"
|
||||
sbv2_core = { version = "0.1.3", path = "../sbv2_core" }
|
||||
serde = { version = "1.0.210", features = ["derive"] }
|
||||
tokio = { version = "1.40.0", features = ["full"] }
|
||||
|
||||
[features]
|
||||
coreml = ["sbv2_core/coreml"]
|
||||
cuda = ["sbv2_core/cuda"]
|
||||
cuda_tf32 = ["sbv2_core/cuda_tf32"]
|
||||
dynamic = ["sbv2_core/dynamic"]
|
||||
directml = ["sbv2_core/directml"]
|
||||
tensorrt = ["sbv2_core/tensorrt"]
|
||||
@@ -1,15 +0,0 @@
|
||||
[package]
|
||||
name = "sbv2_bindings"
|
||||
version = "0.1.1"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[lib]
|
||||
name = "sbv2_bindings"
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
ndarray.workspace = true
|
||||
pyo3 = { version = "0.22.0", features = ["anyhow"] }
|
||||
sbv2_core = { version = "0.1.4", path = "../sbv2_core" }
|
||||
@@ -1,35 +0,0 @@
|
||||
[package]
|
||||
name = "sbv2_core"
|
||||
description = "Style-Bert-VITSの推論ライブラリ"
|
||||
version = "0.1.4"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
repository = "https://github.com/tuna2134/sbv2-api"
|
||||
documentation = "https://docs.rs/sbv2_core"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
dotenvy.workspace = true
|
||||
env_logger.workspace = true
|
||||
hound = "3.5.1"
|
||||
jpreprocess = { version = "0.10.0", features = ["naist-jdic"] }
|
||||
ndarray.workspace = true
|
||||
num_cpus = "1.16.0"
|
||||
once_cell = "1.19.0"
|
||||
ort = { git = "https://github.com/pykeio/ort.git", version = "2.0.0-rc.6" }
|
||||
regex = "1.10.6"
|
||||
serde = { version = "1.0.210", features = ["derive"] }
|
||||
serde_json = "1.0.128"
|
||||
tar = "0.4.41"
|
||||
thiserror = "1.0.63"
|
||||
tokenizers = "0.20.0"
|
||||
zstd = "0.13.2"
|
||||
|
||||
[features]
|
||||
cuda = ["ort/cuda"]
|
||||
cuda_tf32 = []
|
||||
dynamic = ["ort/load-dynamic"]
|
||||
directml = ["ort/directml"]
|
||||
tensorrt = ["ort/tensorrt"]
|
||||
coreml = ["ort/coreml"]
|
||||
@@ -1,23 +0,0 @@
|
||||
use crate::error::Result;
|
||||
use ndarray::Array2;
|
||||
use ort::Session;
|
||||
|
||||
pub fn predict(
|
||||
session: &Session,
|
||||
token_ids: Vec<i64>,
|
||||
attention_masks: Vec<i64>,
|
||||
) -> Result<Array2<f32>> {
|
||||
let outputs = session.run(
|
||||
ort::inputs! {
|
||||
"input_ids" => Array2::from_shape_vec((1, token_ids.len()), token_ids).unwrap(),
|
||||
"attention_mask" => Array2::from_shape_vec((1, attention_masks.len()), attention_masks).unwrap(),
|
||||
}?
|
||||
)?;
|
||||
|
||||
let output = outputs.get("output").unwrap();
|
||||
|
||||
let content = output.try_extract_tensor::<f32>()?.to_owned();
|
||||
let (data, _) = content.clone().into_raw_vec_and_offset();
|
||||
|
||||
Ok(Array2::from_shape_vec((content.shape()[0], content.shape()[1]), data).unwrap())
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
use std::fs;
|
||||
|
||||
use sbv2_core::tts;
|
||||
use std::env;
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
dotenvy::dotenv_override().ok();
|
||||
env_logger::init();
|
||||
let text = fs::read_to_string("content.txt")?;
|
||||
let ident = "aaa";
|
||||
let mut tts_holder = tts::TTSModelHolder::new(
|
||||
&fs::read(env::var("BERT_MODEL_PATH")?)?,
|
||||
&fs::read(env::var("TOKENIZER_PATH")?)?,
|
||||
)?;
|
||||
tts_holder.load_sbv2file(ident, fs::read(env::var("MODEL_PATH")?)?)?;
|
||||
|
||||
let audio = tts_holder.easy_synthesize(ident, &text, 0, tts::SynthesizeOptions::default())?;
|
||||
fs::write("output.wav", audio)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,93 +0,0 @@
|
||||
use crate::error::Result;
|
||||
use ndarray::{array, Array1, Array2, Array3, Axis};
|
||||
use ort::{GraphOptimizationLevel, Session};
|
||||
|
||||
#[allow(clippy::vec_init_then_push, unused_variables)]
|
||||
pub fn load_model<P: AsRef<[u8]>>(model_file: P, bert: bool) -> Result<Session> {
|
||||
let mut exp = Vec::new();
|
||||
#[cfg(feature = "tensorrt")]
|
||||
{
|
||||
if bert {
|
||||
exp.push(
|
||||
ort::TensorRTExecutionProvider::default()
|
||||
.with_fp16(true)
|
||||
.with_profile_min_shapes("input_ids:1x1,attention_mask:1x1")
|
||||
.with_profile_max_shapes("input_ids:1x100,attention_mask:1x100")
|
||||
.with_profile_opt_shapes("input_ids:1x25,attention_mask:1x25")
|
||||
.build(),
|
||||
);
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "cuda")]
|
||||
{
|
||||
#[allow(unused_mut)]
|
||||
let mut cuda = ort::CUDAExecutionProvider::default()
|
||||
.with_conv_algorithm_search(ort::CUDAExecutionProviderCuDNNConvAlgoSearch::Default);
|
||||
#[cfg(feature = "cuda_tf32")]
|
||||
{
|
||||
cuda = cuda.with_tf32(true);
|
||||
}
|
||||
exp.push(cuda.build());
|
||||
}
|
||||
#[cfg(feature = "directml")]
|
||||
{
|
||||
exp.push(ort::DirectMLExecutionProvider::default().build());
|
||||
}
|
||||
#[cfg(feature = "coreml")]
|
||||
{
|
||||
exp.push(ort::CoreMLExecutionProvider::default().build());
|
||||
}
|
||||
exp.push(ort::CPUExecutionProvider::default().build());
|
||||
Ok(Session::builder()?
|
||||
.with_execution_providers(exp)?
|
||||
.with_optimization_level(GraphOptimizationLevel::Level3)?
|
||||
.with_intra_threads(num_cpus::get_physical())?
|
||||
.with_parallel_execution(true)?
|
||||
.with_inter_threads(num_cpus::get_physical())?
|
||||
.commit_from_memory(model_file.as_ref())?)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn synthesize(
|
||||
session: &Session,
|
||||
bert_ori: Array2<f32>,
|
||||
x_tst: Array1<i64>,
|
||||
tones: Array1<i64>,
|
||||
lang_ids: Array1<i64>,
|
||||
style_vector: Array1<f32>,
|
||||
sdp_ratio: f32,
|
||||
length_scale: f32,
|
||||
) -> Result<Array3<f32>> {
|
||||
let bert = bert_ori.insert_axis(Axis(0));
|
||||
let x_tst_lengths: Array1<i64> = array![x_tst.shape()[0] as i64];
|
||||
let x_tst = x_tst.insert_axis(Axis(0));
|
||||
let lang_ids = lang_ids.insert_axis(Axis(0));
|
||||
let tones = tones.insert_axis(Axis(0));
|
||||
let style_vector = style_vector.insert_axis(Axis(0));
|
||||
let outputs = session.run(ort::inputs! {
|
||||
"x_tst" => x_tst,
|
||||
"x_tst_lengths" => x_tst_lengths,
|
||||
"sid" => array![0_i64],
|
||||
"tones" => tones,
|
||||
"language" => lang_ids,
|
||||
"bert" => bert,
|
||||
"style_vec" => style_vector,
|
||||
"sdp_ratio" => array![sdp_ratio],
|
||||
"length_scale" => array![length_scale],
|
||||
}?)?;
|
||||
|
||||
let audio_array = outputs
|
||||
.get("output")
|
||||
.unwrap()
|
||||
.try_extract_tensor::<f32>()?
|
||||
.to_owned();
|
||||
|
||||
Ok(Array3::from_shape_vec(
|
||||
(
|
||||
audio_array.shape()[0],
|
||||
audio_array.shape()[1],
|
||||
audio_array.shape()[2],
|
||||
),
|
||||
audio_array.into_raw_vec_and_offset().0,
|
||||
)?)
|
||||
}
|
||||
5 scripts/.gitignore (vendored, Normal file)
@@ -0,0 +1,5 @@
*.json
venv/
tmp/
*.safetensors
*.npy
@@ -5,6 +5,7 @@ from transformers import AutoModelForMaskedLM, AutoTokenizer
|
||||
import torch
|
||||
from torch import nn
|
||||
from argparse import ArgumentParser
|
||||
import os
|
||||
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("--model", default="ku-nlp/deberta-v2-large-japanese-char-wwm")
|
||||
@@ -15,7 +16,7 @@ bert_models.load_tokenizer(Languages.JP, model_name)
|
||||
tokenizer = bert_models.load_tokenizer(Languages.JP)
|
||||
converter = BertConverter(tokenizer)
|
||||
tokenizer = converter.converted()
|
||||
tokenizer.save("../models/tokenizer.json")
|
||||
tokenizer.save("../../models/tokenizer.json")
|
||||
|
||||
|
||||
class ORTDeberta(nn.Module):
|
||||
@@ -42,9 +43,10 @@ inputs = AutoTokenizer.from_pretrained(model_name)(
|
||||
torch.onnx.export(
|
||||
model,
|
||||
(inputs["input_ids"], inputs["token_type_ids"], inputs["attention_mask"]),
|
||||
"../models/deberta.onnx",
|
||||
"../../models/deberta.onnx",
|
||||
input_names=["input_ids", "token_type_ids", "attention_mask"],
|
||||
output_names=["output"],
|
||||
verbose=True,
|
||||
dynamic_axes={"input_ids": {1: "batch_size"}, "attention_mask": {1: "batch_size"}},
|
||||
)
|
||||
os.system("onnxsim ../../models/deberta.onnx ../../models/deberta.onnx")
|
||||
@@ -36,7 +36,7 @@ data = array.tolist()
|
||||
hyper_parameters = HyperParameters.load_from_json(config_file)
|
||||
out_name = hyper_parameters.model_name
|
||||
|
||||
with open(f"../models/style_vectors_{out_name}.json", "w") as f:
|
||||
with open(f"../../models/style_vectors_{out_name}.json", "w") as f:
|
||||
json.dump(
|
||||
{
|
||||
"data": data,
|
||||
@@ -94,7 +94,7 @@ model = get_net_g(
|
||||
)
|
||||
|
||||
|
||||
def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio):
|
||||
def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio, noise_scale, noise_scale_w):
|
||||
return model.infer(
|
||||
x,
|
||||
x_len,
|
||||
@@ -105,6 +105,8 @@ def forward(x, x_len, sid, tone, lang, bert, style, length_scale, sdp_ratio):
|
||||
style,
|
||||
sdp_ratio=sdp_ratio,
|
||||
length_scale=length_scale,
|
||||
noise_scale=noise_scale,
|
||||
noise_scale_w=noise_scale_w,
|
||||
)
|
||||
|
||||
|
||||
@@ -122,8 +124,10 @@ torch.onnx.export(
|
||||
style_vec_tensor,
|
||||
torch.tensor(1.0),
|
||||
torch.tensor(0.0),
|
||||
torch.tensor(0.6777),
|
||||
torch.tensor(0.8),
|
||||
),
|
||||
f"../models/model_{out_name}.onnx",
|
||||
f"../../models/model_{out_name}.onnx",
|
||||
verbose=True,
|
||||
dynamic_axes={
|
||||
"x_tst": {0: "batch_size", 1: "x_tst_max_length"},
|
||||
@@ -144,14 +148,16 @@ torch.onnx.export(
|
||||
"style_vec",
|
||||
"length_scale",
|
||||
"sdp_ratio",
|
||||
"noise_scale",
|
||||
"noise_scale_w"
|
||||
],
|
||||
output_names=["output"],
|
||||
)
|
||||
os.system(f"onnxsim ../models/model_{out_name}.onnx ../models/model_{out_name}.onnx")
|
||||
onnxfile = open(f"../models/model_{out_name}.onnx", "rb").read()
|
||||
stylefile = open(f"../models/style_vectors_{out_name}.json", "rb").read()
|
||||
os.system(f"onnxsim ../../models/model_{out_name}.onnx ../../models/model_{out_name}.onnx")
|
||||
onnxfile = open(f"../../models/model_{out_name}.onnx", "rb").read()
|
||||
stylefile = open(f"../../models/style_vectors_{out_name}.json", "rb").read()
|
||||
version = bytes("1", "utf8")
|
||||
with taropen(f"../models/tmp_{out_name}.sbv2tar", "w") as w:
|
||||
with taropen(f"../../models/tmp_{out_name}.sbv2tar", "w") as w:
|
||||
|
||||
def add_tar(f, b):
|
||||
t = TarInfo(f)
|
||||
@@ -161,9 +167,9 @@ with taropen(f"../models/tmp_{out_name}.sbv2tar", "w") as w:
|
||||
add_tar("version.txt", version)
|
||||
add_tar("model.onnx", onnxfile)
|
||||
add_tar("style_vectors.json", stylefile)
|
||||
open(f"../models/{out_name}.sbv2", "wb").write(
|
||||
open(f"../../models/{out_name}.sbv2", "wb").write(
|
||||
ZstdCompressor(threads=-1, level=22).compress(
|
||||
open(f"../models/tmp_{out_name}.sbv2tar", "rb").read()
|
||||
open(f"../../models/tmp_{out_name}.sbv2tar", "rb").read()
|
||||
)
|
||||
)
|
||||
os.unlink(f"../models/tmp_{out_name}.sbv2tar")
|
||||
os.unlink(f"../../models/tmp_{out_name}.sbv2tar")
|
||||
14 scripts/make_dict.sh (Executable file)
@@ -0,0 +1,14 @@
#!/bin/bash
set -e
git clone https://github.com/Aivis-Project/AivisSpeech-Engine ./scripts/tmp --filter=blob:none -n
cd ./scripts/tmp
git checkout 168b2a1144afe300b0490d9a6dd773ec6e927667 -- resources/dictionaries/*.csv
cd ../..
rm -rf ./crates/sbv2_core/src/dic
cp -r ./scripts/tmp/resources/dictionaries ./crates/sbv2_core/src/dic
rm -rf ./scripts/tmp
for file in ./crates/sbv2_core/src/dic/0*.csv; do
/usr/bin/cat "$file"
echo
done > ./crates/sbv2_core/src/all.csv
lindera build ./crates/sbv2_core/src/all.csv ./crates/sbv2_core/src/dic/all.dic -u -k ipadic
180 scripts/sbv2-bindings-colab.ipynb (Normal file)
@@ -0,0 +1,180 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# 音声合成プログラム\n",
|
||||
"\n",
|
||||
"このノートブックでは、`sbv2_bindings` パッケージを使用して音声合成を行います。必要なモデルをダウンロードし、ユーザーが入力したテキストから音声を生成します。音声合成が終わったら、再度テキストの入力を求め、ユーザーが終了するまで繰り返します。"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# 必要なパッケージのインストール\n",
|
||||
"%pip install sbv2_bindings\n",
|
||||
"\n",
|
||||
"# 必要なモジュールのインポート\n",
|
||||
"import os\n",
|
||||
"import urllib.request\n",
|
||||
"import time\n",
|
||||
"from sbv2_bindings import TTSModel"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## モデルのダウンロード\n",
|
||||
"\n",
|
||||
"モデルファイルとトークナイザーをダウンロードします。ユーザーが独自のモデルを使用したい場合は、該当するURLまたはローカルパスを指定してください。"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# モデルの URL またはローカルパスの指定\n",
|
||||
"user_sbv2_model_url = \"\" # カスタムモデルのURLがあればここに指定\n",
|
||||
"user_sbv2_model_path = \"\" # カスタムモデルのローカルパスがあればここに指定\n",
|
||||
"\n",
|
||||
"# モデル用のディレクトリを作成\n",
|
||||
"model_dir = 'models'\n",
|
||||
"os.makedirs(model_dir, exist_ok=True)\n",
|
||||
"\n",
|
||||
"# ダウンロードするファイルの URL\n",
|
||||
"file_urls = [\n",
|
||||
" \"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/tokenizer.json\",\n",
|
||||
" \"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/deberta.onnx\",\n",
|
||||
"]\n",
|
||||
"\n",
|
||||
"# モデルのパス決定\n",
|
||||
"if user_sbv2_model_path:\n",
|
||||
" sbv2_model_path = user_sbv2_model_path # ローカルモデルのパスを使用\n",
|
||||
"elif user_sbv2_model_url:\n",
|
||||
" sbv2_model_filename = os.path.basename(user_sbv2_model_url)\n",
|
||||
" sbv2_model_path = os.path.join(model_dir, sbv2_model_filename)\n",
|
||||
" file_urls.append(user_sbv2_model_url)\n",
|
||||
"else:\n",
|
||||
" # デフォルトのモデルを使用\n",
|
||||
" sbv2_model_filename = \"tsukuyomi.sbv2\"\n",
|
||||
" sbv2_model_path = os.path.join(model_dir, sbv2_model_filename)\n",
|
||||
" file_urls.append(\"https://huggingface.co/googlefan/sbv2_onnx_models/resolve/main/tsukuyomi.sbv2\")\n",
|
||||
"\n",
|
||||
"# ファイルをダウンロード\n",
|
||||
"for url in file_urls:\n",
|
||||
" file_name = os.path.join(model_dir, os.path.basename(url))\n",
|
||||
" if not os.path.exists(file_name):\n",
|
||||
" print(f\"{file_name} をダウンロードしています...\")\n",
|
||||
" urllib.request.urlretrieve(url, file_name)\n",
|
||||
" else:\n",
|
||||
" print(f\"{file_name} は既に存在します。\")\n",
|
||||
"\n",
|
||||
"# ダウンロードまたは使用するファイルを確認\n",
|
||||
"print(\"\\n使用するファイル:\")\n",
|
||||
"for file in os.listdir(model_dir):\n",
|
||||
" print(file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## モデルの読み込みと音声合成\n",
|
||||
"\n",
|
||||
"モデルを読み込み、ユーザーが入力したテキストから音声を生成します。話者名は使用する `.sbv2` ファイル名から自動的に取得します。音声合成が終わったら、再度テキストの入力を求め、ユーザーが終了するまで繰り返します。"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# 音声合成の実行\n",
|
||||
"def main():\n",
|
||||
" try:\n",
|
||||
" print(\"\\nモデルを読み込んでいます...\")\n",
|
||||
" model = TTSModel.from_path(\n",
|
||||
" os.path.join(model_dir, \"deberta.onnx\"),\n",
|
||||
" os.path.join(model_dir, \"tokenizer.json\")\n",
|
||||
" )\n",
|
||||
" print(\"モデルの読み込みが完了しました!\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"モデルの読み込みに失敗しました: {e}\")\n",
|
||||
" return\n",
|
||||
"\n",
|
||||
" # 話者名を取得(.sbv2 ファイル名の拡張子を除いた部分)\n",
|
||||
" speaker_name = os.path.splitext(os.path.basename(sbv2_model_path))[0]\n",
|
||||
" \n",
|
||||
" # 指定されたモデルのパスを使用\n",
|
||||
" try:\n",
|
||||
" model.load_sbv2file_from_path(speaker_name, sbv2_model_path)\n",
|
||||
" print(f\"話者 '{speaker_name}' のセットアップが完了しました!\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"SBV2ファイルの読み込みに失敗しました: {e}\")\n",
|
||||
" return\n",
|
||||
"\n",
|
||||
" # 音声合成を繰り返し実行\n",
|
||||
" while True:\n",
|
||||
" # 合成したいテキストをユーザーから入力\n",
|
||||
" user_input = input(\"\\n音声合成したいテキストを入力してください(終了するには 'exit' と入力): \")\n",
|
||||
" \n",
|
||||
" if user_input.strip().lower() == 'exit':\n",
|
||||
" print(\"音声合成を終了します。\")\n",
|
||||
" break\n",
|
||||
"\n",
|
||||
" # 出力ファイル名\n",
|
||||
" output_file = \"output.wav\"\n",
|
||||
"\n",
|
||||
" # 音声合成を実行\n",
|
||||
" try:\n",
|
||||
" print(\"\\n音声合成を開始します...\")\n",
|
||||
" start_time = time.time()\n",
|
||||
"\n",
|
||||
" audio_data = model.synthesize(user_input, speaker_name, 0, 0.0, 1)\n",
|
||||
"\n",
|
||||
" with open(output_file, \"wb\") as f:\n",
|
||||
" f.write(audio_data)\n",
|
||||
"\n",
|
||||
" end_time = time.time()\n",
|
||||
" elapsed_time = end_time - start_time\n",
|
||||
"\n",
|
||||
" print(f\"\\n音声が '{output_file}' に保存されました。\")\n",
|
||||
" print(f\"音声合成にかかった時間: {elapsed_time:.2f} 秒\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"音声合成に失敗しました: {e}\")\n",
|
||||
"\n",
|
||||
"if __name__ == \"__main__\":\n",
|
||||
" main()"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.x"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 4
|
||||
}
|
||||
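The notebook above boils down to three calls: `TTSModel.from_path`, `load_sbv2file_from_path`, and `synthesize`. Below is a minimal sketch of the same flow as a plain script, reusing only the calls and argument values that appear in the notebook; the paths and the sample text are illustrative, not repository defaults.

# Minimal sketch of the notebook's flow as a plain script; mirrors the calls and
# argument values used in the notebook above. Paths and sample text are illustrative.
import os

from sbv2_bindings import TTSModel


def run_once(model_dir: str = "models", sbv2_file: str = "tsukuyomi.sbv2", text: str = "こんにちは") -> None:
    model = TTSModel.from_path(
        os.path.join(model_dir, "deberta.onnx"),
        os.path.join(model_dir, "tokenizer.json"),
    )
    speaker = os.path.splitext(sbv2_file)[0]
    model.load_sbv2file_from_path(speaker, os.path.join(model_dir, sbv2_file))
    # Same argument pattern as the notebook's synthesize call
    audio = model.synthesize(text, speaker, 0, 0.0, 1)
    with open("output.wav", "wb") as f:
        f.write(audio)


if __name__ == "__main__":
    run_once()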
@@ -3,16 +3,16 @@ from sbv2_bindings import TTSModel

def main():
    print("Loading models...")
-    model = TTSModel.from_path("../models/debert.onnx", "../models/tokenizer.json")
+    model = TTSModel.from_path("./models/debert.onnx", "./models/tokenizer.json")
    print("Models loaded!")

-    model.load_sbv2file_from_path("amitaro", "../models/amitaro.sbv2")
+    model.load_sbv2file_from_path("amitaro", "./models/amitaro.sbv2")
    print("All setup is done!")

    style_vector = model.get_style_vector("amitaro", 0, 1.0)
    with open("output.wav", "wb") as f:
        f.write(
-            model.synthesize("おはようございます。", "amitaro", style_vector, 0.0, 0.5)
+            model.synthesize("おはようございます。", "amitaro", 0, 0, 0.0, 0.5)
        )