From 3e601662d9313bfc6b3154c1a6499b0ea539e2d8 Mon Sep 17 00:00:00 2001
From: tuna2134
Date: Mon, 9 Sep 2024 08:47:05 +0000
Subject: [PATCH] clippy

---
 sbv2_core/src/main.rs |  4 ++--
 sbv2_core/src/text.rs | 16 ++++++++--------
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/sbv2_core/src/main.rs b/sbv2_core/src/main.rs
index be8b450..f1d2642 100644
--- a/sbv2_core/src/main.rs
+++ b/sbv2_core/src/main.rs
@@ -7,8 +7,8 @@ fn main() -> error::Result<()> {
     println!("{}", normalized_text);
 
     let jtalk = text::JTalk::new()?;
-    let phones = jtalk.g2p(&normalized_text)?;
-    println!("{:?}", phones);
+    jtalk.g2p(&normalized_text)?;
+    println!("{:?}", ());
 
     let tokenizer = text::get_tokenizer()?;
     println!("{:?}", tokenizer);
diff --git a/sbv2_core/src/text.rs b/sbv2_core/src/text.rs
index dc0a2e8..c604bd2 100644
--- a/sbv2_core/src/text.rs
+++ b/sbv2_core/src/text.rs
@@ -54,11 +54,11 @@ impl JTalk {
     fn fix_phone_tone(&self, phone_tone_list: Vec<(String, i32)>) -> Result<Vec<(String, i32)>> {
         let tone_values: HashSet<i32> = phone_tone_list
             .iter()
-            .map(|(_letter, tone)| tone.clone())
+            .map(|(_letter, tone)| *tone)
             .collect();
         if tone_values.len() == 1 {
             assert!(tone_values == hash_set![0], "{:?}", tone_values);
-            return Ok(phone_tone_list);
+            Ok(phone_tone_list)
         } else if tone_values.len() == 2 {
             if tone_values == hash_set![0, 1] {
                 return Ok(phone_tone_list);
@@ -93,9 +93,9 @@ impl JTalk {
         for (i, letter) in prosodies.iter().enumerate() {
             if letter == "^" {
                 assert!(i == 0);
-            } else if vec!["$", "?", "_", "#"].contains(&letter.as_str()) {
+            } else if ["$", "?", "_", "#"].contains(&letter.as_str()) {
                 results.extend(self.fix_phone_tone(current_phrase.clone())?);
-                if vec!["$", "?"].contains(&letter.as_str()) {
+                if ["$", "?"].contains(&letter.as_str()) {
                     assert!(i == prosodies.len() - 1);
                 }
                 current_phrase = Vec::new();
@@ -175,11 +175,11 @@ impl JTalk {
 
 pub fn normalize_text(text: &str) -> String {
     // 日本語のテキストを正規化する
-    let text = text.replace("~", "ー");
-    let text = text.replace("~", "ー");
-    let text = text.replace("〜", "ー");
+    let text = text.replace('~', "ー");
+    let text = text.replace('~', "ー");
+
 
-    text
+    text.replace('〜', "ー")
 }
 
 pub fn get_tokenizer() -> Result<Tokenizer> {
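
Note (reviewer sketch, not part of the patch): the hunks above are mechanical fixes for a handful of clippy lints. Below is a minimal, self-contained illustration of the same idioms; the function and data are hypothetical and only stand in for the repo code.

// Illustrative only — made-up names, standard library only.
fn normalize(text: &str) -> String {
    // clippy::single_char_pattern — a char pattern is cheaper than a one-character &str.
    // clippy::let_and_return / clippy::needless_return — end the function with the
    // expression itself rather than binding it to a variable or writing `return`.
    text.replace('~', "ー").replace('〜', "ー")
}

fn main() {
    let phone_tone_list = vec![("a".to_string(), 0_i32), ("N".to_string(), 1)];

    // clippy::clone_on_copy — i32 is Copy, so dereference instead of calling clone().
    let tones: Vec<i32> = phone_tone_list.iter().map(|(_letter, tone)| *tone).collect();

    // clippy::useless_vec — an array literal works with `contains`; no vec! allocation needed.
    assert!(["$", "?", "_", "#"].contains(&"?"));

    println!("{:?} {}", tones, normalize("a~b〜c"));
}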