apply cargo clippy --fix on the latest stable rustc (1.67.0)

TOKUNAGA Hiroyuki
2023-01-28 02:57:38 +09:00
parent fdb11f707b
commit 3daaea9bbe
21 changed files with 30 additions and 34 deletions
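
Every change below is the same mechanical rewrite that cargo clippy --fix applies for Clippy's uninlined_format_args lint: when an argument to a formatting macro is a plain identifier, the identifier is captured directly inside the braces of the format string. A minimal sketch of the pattern (illustrative code only, not taken from this repository):

fn main() {
    let surface = "word";
    let count = 3;

    // Before the fix: positional arguments.
    println!("{} ({})", surface, count);

    // After the fix: bare identifiers move into the format string.
    println!("{surface} ({count})");

    // Expressions and field accesses cannot be captured, so such arguments
    // are left in place and those call sites are untouched by the lint.
    let items = vec!["a", "b"];
    println!("{} items: {items:?}", items.len());
}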

@@ -49,7 +49,7 @@ pub fn check(
let lattice = engine.to_lattice(yomi, None)?;
if let Some(expected) = expected {
let _dot = lattice.dump_cost_dot(expected.as_str());
println!("{}", _dot);
println!("{_dot}");
let mut file = File::create("/tmp/dump.dot")?;
file.write_all(_dot.as_bytes())?;
}
@@ -57,7 +57,7 @@ pub fn check(
let got = engine.resolve(&lattice)?;
let terms: Vec<String> = got.iter().map(|f| f[0].surface.clone()).collect();
let result = terms.join("/");
println!("{}", result);
println!("{result}");
Ok(())
}

@@ -15,7 +15,7 @@ pub fn dump_bigram_dict(unigram_file: &str, bigram_file: &str) -> anyhow::Result
for ((word_id1, word_id2), cost) in bigram.as_hash_map() {
let key1 = unigram_map.get(&word_id1).unwrap();
let key2 = unigram_map.get(&word_id2).unwrap();
println!("{} {} {}", cost, key1, key2);
println!("{cost} {key1} {key2}");
}
Ok(())

@@ -6,7 +6,7 @@ pub fn dump_unigram_dict(filename: &str) -> anyhow::Result<()> {
let dict_map = dict.as_hash_map();
for yomi in dict_map.keys() {
let (word_id, score) = dict.find(yomi.as_str()).unwrap();
println!("{} {} {}", yomi, word_id, score);
println!("{yomi} {word_id} {score}");
}
Ok(())

@@ -86,7 +86,7 @@ pub fn evaluate(
let mut saigen_ritsu = SaigenRitsu::default();
for file in corpus {
let fp = File::open(file).with_context(|| format!("File: {}", file))?;
let fp = File::open(file).with_context(|| format!("File: {file}"))?;
for line in BufReader::new(fp).lines() {
let line = line?;
let line = line.trim();
@@ -96,7 +96,7 @@ pub fn evaluate(
let (yomi, surface) = line
.split_once(' ')
.with_context(|| format!("source: {}", line))
.with_context(|| format!("source: {line}"))
.unwrap();
let yomi = yomi.replace('|', "");
let surface = surface.replace('|', "");

@@ -132,7 +132,7 @@ impl LearningService {
let terms: Vec<String> = got.iter().map(|f| f[0].surface.clone()).collect();
let result = terms.join("");
println!("{}", result);
println!("{result}");
// When the result is not the correct answer, I think it means the occurrence-frequency probabilities are off,
// so increase the frequency.

@@ -75,7 +75,7 @@ pub fn make_stats_system_bigram_lm(
"work/dump/bigram-{}.txt",
Local::now().format("%Y%m%d-%H%M%S")
);
println!("Dump to text file: {}", dumpfname);
println!("Dump to text file: {dumpfname}");
let mut file = File::create(dumpfname)?;
for ((word_id1, word_id2), cnt) in &merged {
let Some(word1) = reverse_unigram_map.get(word_id1) else {
@@ -85,7 +85,7 @@ pub fn make_stats_system_bigram_lm(
continue;
};
if *cnt > 16 {
file.write_fmt(format_args!("{}\t{}\t{}\n", cnt, word1, word2))?;
file.write_fmt(format_args!("{cnt}\t{word1}\t{word2}\n"))?;
}
}
@@ -155,18 +155,17 @@ fn validation(unigram_dst: &str, bigram_dst: &str) -> Result<()> {
let (word1_id, watshi_cost) = unigram
.find(word1)
.ok_or_else(|| anyhow!("Cannot find '{}' in unigram dict.", word1))?;
println!("word1_id={} word1_cost={}", word1_id, watshi_cost);
println!("word1_id={word1_id} word1_cost={watshi_cost}");
let word2 = "から/から";
let (word2_id, word2_cost) = unigram
.find(word2)
.ok_or_else(|| anyhow!("Cannot find '{}' in unigram dict.", word1))?;
println!("word2_id={} word2_cost={}", word2_id, word2_cost);
println!("word2_id={word2_id} word2_cost={word2_cost}");
bigram.get_edge_cost(word1_id, word2_id).with_context(|| {
format!(
"Get bigram entry: '{} -> {}' {},{}",
word1, word2, word1_id, word2_id
"Get bigram entry: '{word1} -> {word2}' {word1_id},{word2_id}"
)
})?;
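
The inlining also happens inside format_args!, write_fmt, and the format! closures passed to with_context, and a format! that was spread over several lines collapses back to one. Arguments that are expressions, such as the Local::now().format(...) call above, stay positional, which is why that line is unchanged. A small self-contained sketch (the function and values below are illustrative, not from the repository):

use std::io::Write;

fn dump_counts(out: &mut impl Write, word1: &str, word2: &str, cnt: u32) -> std::io::Result<()> {
    // Before: out.write_fmt(format_args!("{}\t{}\t{}\n", cnt, word1, word2))?;
    // After: the inner format string is rewritten in exactly the same way.
    out.write_fmt(format_args!("{cnt}\t{word1}\t{word2}\n"))?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    let mut buf: Vec<u8> = Vec::new();
    dump_counts(&mut buf, "word1", "word2", 17)?;
    assert_eq!(buf, b"17\tword1\tword2\n".to_vec());
    Ok(())
}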

@@ -18,7 +18,7 @@ pub fn make_stats_system_unigram_lm(srcpath: &str, dstpath: &str) -> anyhow::Res
// Make the IDs fit into 3 bytes.
// Therefore, limit the vocab so that there are at most 8,388,608 words.
// Even with a realistic cutoff, about 5 million words is plenty.
panic!("too much words in wfreq file: {}", srcpath);
panic!("too much words in wfreq file: {srcpath}");
}
homograph_hack(&mut wordcnt);
@@ -29,7 +29,7 @@ pub fn make_stats_system_unigram_lm(srcpath: &str, dstpath: &str) -> anyhow::Res
builder.add(word.as_str(), *score);
}
println!("Writing {}", dstpath);
println!("Writing {dstpath}");
builder.save(dstpath)?;
Ok(())

@@ -27,7 +27,7 @@ pub fn vocab(src_file: &str, dst_file: &str, threshold: u32) -> anyhow::Result<(
}
let cnt: u32 = cnt.parse()?;
if cnt > threshold {
ofp.write_fmt(format_args!("{}\n", word))?;
ofp.write_fmt(format_args!("{word}\n"))?;
}
}
fs::rename(dst_file.to_owned() + ".tmp", dst_file)?;

@@ -80,7 +80,7 @@ pub fn wfreq(src_dirs: &Vec<String>, dst_file: &str) -> anyhow::Result<()> {
info!("Skip 2 character katakana entry: {}", word);
continue;
}
ofp.write_fmt(format_args!("{}\t{}\n", word, cnt))?;
ofp.write_fmt(format_args!("{word}\t{cnt}\n"))?;
}
fs::rename(dst_file.to_owned() + ".tmp", dst_file)?;

@@ -82,7 +82,7 @@ pub(crate) fn merge_terms_ipadic(intermediates: Vec<IntermediateToken>) -> Strin
}
}
buf += format!("{}/{} ", surface, yomi).as_str();
buf += format!("{surface}/{yomi} ").as_str();
i = j;
}

@@ -38,7 +38,7 @@ fn main() {
c = c.flag(flag.as_str());
}
for flag in pkgconfig(module, "--libs") {
println!("cargo:rustc-link-arg={}", flag);
println!("cargo:rustc-link-arg={flag}");
}
}
p.compile("wrapper");
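
The rewrite is purely syntactic and produces byte-for-byte identical output, which matters in a build script because cargo parses these cargo: directives from its stdout. A minimal build-script-style sketch (the flag values are illustrative, not real pkg-config output):

fn main() {
    // In the real build.rs these flags come from pkg-config.
    let flags = ["-L/usr/lib", "-lexample"];
    for flag in flags {
        // Before: println!("cargo:rustc-link-arg={}", flag);
        println!("cargo:rustc-link-arg={flag}");
    }
}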

@@ -128,8 +128,7 @@ impl CurrentState {
if idex >= nodes.len() {
// This shouldn't happen, but it does... I wonder why?
panic!(
"[BUG] self.node_selected and self.clauses missmatch: {:?}",
self
"[BUG] self.node_selected and self.clauses missmatch: {self:?}"
)
}
result += &nodes[idex].surface_with_dynamic();
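
Implicit capture keeps its format specifier, so the {:?} placeholder plus the trailing self argument above becomes {self:?}, and the multi-line panic! collapses to a single string. A tiny runnable sketch (the struct and field are hypothetical stand-ins, not the real CurrentState):

#[derive(Debug)]
struct StateSketch {
    node_selected: usize, // hypothetical field, for illustration only
}

impl StateSketch {
    fn describe(&self) -> String {
        // Before: format!("state: {:?}", self)
        // After: the Debug spec stays attached to the captured identifier.
        format!("state: {self:?}")
    }
}

fn main() {
    let state = StateSketch { node_selected: 3 };
    println!("{}", state.describe());
}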

@@ -125,7 +125,7 @@ impl PropController {
// Put a check mark on the menu item of the input mode being enabled.
let Some(property) = self.prop_dict.get(input_mode.prop_name) else {
panic!("Unknown input mode: {:?}", input_mode);
panic!("Unknown input mode: {input_mode:?}");
};
unsafe {
ibus_property_set_state(*property, IBusPropState_PROP_STATE_CHECKED);

@@ -71,7 +71,7 @@ pub fn parse_skkdict(src: &str) -> Result<HashMap<String, Vec<String>>> {
.map(|s| comment_regex.replace(s, "").to_string())
.filter(|it| !it.is_empty())
.collect();
assert!(!yomi.is_empty(), "yomi must not empty: line={}", line);
assert!(!yomi.is_empty(), "yomi must not empty: line={line}");
target.insert(yomi.to_string(), surfaces);
}

@@ -20,9 +20,9 @@ pub fn write_skk_dict(
keys.sort();
for yomi in keys {
let kanjis = merged_dict.get(yomi).unwrap();
assert!(!yomi.is_empty(), "yomi must not be empty: {:?}", kanjis);
assert!(!yomi.is_empty(), "yomi must not be empty: {kanjis:?}");
let kanjis = kanjis.join("/");
wfp.write_fmt(format_args!("{} /{}/\n", yomi, kanjis))?;
wfp.write_fmt(format_args!("{yomi} /{kanjis}/\n"))?;
}
}
Ok(())

@@ -71,9 +71,7 @@ impl WordNode {
) -> WordNode {
assert!(
!surface.is_empty(),
"Kanji shouldn't be empty: {}/{}",
surface,
yomi
"Kanji shouldn't be empty: {surface}/{yomi}"
);
WordNode {

@@ -98,7 +98,7 @@ impl Keymap {
))?;
if let Some(parent) = &got.extends {
let path = detect_resource_path("keymap", &format!("{}.yml", parent))?;
let path = detect_resource_path("keymap", &format!("{parent}.yml"))?;
let mut map = Keymap::load(&path)?;
for (kp, opts) in &got.to_map()? {
@@ -135,7 +135,7 @@ mod tests {
let keymap: Keymap =
serde_yaml::from_reader(BufReader::new(File::open("../keymap/default.yml")?))?;
for kc in keymap.keys {
println!("{:?}", kc);
println!("{kc:?}");
}
Ok(())
}

@@ -58,7 +58,7 @@ impl MarisaSystemBigramLMBuilder {
}
pub fn set_default_edge_cost(&mut self, score: f32) -> &mut Self {
let key = format!("{}\t{}", DEFAULT_COST_KEY, score);
let key = format!("{DEFAULT_COST_KEY}\t{score}");
let key1 = key.as_bytes().to_vec();
self.keyset.push_back(key1.as_slice());
self
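
Capture is not limited to local variables: any bare identifier in scope, including a const like DEFAULT_COST_KEY above, can be inlined. A short sketch (the constant's value here is made up for illustration):

const DEFAULT_COST_KEY: &str = "__DEFAULT_EDGE_COST__";

fn main() {
    let score: f32 = 20.0;
    // Before: format!("{}\t{}", DEFAULT_COST_KEY, score)
    let key = format!("{DEFAULT_COST_KEY}\t{score}");
    println!("{key}");
}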

@@ -6,7 +6,7 @@ pub fn detect_resource_path(base: &str, name: &str) -> anyhow::Result<String> {
let pathstr: String = if cfg!(test) {
format!("{}/../{}/{}", env!("CARGO_MANIFEST_DIR"), base, name)
} else {
let target_path = format!("{}/{}", base, name);
let target_path = format!("{base}/{name}");
let basedirs = xdg::BaseDirectories::with_prefix("akaza")
.with_context(|| "Opening xdg directory with 'akaza' prefix")?;
let pathbuf = basedirs.find_data_file(&target_path);

@@ -23,7 +23,7 @@ fn load_romkan_map(file_path: &str) -> anyhow::Result<HashMap<String, String>> {
if let Some(parent) = got.extends {
// This one extends another map, so load the parent.
// This becomes a recursive process.
let path = detect_resource_path("romkan", &format!("{}.yml", parent))?;
let path = detect_resource_path("romkan", &format!("{parent}.yml"))?;
let mut parent = load_romkan_map(&path)?;
for (k, v) in got.mapping {

@@ -19,7 +19,7 @@ pub(crate) fn read_user_stats_file(path: &String) -> Result<Vec<(String, u32)>>
let count = count
.to_string()
.parse::<u32>()
.with_context(|| format!("Invalid line in user language model: {}", count))?;
.with_context(|| format!("Invalid line in user language model: {count}"))?;
result.push((key.to_string(), count));
}