From ea656de889f7a7055dc887353228f76d1ac53e57 Mon Sep 17 00:00:00 2001 From: Ryo Igarashi Date: Sat, 7 Mar 2026 13:49:16 +0900 Subject: [PATCH] fix!: Use object/named parameter for keep_whitespace --- README.md | 12 ++++++------ ainu-utils-js/Cargo.toml | 1 + ainu-utils-js/src/lib.rs | 11 ++++++++--- ainu-utils-js/tests/index.spec.js | 2 +- ainu-utils-python/ainu_utils.pyi | 2 +- ainu-utils-python/src/lib.rs | 1 + 6 files changed, 18 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 95a07a9..a140566 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ Tokenizes Ainu text into morphemes. ```py from ainu_utils import tokenize -tokenize("irankarapte. e=iwanke ya?", False) +tokenize("irankarapte. e=iwanke ya?", keep_whitespace=False) # => ["irankarapte", ".", "e=", "iwanke", "ya?"] ``` @@ -37,7 +37,7 @@ tokenize("irankarapte. e=iwanke ya?", False) ```js import { tokenize } from "ainu-utils"; -tokenize("irankarapte. e=iwanke ya?", false); +tokenize("irankarapte. e=iwanke ya?", { keepWhitespace: false }); // => ["irankarapte", ".", "e=", "iwanke", "ya?"] ``` @@ -57,9 +57,9 @@ to_kana("irankarapte. e=iwanke ya?") **JS:** ```js -import { to_kana } from "ainu-utils"; +import { toKana } from "ainu-utils"; -to_kana("irankarapte. e=iwanke ya?"); +toKana("irankarapte. e=iwanke ya?"); // => "イランカラㇷ゚テ。 エイワンケ ヤ?" 
``` @@ -79,9 +79,9 @@ number_to_words(91) **JS:** ```js -import { number_to_words } from "ainu-utils"; +import { numberToWords } from "ainu-utils"; -number_to_words(91); +numberToWords(91); // => "sine ikasma wan easiknehotne" ``` diff --git a/ainu-utils-js/Cargo.toml b/ainu-utils-js/Cargo.toml index 1dd0426..f38e3f0 100644 --- a/ainu-utils-js/Cargo.toml +++ b/ainu-utils-js/Cargo.toml @@ -12,3 +12,4 @@ crate-type = ["cdylib", "rlib"] [dependencies] ainu-utils = { path = "../ainu-utils" } wasm-bindgen = "0.2.114" +js-sys = "0.3" diff --git a/ainu-utils-js/src/lib.rs b/ainu-utils-js/src/lib.rs index 87ef8b6..072719c 100644 --- a/ainu-utils-js/src/lib.rs +++ b/ainu-utils-js/src/lib.rs @@ -1,17 +1,22 @@ use ainu_utils::{kana, numbers, syllables, tokenizer}; +use js_sys::Reflect; use wasm_bindgen::prelude::*; #[wasm_bindgen] -pub fn tokenize(text: &str, keep_whitespace: bool) -> Vec<String> { +pub fn tokenize(text: &str, options: JsValue) -> Vec<String> { + let keep_whitespace = Reflect::get(&options, &JsValue::from_str("keepWhitespace")) .ok() .and_then(|v| v.as_bool()) .unwrap_or(false); tokenizer::tokenize(text, keep_whitespace) } -#[wasm_bindgen] +#[wasm_bindgen(js_name = toKana)] pub fn to_kana(text: &str) -> String { kana::to_kana(text) } -#[wasm_bindgen] +#[wasm_bindgen(js_name = numberToWords)] pub fn number_to_words(input: i32) -> String { numbers::parse(input).unwrap().to_string() } diff --git a/ainu-utils-js/tests/index.spec.js b/ainu-utils-js/tests/index.spec.js index 04fae7e..d20b09c 100644 --- a/ainu-utils-js/tests/index.spec.js +++ b/ainu-utils-js/tests/index.spec.js @@ -2,6 +2,6 @@ import { test, expect } from "vitest"; import { tokenize } from "../dist/index.js"; test("tokenize", () => { - const tokens = tokenize("irankarapte. e=iwanke ya?", false); + const tokens = tokenize("irankarapte. e=iwanke ya?", { keepWhitespace: false }); expect(tokens).toEqual(["irankarapte", ".", "e=", "iwanke", "ya", "?"]); }); diff --git a/ainu-utils-python/ainu_utils.pyi b/ainu-utils-python/ainu_utils.pyi index 8e99e3c..ce965a7 100644 --- a/ainu-utils-python/ainu_utils.pyi +++ b/ainu-utils-python/ainu_utils.pyi @@ -1,5 +1,5 @@ # https://www.maturin.rs/project_layout#adding-python-type-information -def tokenize(text: str, keep_whitespace: bool) -> list[str]: ... +def tokenize(text: str, *, keep_whitespace: bool = False) -> list[str]: ... def to_kana(text: str) -> str: ... def number_to_words(number: int) -> str: ... def syllabicate(text: str) -> list[str]: ... diff --git a/ainu-utils-python/src/lib.rs b/ainu-utils-python/src/lib.rs index 679b083..b3c2d15 100644 --- a/ainu-utils-python/src/lib.rs +++ b/ainu-utils-python/src/lib.rs @@ -3,6 +3,7 @@ extern crate ainu_utils as ainu_utils_rust; use pyo3::prelude::*; #[pyfunction] +#[pyo3(signature = (text, *, keep_whitespace = false))] fn tokenize(text: &str, keep_whitespace: bool) -> Vec<String> { ainu_utils_rust::tokenizer::tokenize(text, keep_whitespace) }