Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions config/data/config_sample.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ buffer_size = 64
auto_capitalize = false
auto_commit = false
page_size = 10
suggestions = false

[data]
sample = { path = "./data_sample.toml" }
Expand Down
4 changes: 4 additions & 0 deletions config/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,7 @@ pub struct Config {
/// auto_capitalize = false
/// page_size = 10
/// auto_commit = true
/// suggestions = true
/// # "#.to_owned());
/// #
/// # // Loads the config file.
Expand All @@ -193,6 +194,8 @@ pub struct CoreConfig {
pub page_size: Option<usize>,
/// Whether the predicate should be automatically committed.
pub auto_commit: Option<bool>,
/// Whether suggestions should be shown automatically.
pub suggestions: Option<bool>,
}

#[derive(Deserialize, Debug, Clone)]
Expand Down Expand Up @@ -443,6 +446,7 @@ mod tests {
assert_eq!(core.buffer_size.unwrap(), 64);
assert!(!core.auto_capitalize.unwrap());
assert!(!core.auto_commit.unwrap());
assert!(!core.suggestions.unwrap());
assert_eq!(core.page_size.unwrap(), 10);
true
}),
Expand Down
9 changes: 6 additions & 3 deletions service/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,18 @@ pub fn run(
.map(|(key, value)| vec![key.as_str(), value.as_str()])
.collect(),
);
let (buffer_size, auto_commit, page_size) = config
let (buffer_size, auto_commit, page_size, suggestions) = config
.core
.as_ref()
.map(|core| {
(
core.buffer_size.unwrap_or(32),
core.auto_commit.unwrap_or(false),
core.page_size.unwrap_or(10),
core.suggestions.unwrap_or(false),
)
})
.unwrap_or((32, false, 10));
.unwrap_or((32, false, 10, false));
let mut keyboard = Enigo::new(&Default::default()).unwrap();
let mut preprocessor = Preprocessor::new(Rc::new(memory), buffer_size);
#[cfg(not(feature = "rhai"))]
Expand Down Expand Up @@ -150,7 +151,9 @@ pub fn run(
} else if auto_commit && predicate.can_commit {
preprocessor.commit(predicate.texts[0].to_owned());
} else {
frontend_tx1.send(GUICmd::Predicate(predicate))?;
if suggestions {
frontend_tx1.send(GUICmd::Predicate(predicate))?;
}
Copy link
Member

@pythonbrad pythonbrad Oct 5, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@omer-biz

Thanks for your contribution and interest in this project.

The objective of making autocompletion/autosuggestion on-demand is to speed up typing with afrim.
Currently, you only make the display on-demand — the computation still runs unconditionally.

To inhibit the translation, you can check from service/src/lib.rs:145

}

Ok(())
Expand Down
Loading