Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 0 additions & 50 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -71,53 +71,3 @@ jobs:
tool: cargo-audit
- name: cargo audit
run: cargo audit

# Build both architectures on push to main to catch cross-compilation
# regressions early. No artifact upload — that's for release.yml only.
build:
name: Build (${{ matrix.target }})
needs: [lint, test, audit]
if: github.event_name == 'push'
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- target: x86_64-unknown-linux-musl
runner: ubuntu-latest
cross_packages: musl-tools
- target: aarch64-unknown-linux-musl
runner: ubuntu-latest
cross_packages: musl-tools gcc-aarch64-linux-gnu
linker: aarch64-linux-gnu-gcc
- target: aarch64-apple-darwin
runner: macos-latest
- target: x86_64-apple-darwin
runner: macos-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

- name: Install Rust
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
with:
targets: ${{ matrix.target }}

- name: Cache cargo
uses: Swatinem/rust-cache@23869a5bd66c73db3c0ac40331f3206eb23791dc # v2
with:
key: build-${{ matrix.target }}
cache-targets: "false"

- name: Install cross-compilation tools
if: matrix.cross_packages
run: sudo apt-get update && sudo apt-get install -y ${{ matrix.cross_packages }}

- name: Set cross-linker
if: matrix.linker
run: |
echo "CARGO_TARGET_$(echo '${{ matrix.target }}' | tr '[:lower:]-' '[:upper:]_')_LINKER=${{ matrix.linker }}" >> "$GITHUB_ENV"

- name: Build release binary
run: cargo build --release --target ${{ matrix.target }}

- name: Verify binary
run: file target/${{ matrix.target }}/release/op-bridge
10 changes: 8 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -293,11 +293,17 @@ async fn run_daemon(
info!("SIGHUP received, re-resolving secrets...");
let mut temp = store::SecretStore::new();
let (ok, fail) = resolver::resolve_all(&refs, &mut temp).await;
{
if fail == 0 {
// All secrets resolved — safe to swap
let mut s = store.write().await;
s.replace_with(temp);
info!("re-resolved {ok} secret(s)");
} else {
// Partial failure — merge successes, keep stale values for failures
let mut s = store.write().await;
s.merge_from(temp);
info!("re-resolved {ok} secret(s), {fail} failed (kept stale values for failures)");
}
info!("re-resolved {ok} secret(s), {fail} failed");
}

_ = sigterm.recv() => {
Expand Down
8 changes: 4 additions & 4 deletions src/socket.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ async fn handle_set(line: &str, store: &Arc<RwLock<SecretStore>>) -> String {

let mut parts = rest.splitn(3, ' ');
let name = match parts.next() {
Some(n) if !n.is_empty() => n,
Some(n) if !n.is_empty() => n.to_ascii_uppercase(),
_ => return "ERR SET requires: <name> <op://uri> <value>\n".to_string(),
};
let uri = match parts.next() {
Expand All @@ -140,19 +140,19 @@ async fn handle_set(line: &str, store: &Arc<RwLock<SecretStore>>) -> String {
};

if let Err(e) = resolver::op_write(uri, value).await {
error!("SET write-back failed for {name}: {e}");
error!("SET write-back failed for {}: {e}", name);
return format!("ERR write-back failed: {e}\n");
}

{
let mut s = store.write().await;
s.insert_with_uri(
name.to_string(),
name.clone(),
secrecy::SecretString::from(value.to_string()),
uri.to_string(),
);
}

info!("SET {name} -> {uri} ({} chars)", value.len());
info!("SET {} -> {uri} ({} chars)", name, value.len());
"OK\n".to_string()
}
16 changes: 13 additions & 3 deletions src/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,20 @@ impl SecretStore {

/// Atomically replaces the entire store contents with `other`.
///
/// Used by the SIGHUP refresh path to minimize write-lock hold time:
/// secrets are resolved into a temporary store, then swapped in with a
/// single assignment (microseconds vs. seconds of resolution time).
/// Used by the SIGHUP refresh path when all secrets resolve successfully.
pub fn replace_with(&mut self, other: SecretStore) {
self.entries = other.entries;
}

/// Merges successfully-resolved secrets from `other` into this store.
///
/// Secrets present in `other` overwrite existing entries with the same
/// name. Secrets NOT in `other` (i.e., those that failed to re-resolve)
/// are kept at their previous stale values. This prevents a transient
/// 1Password outage from wiping the in-memory cache of working secrets.
pub fn merge_from(&mut self, other: SecretStore) {
    // Map `Extend` has insert-or-overwrite semantics for duplicate keys,
    // identical to the manual insert loop but expressed idiomatically.
    self.entries.extend(other.entries);
}
}
23 changes: 21 additions & 2 deletions src/watcher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
//!
//! When enabled via `--watch` flags on the daemon, this module monitors
//! specified files for changes and automatically writes their contents back
//! to 1Password. Designed for OpenClaw containers where Claude Code OAuth
//! tokens get refreshed and need to be persisted.
//! to 1Password. Designed for agent containers where OAuth tokens or other
//! credentials get refreshed at runtime and need to be persisted.
//!
//! ## Watch spec format
//!
Expand Down Expand Up @@ -132,6 +132,25 @@ pub async fn start_watchers(
path.display(),
entry.uri
);
// Check file size before reading (prevent OOM from large files)
const MAX_FILE_SIZE: u64 = 1_048_576; // 1 MB
match tokio::fs::metadata(path).await {
Ok(meta) if meta.len() > MAX_FILE_SIZE => {
error!(
"file {} is too large ({} bytes, max {}), skipping write-back",
path.display(),
meta.len(),
MAX_FILE_SIZE
);
continue;
}
Err(e) => {
error!("failed to stat {}: {}", path.display(), e);
continue;
}
_ => {}
}

match tokio::fs::read_to_string(path).await {
Ok(contents) => {
let value = contents.trim().to_string();
Expand Down