From 2b69e4682ae052227e8e0127ca13f7e25c3f822c Mon Sep 17 00:00:00 2001
From: jinmaa
Date: Tue, 1 Apr 2025 21:09:44 -0400
Subject: [PATCH 1/4] transaction io
---
-H | 0
.gitignore | 4 +-
rest.rs | 1391 +++++++++++++++++
src/pages/BitcoinAddressExplorer.jsx | 141 +-
src/pages/Home.jsx | 7 +
.../TransactionInputsOutputsExplorer.jsx | 853 ++++++++++
src/routes.jsx | 5 +
src/sdk/esplora.js | 120 +-
8 files changed, 2458 insertions(+), 63 deletions(-)
create mode 100644 -H
create mode 100644 rest.rs
create mode 100644 src/pages/TransactionInputsOutputsExplorer.jsx
diff --git a/-H b/-H
new file mode 100644
index 0000000..e69de29
diff --git a/.gitignore b/.gitignore
index afb9f7b..41beeca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,9 @@
# windsurf rules
.windsurfrules
-
+./backend
+backend
+mempool
# Logs
logs
*.log
diff --git a/rest.rs b/rest.rs
new file mode 100644
index 0000000..329bb59
--- /dev/null
+++ b/rest.rs
@@ -0,0 +1,1391 @@
+use crate::chain::{
+ address, BlockHash, Network, OutPoint, Script, Sequence, Transaction, TxIn, TxMerkleNode,
+ TxOut, Txid,
+};
+use crate::config::Config;
+use crate::errors;
+use crate::new_index::{compute_script_hash, Query, SpendingInput, Utxo};
+use crate::util::{
+ create_socket, electrum_merkle, extract_tx_prevouts, get_innerscripts, get_tx_fee, has_prevout,
+ is_coinbase, BlockHeaderMeta, BlockId, FullHash, ScriptToAddr, ScriptToAsm, TransactionStatus,
+ DEFAULT_BLOCKHASH,
+};
+
+#[cfg(not(feature = "liquid"))]
+use bitcoin::consensus::encode;
+
+use bitcoin::hashes::FromSliceError as HashError;
+use bitcoin::hex::{self, DisplayHex, FromHex};
+use hyper::service::{make_service_fn, service_fn};
+use hyper::{Body, Method, Response, Server, StatusCode};
+use hyperlocal::UnixServerExt;
+use tokio::sync::oneshot;
+
+use std::fs;
+use std::str::FromStr;
+
+use electrs_macros::trace;
+
+#[cfg(feature = "liquid")]
+use {
+ crate::elements::{ebcompact::*, peg::PegoutValue, AssetSorting, IssuanceValue},
+ elements::{encode, secp256k1_zkp as zkp, AssetId},
+};
+
+use serde::Serialize;
+use serde_json;
+use std::collections::HashMap;
+use std::num::ParseIntError;
+use std::os::unix::fs::FileTypeExt;
+use std::sync::Arc;
+use std::thread;
+use url::form_urlencoded;
+
+const CHAIN_TXS_PER_PAGE: usize = 25;
+const MAX_MEMPOOL_TXS: usize = 50;
+const BLOCK_LIMIT: usize = 10;
+const ADDRESS_SEARCH_LIMIT: usize = 10;
+
+#[cfg(feature = "liquid")]
+const ASSETS_PER_PAGE: usize = 25;
+#[cfg(feature = "liquid")]
+const ASSETS_MAX_PER_PAGE: usize = 100;
+
+const TTL_LONG: u32 = 157_784_630; // ttl for static resources (5 years)
+const TTL_SHORT: u32 = 10; // ttl for volatile resources
+const TTL_MEMPOOL_RECENT: u32 = 5; // ttl for GET /mempool/recent
+const CONF_FINAL: usize = 10; // reorgs deeper than this are considered unlikely
+
+#[derive(Serialize, Deserialize)]
+struct BlockValue {
+ id: BlockHash,
+ height: u32,
+ version: u32,
+ timestamp: u32,
+ tx_count: u32,
+ size: u32,
+ weight: u64,
+ merkle_root: TxMerkleNode,
+ previousblockhash: Option<BlockHash>,
+ mediantime: u32,
+
+ #[cfg(not(feature = "liquid"))]
+ nonce: u32,
+ #[cfg(not(feature = "liquid"))]
+ bits: bitcoin::pow::CompactTarget,
+ #[cfg(not(feature = "liquid"))]
+ difficulty: f64,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ ext: Option<elements::BlockExtData>,
+}
+
+impl BlockValue {
+ #[cfg_attr(feature = "liquid", allow(unused_variables))]
+ fn new(blockhm: BlockHeaderMeta) -> Self {
+ let header = blockhm.header_entry.header();
+ BlockValue {
+ id: header.block_hash(),
+ height: blockhm.header_entry.height() as u32,
+ #[cfg(not(feature = "liquid"))]
+ version: header.version.to_consensus() as u32,
+ #[cfg(feature = "liquid")]
+ version: header.version,
+ timestamp: header.time,
+ tx_count: blockhm.meta.tx_count,
+ size: blockhm.meta.size,
+ weight: blockhm.meta.weight as u64,
+ merkle_root: header.merkle_root,
+ previousblockhash: if header.prev_blockhash != *DEFAULT_BLOCKHASH {
+ Some(header.prev_blockhash)
+ } else {
+ None
+ },
+ mediantime: blockhm.mtp,
+
+ #[cfg(not(feature = "liquid"))]
+ bits: header.bits,
+ #[cfg(not(feature = "liquid"))]
+ nonce: header.nonce,
+ #[cfg(not(feature = "liquid"))]
+ difficulty: header.difficulty_float(),
+
+ #[cfg(feature = "liquid")]
+ ext: Some(header.ext.clone()),
+ }
+ }
+}
+
+#[derive(Serialize)]
+struct TransactionValue {
+ txid: Txid,
+ version: u32,
+ locktime: u32,
+ vin: Vec<TxInValue>,
+ vout: Vec<TxOutValue>,
+ size: u32,
+ weight: u64,
+ fee: u64,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ status: Option<TransactionStatus>,
+
+ #[cfg(feature = "liquid")]
+ discount_vsize: usize,
+
+ #[cfg(feature = "liquid")]
+ discount_weight: usize,
+}
+
+impl TransactionValue {
+ fn new(
+ tx: Transaction,
+ blockid: Option<BlockId>,
+ txos: &HashMap<OutPoint, TxOut>,
+ config: &Config,
+ ) -> Self {
+ let prevouts = extract_tx_prevouts(&tx, &txos, true);
+ let vins: Vec<TxInValue> = tx
+ .input
+ .iter()
+ .enumerate()
+ .map(|(index, txin)| {
+ TxInValue::new(txin, prevouts.get(&(index as u32)).cloned(), config)
+ })
+ .collect();
+ let vouts: Vec<TxOutValue> = tx
+ .output
+ .iter()
+ .map(|txout| TxOutValue::new(txout, config))
+ .collect();
+
+ let fee = get_tx_fee(&tx, &prevouts, config.network_type);
+
+ let weight = tx.weight();
+ #[cfg(not(feature = "liquid"))] // rust-bitcoin has a wrapper Weight type
+ let weight = weight.to_wu();
+
+ TransactionValue {
+ txid: tx.compute_txid(),
+ #[cfg(not(feature = "liquid"))]
+ version: tx.version.0 as u32,
+ #[cfg(feature = "liquid")]
+ version: tx.version as u32,
+ locktime: tx.lock_time.to_consensus_u32(),
+ vin: vins,
+ vout: vouts,
+ size: tx.total_size() as u32,
+ weight: weight as u64,
+ fee,
+ status: Some(TransactionStatus::from(blockid)),
+
+ #[cfg(feature = "liquid")]
+ discount_vsize: tx.discount_vsize(),
+
+ #[cfg(feature = "liquid")]
+ discount_weight: tx.discount_weight(),
+ }
+ }
+}
+
+#[derive(Serialize, Clone)]
+struct TxInValue {
+ txid: Txid,
+ vout: u32,
+ prevout: Option<TxOutValue>,
+ scriptsig: Script,
+ scriptsig_asm: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ witness: Option<Vec<String>>,
+ is_coinbase: bool,
+ sequence: Sequence,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ inner_redeemscript_asm: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ inner_witnessscript_asm: Option<String>,
+
+ #[cfg(feature = "liquid")]
+ is_pegin: bool,
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ issuance: Option<IssuanceValue>,
+}
+
+impl TxInValue {
+ fn new(txin: &TxIn, prevout: Option<&TxOut>, config: &Config) -> Self {
+ let witness = &txin.witness;
+ #[cfg(feature = "liquid")]
+ let witness = &witness.script_witness;
+
+ let witness = if !witness.is_empty() {
+ Some(
+ witness
+ .iter()
+ .map(DisplayHex::to_lower_hex_string)
+ .collect(),
+ )
+ } else {
+ None
+ };
+
+ let is_coinbase = is_coinbase(&txin);
+
+ let innerscripts = prevout.map(|prevout| get_innerscripts(&txin, &prevout));
+
+ TxInValue {
+ txid: txin.previous_output.txid,
+ vout: txin.previous_output.vout,
+ prevout: prevout.map(|prevout| TxOutValue::new(prevout, config)),
+ scriptsig_asm: txin.script_sig.to_asm(),
+ witness,
+
+ inner_redeemscript_asm: innerscripts
+ .as_ref()
+ .and_then(|i| i.redeem_script.as_ref())
+ .map(ScriptToAsm::to_asm),
+ inner_witnessscript_asm: innerscripts
+ .as_ref()
+ .and_then(|i| i.witness_script.as_ref())
+ .map(ScriptToAsm::to_asm),
+
+ is_coinbase,
+ sequence: txin.sequence,
+ #[cfg(feature = "liquid")]
+ is_pegin: txin.is_pegin,
+ #[cfg(feature = "liquid")]
+ issuance: if txin.has_issuance() {
+ Some(IssuanceValue::from(txin))
+ } else {
+ None
+ },
+
+ scriptsig: txin.script_sig.clone(),
+ }
+ }
+}
+
+#[derive(Serialize, Clone)]
+struct TxOutValue {
+ scriptpubkey: Script,
+ scriptpubkey_asm: String,
+ scriptpubkey_type: String,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ scriptpubkey_address: Option<String>,
+
+ #[cfg(not(feature = "liquid"))]
+ value: u64,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ value: Option<u64>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ valuecommitment: Option<zkp::PedersenCommitment>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ asset: Option<AssetId>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ assetcommitment: Option<zkp::Generator>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pegout: Option<PegoutValue>,
+}
+
+impl TxOutValue {
+ fn new(txout: &TxOut, config: &Config) -> Self {
+ #[cfg(not(feature = "liquid"))]
+ let value = txout.value.to_sat();
+ #[cfg(feature = "liquid")]
+ let value = txout.value.explicit();
+
+ #[cfg(not(feature = "liquid"))]
+ let is_fee = false;
+ #[cfg(feature = "liquid")]
+ let is_fee = txout.is_fee();
+
+ let script = &txout.script_pubkey;
+ let script_asm = script.to_asm();
+ let script_addr = script.to_address_str(config.network_type);
+
+ // TODO should the following something to put inside rust-elements lib?
+ let script_type = if is_fee {
+ "fee"
+ } else if script.is_empty() {
+ "empty"
+ } else if script.is_op_return() {
+ "op_return"
+ } else if script.is_p2pk() {
+ "p2pk"
+ } else if script.is_p2pkh() {
+ "p2pkh"
+ } else if script.is_p2sh() {
+ "p2sh"
+ } else if script.is_p2wpkh() {
+ "v0_p2wpkh"
+ } else if script.is_p2wsh() {
+ "v0_p2wsh"
+ } else if script.is_p2tr() {
+ "v1_p2tr"
+ } else if script.is_op_return() {
+ "provably_unspendable"
+ } else {
+ "unknown"
+ };
+
+ #[cfg(feature = "liquid")]
+ let pegout = PegoutValue::from_txout(txout, config.network_type, config.parent_network);
+
+ TxOutValue {
+ scriptpubkey: script.clone(),
+ scriptpubkey_asm: script_asm,
+ scriptpubkey_address: script_addr,
+ scriptpubkey_type: script_type.to_string(),
+ value,
+ #[cfg(feature = "liquid")]
+ valuecommitment: txout.value.commitment(),
+ #[cfg(feature = "liquid")]
+ asset: txout.asset.explicit(),
+ #[cfg(feature = "liquid")]
+ assetcommitment: txout.asset.commitment(),
+ #[cfg(feature = "liquid")]
+ pegout,
+ }
+ }
+}
+
+#[derive(Serialize)]
+struct UtxoValue {
+ txid: Txid,
+ vout: u32,
+ status: TransactionStatus,
+
+ #[cfg(not(feature = "liquid"))]
+ value: u64,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ value: Option<u64>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ valuecommitment: Option<zkp::PedersenCommitment>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ asset: Option<AssetId>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ assetcommitment: Option<zkp::Generator>,
+
+ // nonces are never explicit
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ noncecommitment: Option<zkp::PublicKey>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ surjection_proof: Option<zkp::SurjectionProof>,
+
+ #[cfg(feature = "liquid")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ range_proof: Option<zkp::RangeProof>,
+}
+impl From<Utxo> for UtxoValue {
+ fn from(utxo: Utxo) -> Self {
+ UtxoValue {
+ txid: utxo.txid,
+ vout: utxo.vout,
+ status: TransactionStatus::from(utxo.confirmed),
+
+ #[cfg(not(feature = "liquid"))]
+ value: utxo.value,
+
+ #[cfg(feature = "liquid")]
+ value: utxo.value.explicit(),
+ #[cfg(feature = "liquid")]
+ valuecommitment: utxo.value.commitment(),
+ #[cfg(feature = "liquid")]
+ asset: utxo.asset.explicit(),
+ #[cfg(feature = "liquid")]
+ assetcommitment: utxo.asset.commitment(),
+ #[cfg(feature = "liquid")]
+ noncecommitment: utxo.nonce.commitment(),
+ #[cfg(feature = "liquid")]
+ surjection_proof: utxo.witness.surjection_proof.map(|p| *p),
+ #[cfg(feature = "liquid")]
+ range_proof: utxo.witness.rangeproof.map(|p| *p),
+ }
+ }
+}
+
+#[derive(Serialize)]
+struct SpendingValue {
+ spent: bool,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ txid: Option<Txid>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ vin: Option<u32>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ status: Option<TransactionStatus>,
+}
+impl From<SpendingInput> for SpendingValue {
+ fn from(spend: SpendingInput) -> Self {
+ SpendingValue {
+ spent: true,
+ txid: Some(spend.txid),
+ vin: Some(spend.vin),
+ status: Some(TransactionStatus::from(spend.confirmed)),
+ }
+ }
+}
+impl Default for SpendingValue {
+ fn default() -> Self {
+ SpendingValue {
+ spent: false,
+ txid: None,
+ vin: None,
+ status: None,
+ }
+ }
+}
+
+fn ttl_by_depth(height: Option<usize>, query: &Query) -> u32 {
+ height.map_or(TTL_SHORT, |height| {
+ if query.chain().best_height() - height >= CONF_FINAL {
+ TTL_LONG
+ } else {
+ TTL_SHORT
+ }
+ })
+}
+
+fn prepare_txs(
+ txs: Vec<(Transaction, Option<BlockId>)>,
+ query: &Query,
+ config: &Config,
+) -> Vec<TransactionValue> {
+ let outpoints = txs
+ .iter()
+ .flat_map(|(tx, _)| {
+ tx.input
+ .iter()
+ .filter(|txin| has_prevout(txin))
+ .map(|txin| txin.previous_output)
+ })
+ .collect();
+
+ let prevouts = query.lookup_txos(outpoints);
+
+ txs.into_iter()
+ .map(|(tx, blockid)| TransactionValue::new(tx, blockid, &prevouts, config))
+ .collect()
+}
+
+#[tokio::main]
+async fn run_server(config: Arc<Config>, query: Arc<Query>, rx: oneshot::Receiver<()>) {
+ let addr = &config.http_addr;
+ let socket_file = &config.http_socket_file;
+
+ let config = Arc::clone(&config);
+ let query = Arc::clone(&query);
+
+ let make_service_fn_inn = || {
+ let query = Arc::clone(&query);
+ let config = Arc::clone(&config);
+
+ async move {
+ Ok::<_, hyper::Error>(service_fn(move |req| {
+ let query = Arc::clone(&query);
+ let config = Arc::clone(&config);
+
+ async move {
+ let method = req.method().clone();
+ let uri = req.uri().clone();
+ let body = hyper::body::to_bytes(req.into_body()).await?;
+
+ let mut resp = handle_request(method, uri, body, &query, &config)
+ .unwrap_or_else(|err| {
+ warn!("{:?}", err);
+ Response::builder()
+ .status(err.0)
+ .header("Content-Type", "text/plain")
+ .body(Body::from(err.1))
+ .unwrap()
+ });
+ if let Some(ref origins) = config.cors {
+ resp.headers_mut()
+ .insert("Access-Control-Allow-Origin", origins.parse().unwrap());
+ }
+ Ok::<_, hyper::Error>(resp)
+ }
+ }))
+ }
+ };
+
+ let server = match socket_file {
+ None => {
+ info!("REST server running on {}", addr);
+
+ let socket = create_socket(&addr);
+ socket.listen(511).expect("setting backlog failed");
+
+ Server::from_tcp(socket.into())
+ .expect("Server::from_tcp failed")
+ .serve(make_service_fn(move |_| make_service_fn_inn()))
+ .with_graceful_shutdown(async {
+ rx.await.ok();
+ })
+ .await
+ }
+ Some(path) => {
+ if let Ok(meta) = fs::metadata(&path) {
+ // Cleanup socket file left by previous execution
+ if meta.file_type().is_socket() {
+ fs::remove_file(path).ok();
+ }
+ }
+
+ info!("REST server running on unix socket {}", path.display());
+
+ Server::bind_unix(path)
+ .expect("Server::bind_unix failed")
+ .serve(make_service_fn(move |_| make_service_fn_inn()))
+ .with_graceful_shutdown(async {
+ rx.await.ok();
+ })
+ .await
+ }
+ };
+
+ if let Err(e) = server {
+ eprintln!("server error: {}", e);
+ }
+}
+
+pub fn start(config: Arc<Config>, query: Arc<Query>) -> Handle {
+ let (tx, rx) = oneshot::channel::<()>();
+
+ Handle {
+ tx,
+ thread: thread::spawn(move || {
+ run_server(config, query, rx);
+ }),
+ }
+}
+
+pub struct Handle {
+ tx: oneshot::Sender<()>,
+ thread: thread::JoinHandle<()>,
+}
+
+impl Handle {
+ pub fn stop(self) {
+ self.tx.send(()).expect("failed to send shutdown signal");
+ self.thread.join().expect("REST server failed");
+ }
+}
+
+#[trace]
+fn handle_request(
+ method: Method,
+ uri: hyper::Uri,
+ body: hyper::body::Bytes,
+ query: &Query,
+ config: &Config,
+) -> Result<Response<Body>, HttpError> {
+ // TODO it looks hyper does not have routing and query parsing :(
+ let path: Vec<&str> = uri.path().split('/').skip(1).collect();
+ let query_params = match uri.query() {
+ Some(value) => form_urlencoded::parse(&value.as_bytes())
+ .into_owned()
+ .collect::<HashMap<_, _>>(),
+ None => HashMap::new(),
+ };
+
+ info!("handle {:?} {:?}", method, uri);
+ match (
+ &method,
+ path.get(0),
+ path.get(1),
+ path.get(2),
+ path.get(3),
+ path.get(4),
+ ) {
+ (&Method::GET, Some(&"blocks"), Some(&"tip"), Some(&"hash"), None, None) => http_message(
+ StatusCode::OK,
+ query.chain().best_hash().to_string(),
+ TTL_SHORT,
+ ),
+
+ (&Method::GET, Some(&"blocks"), Some(&"tip"), Some(&"height"), None, None) => http_message(
+ StatusCode::OK,
+ query.chain().best_height().to_string(),
+ TTL_SHORT,
+ ),
+
+ (&Method::GET, Some(&"blocks"), start_height, None, None, None) => {
+ let start_height = start_height.and_then(|height| height.parse::<usize>().ok());
+ blocks(&query, start_height)
+ }
+ (&Method::GET, Some(&"block-height"), Some(height), None, None, None) => {
+ let height = height.parse::<usize>()?;
+ let header = query
+ .chain()
+ .header_by_height(height)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+ let ttl = ttl_by_depth(Some(height), query);
+ http_message(StatusCode::OK, header.hash().to_string(), ttl)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), None, None, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let blockhm = query
+ .chain()
+ .get_block_with_meta(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+ let block_value = BlockValue::new(blockhm);
+ json_response(block_value, TTL_LONG)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"status"), None, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let status = query.chain().get_block_status(&hash);
+ let ttl = ttl_by_depth(status.height, query);
+ json_response(status, ttl)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"txids"), None, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let txids = query
+ .chain()
+ .get_block_txids(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+ json_response(txids, TTL_LONG)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"header"), None, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let header = query
+ .chain()
+ .get_block_header(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+
+ let header_hex = encode::serialize_hex(&header);
+ http_message(StatusCode::OK, header_hex, TTL_LONG)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"raw"), None, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let raw = query
+ .chain()
+ .get_block_raw(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/octet-stream")
+ .header("Cache-Control", format!("public, max-age={:}", TTL_LONG))
+ .body(Body::from(raw))
+ .unwrap())
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"txid"), Some(index), None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let index: usize = index.parse()?;
+ let txids = query
+ .chain()
+ .get_block_txids(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+ if index >= txids.len() {
+ bail!(HttpError::not_found("tx index out of range".to_string()));
+ }
+ http_message(StatusCode::OK, txids[index].to_string(), TTL_LONG)
+ }
+ (&Method::GET, Some(&"block"), Some(hash), Some(&"txs"), start_index, None) => {
+ let hash = BlockHash::from_str(hash)?;
+ let txids = query
+ .chain()
+ .get_block_txids(&hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+
+ let start_index = start_index
+ .map_or(0u32, |el| el.parse().unwrap_or(0))
+ .max(0u32) as usize;
+ if start_index >= txids.len() {
+ bail!(HttpError::not_found("start index out of range".to_string()));
+ } else if start_index % CHAIN_TXS_PER_PAGE != 0 {
+ bail!(HttpError::from(format!(
+ "start index must be a multipication of {}",
+ CHAIN_TXS_PER_PAGE
+ )));
+ }
+
+ // blockid_by_hash() only returns the BlockId for non-orphaned blocks,
+ // or None for orphaned
+ let confirmed_blockid = query.chain().blockid_by_hash(&hash);
+
+ let txs = txids
+ .iter()
+ .skip(start_index)
+ .take(CHAIN_TXS_PER_PAGE)
+ .map(|txid| {
+ query
+ .lookup_txn(&txid)
+ .map(|tx| (tx, confirmed_blockid.clone()))
+ .ok_or_else(|| "missing tx".to_string())
+ })
+ .collect::<Result<Vec<(Transaction, Option<BlockId>)>, _>>()?;
+
+ // XXX orphaned blocks always get TTL_SHORT
+ let ttl = ttl_by_depth(confirmed_blockid.map(|b| b.height), query);
+
+ json_response(prepare_txs(txs, query, config), ttl)
+ }
+ (&Method::GET, Some(script_type @ &"address"), Some(script_str), None, None, None)
+ | (&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), None, None, None) => {
+ let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
+ let stats = query.stats(&script_hash[..]);
+ json_response(
+ json!({
+ *script_type: script_str,
+ "chain_stats": stats.0,
+ "mempool_stats": stats.1,
+ }),
+ TTL_SHORT,
+ )
+ }
+ (
+ &Method::GET,
+ Some(script_type @ &"address"),
+ Some(script_str),
+ Some(&"txs"),
+ None,
+ None,
+ )
+ | (
+ &Method::GET,
+ Some(script_type @ &"scripthash"),
+ Some(script_str),
+ Some(&"txs"),
+ None,
+ None,
+ ) => {
+ let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
+
+ let mut txs = vec![];
+
+ txs.extend(
+ query
+ .mempool()
+ .history(&script_hash[..], MAX_MEMPOOL_TXS)
+ .into_iter()
+ .map(|tx| (tx, None)),
+ );
+
+ txs.extend(
+ query
+ .chain()
+ .history(&script_hash[..], None, CHAIN_TXS_PER_PAGE)
+ .into_iter()
+ .map(|(tx, blockid)| (tx, Some(blockid))),
+ );
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+
+ (
+ &Method::GET,
+ Some(script_type @ &"address"),
+ Some(script_str),
+ Some(&"txs"),
+ Some(&"chain"),
+ last_seen_txid,
+ )
+ | (
+ &Method::GET,
+ Some(script_type @ &"scripthash"),
+ Some(script_str),
+ Some(&"txs"),
+ Some(&"chain"),
+ last_seen_txid,
+ ) => {
+ let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
+ let last_seen_txid = last_seen_txid.and_then(|txid| Txid::from_str(txid).ok());
+
+ let txs = query
+ .chain()
+ .history(
+ &script_hash[..],
+ last_seen_txid.as_ref(),
+ CHAIN_TXS_PER_PAGE,
+ )
+ .into_iter()
+ .map(|(tx, blockid)| (tx, Some(blockid)))
+ .collect();
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+ (
+ &Method::GET,
+ Some(script_type @ &"address"),
+ Some(script_str),
+ Some(&"txs"),
+ Some(&"mempool"),
+ None,
+ )
+ | (
+ &Method::GET,
+ Some(script_type @ &"scripthash"),
+ Some(script_str),
+ Some(&"txs"),
+ Some(&"mempool"),
+ None,
+ ) => {
+ let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
+
+ let txs = query
+ .mempool()
+ .history(&script_hash[..], MAX_MEMPOOL_TXS)
+ .into_iter()
+ .map(|tx| (tx, None))
+ .collect();
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+
+ (
+ &Method::GET,
+ Some(script_type @ &"address"),
+ Some(script_str),
+ Some(&"utxo"),
+ None,
+ None,
+ )
+ | (
+ &Method::GET,
+ Some(script_type @ &"scripthash"),
+ Some(script_str),
+ Some(&"utxo"),
+ None,
+ None,
+ ) => {
+ let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
+ let utxos: Vec<UtxoValue> = query
+ .utxo(&script_hash[..])?
+ .into_iter()
+ .map(UtxoValue::from)
+ .collect();
+ // XXX paging?
+ json_response(utxos, TTL_SHORT)
+ }
+ (&Method::GET, Some(&"address-prefix"), Some(prefix), None, None, None) => {
+ if !config.address_search {
+ return Err(HttpError::from("address search disabled".to_string()));
+ }
+ let results = query.chain().address_search(prefix, ADDRESS_SEARCH_LIMIT);
+ json_response(results, TTL_SHORT)
+ }
+ (&Method::GET, Some(&"tx"), Some(hash), None, None, None) => {
+ let hash = Txid::from_str(hash)?;
+ let tx = query
+ .lookup_txn(&hash)
+ .ok_or_else(|| HttpError::not_found("Transaction not found".to_string()))?;
+ let blockid = query.chain().tx_confirming_block(&hash);
+ let ttl = ttl_by_depth(blockid.as_ref().map(|b| b.height), query);
+
+ let tx = prepare_txs(vec![(tx, blockid)], query, config).remove(0);
+
+ json_response(tx, ttl)
+ }
+ (&Method::GET, Some(&"tx"), Some(hash), Some(out_type @ &"hex"), None, None)
+ | (&Method::GET, Some(&"tx"), Some(hash), Some(out_type @ &"raw"), None, None) => {
+ let hash = Txid::from_str(hash)?;
+ let rawtx = query
+ .lookup_raw_txn(&hash)
+ .ok_or_else(|| HttpError::not_found("Transaction not found".to_string()))?;
+
+ let (content_type, body) = match *out_type {
+ "raw" => ("application/octet-stream", Body::from(rawtx)),
+ "hex" => ("text/plain", Body::from(rawtx.to_lower_hex_string())),
+ _ => unreachable!(),
+ };
+ let ttl = ttl_by_depth(query.get_tx_status(&hash).block_height, query);
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", content_type)
+ .header("Cache-Control", format!("public, max-age={:}", ttl))
+ .body(body)
+ .unwrap())
+ }
+ (&Method::GET, Some(&"tx"), Some(hash), Some(&"status"), None, None) => {
+ let hash = Txid::from_str(hash)?;
+ let status = query.get_tx_status(&hash);
+ let ttl = ttl_by_depth(status.block_height, query);
+ json_response(status, ttl)
+ }
+
+ (&Method::GET, Some(&"tx"), Some(hash), Some(&"merkle-proof"), None, None) => {
+ let hash = Txid::from_str(hash)?;
+ let blockid = query.chain().tx_confirming_block(&hash).ok_or_else(|| {
+ HttpError::not_found("Transaction not found or is unconfirmed".to_string())
+ })?;
+ let (merkle, pos) =
+ electrum_merkle::get_tx_merkle_proof(query.chain(), &hash, &blockid.hash)?;
+ let merkle: Vec<String> = merkle.into_iter().map(|txid| txid.to_string()).collect();
+ let ttl = ttl_by_depth(Some(blockid.height), query);
+ json_response(
+ json!({ "block_height": blockid.height, "merkle": merkle, "pos": pos }),
+ ttl,
+ )
+ }
+ #[cfg(not(feature = "liquid"))]
+ (&Method::GET, Some(&"tx"), Some(hash), Some(&"merkleblock-proof"), None, None) => {
+ let hash = Txid::from_str(hash)?;
+
+ let merkleblock = query.chain().get_merkleblock_proof(&hash).ok_or_else(|| {
+ HttpError::not_found("Transaction not found or is unconfirmed".to_string())
+ })?;
+
+ let height = query
+ .chain()
+ .height_by_hash(&merkleblock.header.block_hash());
+
+ http_message(
+ StatusCode::OK,
+ encode::serialize_hex(&merkleblock),
+ ttl_by_depth(height, query),
+ )
+ }
+ (&Method::GET, Some(&"tx"), Some(hash), Some(&"outspend"), Some(index), None) => {
+ let hash = Txid::from_str(hash)?;
+ let outpoint = OutPoint {
+ txid: hash,
+ vout: index.parse::<u32>()?,
+ };
+ let spend = query
+ .lookup_spend(&outpoint)
+ .map_or_else(SpendingValue::default, SpendingValue::from);
+ let ttl = ttl_by_depth(
+ spend
+ .status
+ .as_ref()
+ .and_then(|ref status| status.block_height),
+ query,
+ );
+ json_response(spend, ttl)
+ }
+ (&Method::GET, Some(&"tx"), Some(hash), Some(&"outspends"), None, None) => {
+ let hash = Txid::from_str(hash)?;
+ let tx = query
+ .lookup_txn(&hash)
+ .ok_or_else(|| HttpError::not_found("Transaction not found".to_string()))?;
+ let spends: Vec<SpendingValue> = query
+ .lookup_tx_spends(tx)
+ .into_iter()
+ .map(|spend| spend.map_or_else(SpendingValue::default, SpendingValue::from))
+ .collect();
+ // @TODO long ttl if all outputs are either spent long ago or unspendable
+ json_response(spends, TTL_SHORT)
+ }
+ (&Method::GET, Some(&"broadcast"), None, None, None, None)
+ | (&Method::POST, Some(&"tx"), None, None, None, None) => {
+ // accept both POST and GET for backward compatibility.
+ // GET will eventually be removed in favor of POST.
+ let txhex = match method {
+ Method::POST => String::from_utf8(body.to_vec())?,
+ Method::GET => query_params
+ .get("tx")
+ .cloned()
+ .ok_or_else(|| HttpError::from("Missing tx".to_string()))?,
+ _ => return http_message(StatusCode::METHOD_NOT_ALLOWED, "Invalid method", 0),
+ };
+ let txid = query.broadcast_raw(&txhex)?;
+ http_message(StatusCode::OK, txid.to_string(), 0)
+ }
+
+ (&Method::GET, Some(&"mempool"), None, None, None, None) => {
+ json_response(query.mempool().backlog_stats(), TTL_SHORT)
+ }
+ (&Method::GET, Some(&"mempool"), Some(&"txids"), None, None, None) => {
+ json_response(query.mempool().txids(), TTL_SHORT)
+ }
+ (&Method::GET, Some(&"mempool"), Some(&"recent"), None, None, None) => {
+ let mempool = query.mempool();
+ let recent = mempool.recent_txs_overview();
+ json_response(recent, TTL_MEMPOOL_RECENT)
+ }
+
+ (&Method::GET, Some(&"fee-estimates"), None, None, None, None) => {
+ json_response(query.estimate_fee_map(), TTL_SHORT)
+ }
+
+ #[cfg(feature = "liquid")]
+ (&Method::GET, Some(&"assets"), Some(&"registry"), None, None, None) => {
+ let start_index: usize = query_params
+ .get("start_index")
+ .and_then(|n| n.parse().ok())
+ .unwrap_or(0);
+
+ let limit: usize = query_params
+ .get("limit")
+ .and_then(|n| n.parse().ok())
+ .map(|n: usize| n.min(ASSETS_MAX_PER_PAGE))
+ .unwrap_or(ASSETS_PER_PAGE);
+
+ let sorting = AssetSorting::from_query_params(&query_params)?;
+
+ let (total_num, assets) = query.list_registry_assets(start_index, limit, sorting)?;
+
+ Ok(Response::builder()
+ // Disable caching because we don't currently support caching with query string params
+ .header("Cache-Control", "no-store")
+ .header("Content-Type", "application/json")
+ .header("X-Total-Results", total_num.to_string())
+ .body(Body::from(serde_json::to_string(&assets)?))
+ .unwrap())
+ }
+
+ #[cfg(feature = "liquid")]
+ (&Method::GET, Some(&"asset"), Some(asset_str), None, None, None) => {
+ let asset_id = AssetId::from_str(asset_str)?;
+ let asset_entry = query
+ .lookup_asset(&asset_id)?
+ .ok_or_else(|| HttpError::not_found("Asset id not found".to_string()))?;
+
+ json_response(asset_entry, TTL_SHORT)
+ }
+
+ #[cfg(feature = "liquid")]
+ (&Method::GET, Some(&"asset"), Some(asset_str), Some(&"txs"), None, None) => {
+ let asset_id = AssetId::from_str(asset_str)?;
+
+ let mut txs = vec![];
+
+ txs.extend(
+ query
+ .mempool()
+ .asset_history(&asset_id, MAX_MEMPOOL_TXS)
+ .into_iter()
+ .map(|tx| (tx, None)),
+ );
+
+ txs.extend(
+ query
+ .chain()
+ .asset_history(&asset_id, None, CHAIN_TXS_PER_PAGE)
+ .into_iter()
+ .map(|(tx, blockid)| (tx, Some(blockid))),
+ );
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+
+ #[cfg(feature = "liquid")]
+ (
+ &Method::GET,
+ Some(&"asset"),
+ Some(asset_str),
+ Some(&"txs"),
+ Some(&"chain"),
+ last_seen_txid,
+ ) => {
+ let asset_id = AssetId::from_str(asset_str)?;
+ let last_seen_txid = last_seen_txid.and_then(|txid| Txid::from_str(txid).ok());
+
+ let txs = query
+ .chain()
+ .asset_history(&asset_id, last_seen_txid.as_ref(), CHAIN_TXS_PER_PAGE)
+ .into_iter()
+ .map(|(tx, blockid)| (tx, Some(blockid)))
+ .collect();
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+
+ #[cfg(feature = "liquid")]
+ (&Method::GET, Some(&"asset"), Some(asset_str), Some(&"txs"), Some(&"mempool"), None) => {
+ let asset_id = AssetId::from_str(asset_str)?;
+
+ let txs = query
+ .mempool()
+ .asset_history(&asset_id, MAX_MEMPOOL_TXS)
+ .into_iter()
+ .map(|tx| (tx, None))
+ .collect();
+
+ json_response(prepare_txs(txs, query, config), TTL_SHORT)
+ }
+
+ #[cfg(feature = "liquid")]
+ (&Method::GET, Some(&"asset"), Some(asset_str), Some(&"supply"), param, None) => {
+ let asset_id = AssetId::from_str(asset_str)?;
+ let asset_entry = query
+ .lookup_asset(&asset_id)?
+ .ok_or_else(|| HttpError::not_found("Asset id not found".to_string()))?;
+
+ let supply = asset_entry
+ .supply()
+ .ok_or_else(|| HttpError::from("Asset supply is blinded".to_string()))?;
+ let precision = asset_entry.precision();
+
+ if param == Some(&"decimal") && precision > 0 {
+ let supply_dec = supply as f64 / 10u32.pow(precision.into()) as f64;
+ http_message(StatusCode::OK, supply_dec.to_string(), TTL_SHORT)
+ } else {
+ http_message(StatusCode::OK, supply.to_string(), TTL_SHORT)
+ }
+ }
+
+ _ => Err(HttpError::not_found(format!(
+ "endpoint does not exist {:?}",
+ uri.path()
+ ))),
+ }
+}
+
+fn http_message<T>(status: StatusCode, message: T, ttl: u32) -> Result<Response<Body>, HttpError>
+where
+ T: Into<Body>,
+{
+ Ok(Response::builder()
+ .status(status)
+ .header("Content-Type", "text/plain")
+ .header("Cache-Control", format!("public, max-age={:}", ttl))
+ .body(message.into())
+ .unwrap())
+}
+
+fn json_response<T: Serialize>(value: T, ttl: u32) -> Result<Response<Body>, HttpError> {
+ let value = serde_json::to_string(&value)?;
+ Ok(Response::builder()
+ .header("Content-Type", "application/json")
+ .header("Cache-Control", format!("public, max-age={:}", ttl))
+ .body(Body::from(value))
+ .unwrap())
+}
+
+#[trace]
+fn blocks(query: &Query, start_height: Option<usize>) -> Result<Response<Body>, HttpError> {
+ let mut values = Vec::new();
+ let mut current_hash = match start_height {
+ Some(height) => *query
+ .chain()
+ .header_by_height(height)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?
+ .hash(),
+ None => query.chain().best_hash(),
+ };
+
+ let zero = [0u8; 32];
+ for _ in 0..BLOCK_LIMIT {
+ let blockhm = query
+ .chain()
+ .get_block_with_meta(&current_hash)
+ .ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
+ current_hash = blockhm.header_entry.header().prev_blockhash;
+
+ #[allow(unused_mut)]
+ let mut value = BlockValue::new(blockhm);
+
+ #[cfg(feature = "liquid")]
+ {
+ // exclude ExtData in block list view
+ value.ext = None;
+ }
+ values.push(value);
+
+ if current_hash[..] == zero[..] {
+ break;
+ }
+ }
+ json_response(values, TTL_SHORT)
+}
+
+fn to_scripthash(
+ script_type: &str,
+ script_str: &str,
+ network: Network,
+) -> Result<FullHash, HttpError> {
+ match script_type {
+ "address" => address_to_scripthash(script_str, network),
+ "scripthash" => parse_scripthash(script_str),
+ _ => bail!("Invalid script type".to_string()),
+ }
+}
+
+fn address_to_scripthash(addr: &str, network: Network) -> Result<FullHash, HttpError> {
+ #[cfg(not(feature = "liquid"))]
+ let addr = address::Address::from_str(addr)?;
+ #[cfg(feature = "liquid")]
+ let addr = address::Address::parse_with_params(addr, network.address_params())?;
+
+ #[cfg(not(feature = "liquid"))]
+ let is_expected_net = addr.is_valid_for_network(network.into());
+
+ #[cfg(feature = "liquid")]
+ let is_expected_net = addr.params == network.address_params();
+
+ if !is_expected_net {
+ bail!(HttpError::from("Address on invalid network".to_string()))
+ }
+
+ #[cfg(not(feature = "liquid"))]
+ let addr = addr.assume_checked();
+
+ Ok(compute_script_hash(&addr.script_pubkey()))
+}
+
+fn parse_scripthash(scripthash: &str) -> Result<FullHash, HttpError> {
+ FullHash::from_hex(scripthash).map_err(|_| HttpError::from("Invalid scripthash".to_string()))
+}
+
+#[derive(Debug)]
+struct HttpError(StatusCode, String);
+
+impl HttpError {
+ fn not_found(msg: String) -> Self {
+ HttpError(StatusCode::NOT_FOUND, msg)
+ }
+}
+
+impl From<String> for HttpError {
+ fn from(msg: String) -> Self {
+ HttpError(StatusCode::BAD_REQUEST, msg)
+ }
+}
+impl From<ParseIntError> for HttpError {
+ fn from(_e: ParseIntError) -> Self {
+ //HttpError::from(e.description().to_string())
+ HttpError::from("Invalid number".to_string())
+ }
+}
+impl From<HashError> for HttpError {
+ fn from(_e: HashError) -> Self {
+ //HttpError::from(e.description().to_string())
+ HttpError::from("Invalid hash string".to_string())
+ }
+}
+impl From<hex::HexToBytesError> for HttpError {
+ fn from(_e: hex::HexToBytesError) -> Self {
+ //HttpError::from(e.description().to_string())
+ HttpError::from("Invalid hex string".to_string())
+ }
+}
+impl From<hex::HexToArrayError> for HttpError {
+ fn from(_e: hex::HexToArrayError) -> Self {
+ //HttpError::from(e.description().to_string())
+ HttpError::from("Invalid hex string".to_string())
+ }
+}
+impl From<errors::Error> for HttpError {
+ fn from(e: errors::Error) -> Self {
+ warn!("errors::Error: {:?}", e);
+ match e.description().to_string().as_ref() {
+ "getblock RPC error: {\"code\":-5,\"message\":\"Block not found\"}" => {
+ HttpError::not_found("Block not found".to_string())
+ }
+ _ => HttpError::from(e.to_string()),
+ }
+ }
+}
+impl From<serde_json::Error> for HttpError {
+ fn from(e: serde_json::Error) -> Self {
+ HttpError::from(e.to_string())
+ }
+}
+impl From<encode::Error> for HttpError {
+ fn from(e: encode::Error) -> Self {
+ HttpError::from(e.to_string())
+ }
+}
+impl From<std::string::FromUtf8Error> for HttpError {
+ fn from(e: std::string::FromUtf8Error) -> Self {
+ HttpError::from(e.to_string())
+ }
+}
+
+#[cfg(not(feature = "liquid"))]
+impl From<address::ParseError> for HttpError {
+ fn from(e: address::ParseError) -> Self {
+ HttpError::from(e.to_string())
+ }
+}
+
+#[cfg(feature = "liquid")]
+impl From<address::AddressError> for HttpError {
+ fn from(e: address::AddressError) -> Self {
+ HttpError::from(e.to_string())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::rest::HttpError;
+ use serde_json::Value;
+ use std::collections::HashMap;
+
+ #[test]
+ fn test_parse_query_param() {
+ let mut query_params = HashMap::new();
+
+ query_params.insert("limit", "10");
+ let limit = query_params
+ .get("limit")
+ .map_or(10u32, |el| el.parse().unwrap_or(10u32))
+ .min(30u32);
+ assert_eq!(10, limit);
+
+ query_params.insert("limit", "100");
+ let limit = query_params
+ .get("limit")
+ .map_or(10u32, |el| el.parse().unwrap_or(10u32))
+ .min(30u32);
+ assert_eq!(30, limit);
+
+ query_params.insert("limit", "5");
+ let limit = query_params
+ .get("limit")
+ .map_or(10u32, |el| el.parse().unwrap_or(10u32))
+ .min(30u32);
+ assert_eq!(5, limit);
+
+ query_params.insert("limit", "aaa");
+ let limit = query_params
+ .get("limit")
+ .map_or(10u32, |el| el.parse().unwrap_or(10u32))
+ .min(30u32);
+ assert_eq!(10, limit);
+
+ query_params.remove("limit");
+ let limit = query_params
+ .get("limit")
+ .map_or(10u32, |el| el.parse().unwrap_or(10u32))
+ .min(30u32);
+ assert_eq!(10, limit);
+ }
+
+ #[test]
+ fn test_parse_value_param() {
+ let v: Value = json!({ "confirmations": 10 });
+
+ let confirmations = v
+ .get("confirmations")
+ .and_then(|el| el.as_u64())
+ .ok_or(HttpError::from(
+ "confirmations absent or not a u64".to_string(),
+ ))
+ .unwrap();
+
+ assert_eq!(10, confirmations);
+
+ let err = v
+ .get("notexist")
+ .and_then(|el| el.as_u64())
+ .ok_or(HttpError::from("notexist absent or not a u64".to_string()));
+
+ assert!(err.is_err());
+ }
+}
\ No newline at end of file
diff --git a/src/pages/BitcoinAddressExplorer.jsx b/src/pages/BitcoinAddressExplorer.jsx
index d1dacdf..f4d5528 100644
--- a/src/pages/BitcoinAddressExplorer.jsx
+++ b/src/pages/BitcoinAddressExplorer.jsx
@@ -1,10 +1,11 @@
import React, { useState, useEffect } from 'react';
import { useOutletContext } from 'react-router-dom';
import { useLaserEyes } from '@omnisat/lasereyes';
-import {
- getAddressTransactions,
- getTransactionInfo,
- getAddressTransactionsWithTrace
+import {
+ getAddressInfo,
+ getAddressTransactionsChain,
+ getTransactionInfo,
+ getAddressTransactionsWithTrace
} from '../sdk/esplora';
import { traceTransaction } from '../sdk/alkanes';
@@ -33,6 +34,7 @@ const BitcoinAddressExplorer = () => {
const [totalPages, setTotalPages] = useState(1);
const [totalTransactions, setTotalTransactions] = useState(0);
const [pageLoading, setPageLoading] = useState(false);
+ const [lastSeenTxid, setLastSeenTxid] = useState(null);
const transactionsPerPage = 10;
// Helper function to shorten txids
@@ -120,12 +122,18 @@ const BitcoinAddressExplorer = () => {
try {
console.log(`Searching for transactions on network ${endpoint} for address ${addressToUse}`);
- // Fetch first page of transactions with pagination
- const result = await getAddressTransactions(
+ // First get the address info to get the total transaction count
+ const addressInfoResult = await getAddressInfo(addressToUse, endpoint);
+
+ if (addressInfoResult.status === "error") {
+ throw new Error(addressInfoResult.message);
+ }
+
+ // Fetch first page of transactions with cursor-based pagination
+ const result = await getAddressTransactionsChain(
addressToUse,
endpoint,
- transactionsPerPage,
- 0
+ null // null for first page
);
if (result.status === "error") {
@@ -138,15 +146,17 @@ const BitcoinAddressExplorer = () => {
setAddress(addressToUse);
// Set pagination data
- if (result.pagination) {
- setTotalTransactions(result.pagination.total);
- setTotalPages(Math.max(1, Math.ceil(result.pagination.total / transactionsPerPage)));
- } else {
- // Fallback if pagination info is not available
- setTotalTransactions(txs.length);
- setTotalPages(Math.max(1, Math.ceil(txs.length / transactionsPerPage)));
+ const totalTxCount = addressInfoResult.totalTxCount;
+ setTotalTransactions(totalTxCount);
+ setTotalPages(Math.max(1, Math.ceil(totalTxCount / transactionsPerPage)));
+
+ // Store the last seen txid for pagination
+ if (txs.length > 0) {
+ setLastSeenTxid(txs[txs.length - 1].txid);
}
+ console.log(`Total transactions: ${totalTxCount}, Pages: ${Math.ceil(totalTxCount / transactionsPerPage)}`);
+
} catch (err) {
console.error("Error fetching transactions data:", err);
setError(err.message || "Failed to fetch transactions data");
@@ -208,27 +218,86 @@ const BitcoinAddressExplorer = () => {
setPageLoading(true);
try {
- const offset = (pageNumber - 1) * transactionsPerPage;
-
- // Fetch transactions for the specified page
- const result = await getAddressTransactions(
- address,
- endpoint,
- transactionsPerPage,
- offset
- );
-
- if (result.status === "error") {
- throw new Error(result.message);
- }
-
- // Update transactions
- setTransactions(result.transactions || []);
-
- // Update pagination data if available
- if (result.pagination) {
- setTotalTransactions(result.pagination.total);
- setTotalPages(Math.max(1, Math.ceil(result.pagination.total / transactionsPerPage)));
+ // For page 1, we don't need a lastSeenTxid
+ if (pageNumber === 1) {
+ // Get address info first to get total transaction count
+ const addressInfoResult = await getAddressInfo(address, endpoint);
+
+ if (addressInfoResult.status === "error") {
+ throw new Error(addressInfoResult.message);
+ }
+
+ // Fetch first page of transactions
+ const result = await getAddressTransactionsChain(
+ address,
+ endpoint,
+ null // null for first page
+ );
+
+ if (result.status === "error") {
+ throw new Error(result.message);
+ }
+
+ // Update transactions
+ const txs = result.transactions || [];
+ setTransactions(txs);
+
+ // Update pagination data
+ setTotalTransactions(addressInfoResult.totalTxCount);
+
+ // Store the last seen txid for pagination
+ if (txs.length > 0) {
+ setLastSeenTxid(txs[txs.length - 1].txid);
+ }
+ } else {
+ // For pages > 1, we need to implement a different approach
+ // since we're using cursor-based pagination
+
+ // This is a simplified implementation - in a real app, you would
+ // need to keep track of the lastSeenTxid for each page
+
+ // For now, we'll just fetch the first page and then fetch additional
+ // pages one by one until we reach the requested page
+
+ let currentPage = 1;
+ let currentLastSeenTxid = null;
+ let currentTxs = [];
+
+ while (currentPage < pageNumber) {
+ const result = await getAddressTransactionsChain(
+ address,
+ endpoint,
+ currentLastSeenTxid
+ );
+
+ if (result.status === "error" || !result.transactions || !result.transactions.length) {
+ break;
+ }
+
+ currentTxs = result.transactions;
+ currentLastSeenTxid = result.pagination.lastSeenTxid;
+ currentPage++;
+ }
+
+ // Now fetch the actual page we want
+ const result = await getAddressTransactionsChain(
+ address,
+ endpoint,
+ currentLastSeenTxid
+ );
+
+ if (result.status === "error") {
+ throw new Error(result.message);
+ }
+
+ // Update transactions
+ const txs = result.transactions || [];
+ setTransactions(txs);
+
+ // Store the last seen txid for pagination
+ if (txs.length > 0) {
+ setLastSeenTxid(txs[txs.length - 1].txid);
+ }
}
// Scroll to top of results
diff --git a/src/pages/Home.jsx b/src/pages/Home.jsx
index 77b0914..6a8b23c 100644
--- a/src/pages/Home.jsx
+++ b/src/pages/Home.jsx
@@ -205,6 +205,13 @@ const Home = () => {
Explore transactions for an address
+
+
+
/explorer/transaction-io
+
+ Explore transaction inputs and outputs
+
+
diff --git a/src/pages/TransactionInputsOutputsExplorer.jsx b/src/pages/TransactionInputsOutputsExplorer.jsx
new file mode 100644
index 0000000..a32187f
--- /dev/null
+++ b/src/pages/TransactionInputsOutputsExplorer.jsx
@@ -0,0 +1,853 @@
+import React, { useState, useEffect } from 'react';
+import { useOutletContext } from 'react-router-dom';
+import { useLaserEyes } from '@omnisat/lasereyes';
+import {
+ getAddressInfo,
+ getAddressTransactionsChain,
+ getTransactionInfo,
+ getTransactionOutspends
+} from '../sdk/esplora';
+import getProvider from '../sdk/provider';
+
+/**
+ * TransactionInputsOutputsExplorer Component
+ *
+ * Page for exploring Bitcoin transaction inputs and outputs
+ * Allows users to search for an address and view all its transactions
+ * with detailed inputs and outputs visualization
+ */
+const TransactionInputsOutputsExplorer = () => {
+ const { endpoint = 'mainnet' } = useOutletContext() || {};
+ const { connected, address: walletAddress } = useLaserEyes();
+
+ // State for address input
+ const [address, setAddress] = useState('');
+ const [manualAddress, setManualAddress] = useState('');
+
+ // State for transaction data
+ const [transactions, setTransactions] = useState([]);
+ const [processedTransactions, setProcessedTransactions] = useState([]);
+ const [loading, setLoading] = useState(false);
+ const [error, setError] = useState(null);
+
+ // State for pagination
+ const [page, setPage] = useState(1);
+ const [totalPages, setTotalPages] = useState(1);
+ const [totalTransactions, setTotalTransactions] = useState(0);
+ const [pageLoading, setPageLoading] = useState(false);
+ const [fetchingAllTransactions, setFetchingAllTransactions] = useState(false);
+ const [fetchProgress, setFetchProgress] = useState(0);
+ const [maxTransactionsToFetch, setMaxTransactionsToFetch] = useState(10000);
+ const transactionsPerPage = 25; // Default page size from Esplora API
+
+ // Reset component state when endpoint/network changes
+ useEffect(() => {
+ // Reset the state to prevent issues when switching networks
+ setTransactions([]);
+ setProcessedTransactions([]);
+ setError(null);
+ setPage(1);
+ setTotalPages(1);
+
+ console.log(`Network switched to ${endpoint}`);
+ }, [endpoint]);
+
+ // Only populate the address field when both wallet connects and no address is already entered
+ useEffect(() => {
+ if (connected && walletAddress && !address && !manualAddress) {
+ // Only set the wallet address when both address and manualAddress are empty
+ // This prevents overriding any user input
+ setAddress(walletAddress);
+ }
+ }, [connected, walletAddress, address, manualAddress]);
+
+ // Helper function to shorten txids and addresses
+ const shortenTxid = (txid) => {
+ if (!txid) return 'N/A';
+ if (txid.length <= 13) return txid;
+ return `${txid.substring(0, 6)}...${txid.substring(txid.length - 6)}`;
+ };
+
+ const shortenAddress = (address) => {
+ if (!address) return 'Unknown';
+ if (address === 'OP_RETURN') return address;
+ if (address.length <= 15) return address;
+ return `${address.substring(0, 6)}...${address.substring(address.length - 6)}`;
+ };
+
+ // Function to copy text to clipboard
+ const copyToClipboard = (text) => {
+ if (!text) return;
+ navigator.clipboard.writeText(text)
+ .then(() => {
+ console.log('Copied to clipboard:', text);
+ })
+ .catch(err => {
+ console.error('Failed to copy text: ', err);
+ });
+ };
+
+ // Validate Bitcoin address (basic validation)
+ const isValidBitcoinAddress = (addr) => {
+ // Basic validation - check if it starts with valid prefixes
+ return addr && (
+ addr.startsWith('bc1') ||
+ addr.startsWith('1') ||
+ addr.startsWith('3') ||
+ addr.startsWith('bcr') || //regtest
+ addr.startsWith('tb1') || // testnet
+ addr.startsWith('m') || // testnet
+ addr.startsWith('n') || // testnet
+ addr.startsWith('2') // testnet
+ );
+ };
+
+ // Format date from timestamp
+ const formatDate = (timestamp) => {
+ if (!timestamp) return 'N/A';
+ const date = new Date(timestamp * 1000);
+ return date.toLocaleDateString() + ' ' + date.toLocaleTimeString();
+ };
+
+ // Helper function to convert hex to decimal
+ const hexToDec = (hexString) => {
+ if (!hexString || typeof hexString !== 'string') return 'N/A';
+ // Remove '0x' prefix if present
+ const cleanHex = hexString.startsWith('0x') ? hexString.slice(2) : hexString;
+ try {
+ return BigInt(`0x${cleanHex}`).toString(10);
+ } catch (error) {
+ console.error("Error converting hex to decimal:", error);
+ return hexString; // Return original if conversion fails
+ }
+ };
+
+ // Note: We now use the getAddressInfo function from esplora.js
+
+ // Fetch all transactions for an address up to the maximum limit
+ const fetchAllTransactions = async (address) => {
+ try {
+ setFetchingAllTransactions(true);
+ setFetchProgress(0);
+
+ // First get the total transaction count
+ const addressInfoResult = await getAddressInfo(address, endpoint);
+
+ if (addressInfoResult.status === "error") {
+ throw new Error(addressInfoResult.message);
+ }
+
+ const totalTxCount = Math.min(addressInfoResult.totalTxCount, maxTransactionsToFetch);
+ setTotalTransactions(totalTxCount);
+
+ // Start with an empty array of transactions
+ let allTransactions = [];
+ let lastSeenTxid = null;
+
+ // Update progress
+ let progress = 0;
+ setFetchProgress(progress);
+
+ // Fetch transactions in batches using cursor-based pagination
+ while (allTransactions.length < totalTxCount && allTransactions.length < maxTransactionsToFetch) {
+ // Fetch the next batch
+ const result = await getAddressTransactionsChain(
+ address,
+ endpoint,
+ lastSeenTxid
+ );
+
+ if (result.status === "error" || !result.transactions || !result.transactions.length) {
+ break; // No more transactions or error
+ }
+
+ // Add transactions to our collection
+ allTransactions = [...allTransactions, ...result.transactions];
+
+ // Update progress
+ progress = Math.min(100, Math.round((allTransactions.length / totalTxCount) * 100));
+ setFetchProgress(progress);
+
+ // Update the last seen txid for the next batch
+ lastSeenTxid = result.pagination.lastSeenTxid;
+
+ // If we don't have more transactions or we've reached the maximum, stop fetching
+ if (!result.pagination.hasMore || allTransactions.length >= maxTransactionsToFetch) {
+ break;
+ }
+ }
+
+ return {
+ status: "success",
+ message: "All transactions retrieved",
+ address,
+ transactions: allTransactions,
+ pagination: {
+ total: totalTxCount,
+ fetched: allTransactions.length
+ }
+ };
+ } catch (error) {
+ console.error('Error fetching all transactions:', error);
+ return {
+ status: "error",
+ message: error.message || "Unknown error",
+ address,
+ transactions: []
+ };
+ } finally {
+ setFetchingAllTransactions(false);
+ }
+ };
+
+ // Handle form submission
+ const handleSubmit = async (e) => {
+ e.preventDefault();
+
+ // Reset all states before making a new request
+ setError(null);
+ setPage(1);
+
+ // Validate address
+ const addressToUse = manualAddress || address;
+ if (!addressToUse) {
+ setError("Please enter an address");
+ return;
+ }
+
+ if (!isValidBitcoinAddress(addressToUse)) {
+ setError("Please enter a valid Bitcoin address");
+ return;
+ }
+
+ // Set loading state
+ setLoading(true);
+ setTransactions([]); // Clear previous results
+ setProcessedTransactions([]);
+
+ try {
+ console.log(`Searching for transactions on network ${endpoint} for address ${addressToUse}`);
+
+ // Fetch all transactions for the address (up to maxTransactionsToFetch)
+ const result = await fetchAllTransactions(addressToUse);
+
+ if (result.status === "error") {
+ throw new Error(result.message);
+ }
+
+ // Set data
+ const txs = result.transactions || [];
+ setTransactions(txs);
+ setAddress(addressToUse);
+
+ // Set pagination data for UI display
+ if (result.pagination) {
+ setTotalTransactions(result.pagination.total);
+ // Calculate total pages based on the number of transactions we actually fetched
+ const calculatedPages = Math.max(1, Math.ceil(txs.length / transactionsPerPage));
+ setTotalPages(calculatedPages);
+
+ console.log(`Total transactions: ${result.pagination.total}, Fetched: ${txs.length}, Pages: ${calculatedPages}`);
+ } else {
+ // Fallback if pagination info is not available
+ setTotalTransactions(txs.length);
+ setTotalPages(Math.max(1, Math.ceil(txs.length / transactionsPerPage)));
+
+ console.log(`Total transactions: ${txs.length}, Pages: ${Math.ceil(txs.length / transactionsPerPage)}`);
+ }
+
+ // Process transactions to get inputs and outputs
+ await processTransactions(txs);
+
+ } catch (err) {
+ console.error("Error fetching transactions data:", err);
+ setError(err.message || "Failed to fetch transactions data");
+ setTransactions([]);
+ setProcessedTransactions([]);
+ setTotalTransactions(0);
+ setTotalPages(1);
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ // Process transactions to get detailed input and output information
+ const processTransactions = async (txs) => {
+ const processed = [];
+
+ // Process transactions in batches to avoid overwhelming the browser
+ const batchSize = 25;
+ const totalBatches = Math.ceil(txs.length / batchSize);
+
+ for (let batchIndex = 0; batchIndex < totalBatches; batchIndex++) {
+ const startIndex = batchIndex * batchSize;
+ const endIndex = Math.min(startIndex + batchSize, txs.length);
+ const batch = txs.slice(startIndex, endIndex);
+
+ console.log(`Processing batch ${batchIndex + 1}/${totalBatches} (${startIndex}-${endIndex} of ${txs.length} transactions)`);
+
+ // Process each transaction in the batch
+ for (const tx of batch) {
+ try {
+ // Get detailed transaction info
+ const txInfo = await getTransactionInfo(tx.txid, endpoint);
+
+ if (txInfo.status === "error") {
+ processed.push({
+ ...tx,
+ error: txInfo.message,
+ inputs: [],
+ outputs: []
+ });
+ continue;
+ }
+
+ const transaction = txInfo.transaction;
+
+ // Get outspends to determine if outputs have been spent
+ const outspends = await getTransactionOutspends(tx.txid, endpoint);
+
+ // Process inputs
+ const inputs = transaction.vin.map(input => ({
+ txid: input.txid,
+ vout: input.vout,
+ address: input.prevout?.scriptpubkey_address || 'Unknown',
+ value: input.prevout?.value || 0,
+ valueBTC: (input.prevout?.value || 0) / 100000000, // Convert satoshis to BTC
+ isCoinbase: input.is_coinbase || false
+ }));
+
+ // Process outputs
+ const outputs = transaction.vout.map((output, index) => {
+ const isSpent = outspends.status === "success" &&
+ outspends.outspends &&
+ outspends.outspends[index] &&
+ outspends.outspends[index].spent;
+
+ return {
+ n: output.n,
+ address: output.scriptpubkey_address || 'OP_RETURN',
+ value: output.value || 0,
+ valueBTC: (output.value || 0) / 100000000, // Convert satoshis to BTC
+ type: output.scriptpubkey_type,
+ isOpReturn: output.scriptpubkey_type === 'op_return',
+ spent: isSpent
+ };
+ });
+
+ // Calculate total input and output values
+ const totalInput = inputs.reduce((sum, input) => sum + input.value, 0);
+ const totalOutput = outputs.reduce((sum, output) => sum + output.value, 0);
+
+ processed.push({
+ ...tx,
+ inputs,
+ outputs,
+ totalInput,
+ totalOutput,
+ totalInputBTC: totalInput / 100000000,
+ totalOutputBTC: totalOutput / 100000000,
+ fee: totalInput - totalOutput,
+ feeBTC: (totalInput - totalOutput) / 100000000
+ });
+
+ } catch (error) {
+ console.error(`Error processing transaction ${tx.txid}:`, error);
+ processed.push({
+ ...tx,
+ error: error.message,
+ inputs: [],
+ outputs: []
+ });
+ }
+ }
+
+ // Update the processed transactions after each batch
+ setProcessedTransactions([...processed]);
+
+ // Update progress
+ const progress = Math.min(100, Math.round((processed.length / txs.length) * 100));
+ setFetchProgress(progress);
+ }
+
+ console.log(`Processed ${processed.length} transactions in total`);
+ setProcessedTransactions(processed);
+ };
+
+ // Get current page transactions - paginate locally since we've fetched all transactions
+ const getCurrentPageTransactions = () => {
+ const startIndex = (page - 1) * transactionsPerPage;
+ const endIndex = startIndex + transactionsPerPage;
+ const pageTransactions = processedTransactions.slice(startIndex, endIndex);
+
+ console.log(`Getting page ${page} transactions: ${startIndex}-${endIndex} of ${processedTransactions.length}`);
+ return pageTransactions;
+ };
+
+ // Handle pagination - now just updates the page state for local pagination
+ const handlePreviousPage = () => {
+ if (page > 1) {
+ const newPage = page - 1;
+ setPage(newPage);
+ // Scroll to top of results
+ window.scrollTo(0, 0);
+ }
+ };
+
+ const handleNextPage = () => {
+ if (page < totalPages) {
+ const newPage = page + 1;
+ setPage(newPage);
+ // Scroll to top of results
+ window.scrollTo(0, 0);
+ }
+ };
+
+ // Handle using connected wallet
+ const useConnectedWallet = () => {
+ if (connected && walletAddress) {
+ setManualAddress('');
+ setAddress(walletAddress);
+ }
+ };
+
+ // CSS for inline styling according to design guidelines
+ const styles = {
+ container: {
+ width: '100%',
+ maxWidth: '1200px',
+ margin: '0 auto',
+ backgroundColor: '#FFFFFF',
+ padding: '20px',
+ border: '1px solid #E0E0E0',
+ },
+ title: {
+ fontSize: '24px',
+ fontWeight: 'bold',
+ marginBottom: '16px',
+ textAlign: 'left',
+ fontFamily: 'Roboto Mono, monospace',
+ },
+ subtitle: {
+ fontSize: '20px',
+ fontWeight: 'bold',
+ marginBottom: '12px',
+ textAlign: 'left',
+ fontFamily: 'Roboto Mono, monospace',
+ },
+ description: {
+ fontSize: '14px',
+ marginBottom: '20px',
+ textAlign: 'left',
+ fontFamily: 'Roboto Mono, monospace',
+ },
+ section: {
+ marginBottom: '20px',
+ padding: '20px',
+ backgroundColor: '#FFFFFF',
+ border: '1px solid #E0E0E0',
+ },
+ form: {
+ display: 'flex',
+ flexDirection: 'column',
+ gap: '16px',
+ },
+ formRow: {
+ display: 'flex',
+ alignItems: 'center',
+ gap: '10px',
+ flexWrap: 'wrap',
+ },
+ label: {
+ fontWeight: 'bold',
+ marginBottom: '8px',
+ display: 'block',
+ fontFamily: 'Roboto Mono, monospace',
+ fontSize: '14px',
+ },
+ input: {
+ padding: '8px',
+ border: '1px solid #E0E0E0',
+ borderRadius: '4px',
+ width: '100%',
+ fontFamily: 'Roboto Mono, monospace',
+ fontSize: '14px',
+ },
+ button: {
+ backgroundColor: '#000000',
+ color: '#FFFFFF',
+ border: 'none',
+ padding: '8px 16px',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ fontFamily: 'Roboto Mono, monospace',
+ fontSize: '14px',
+ fontWeight: 'bold',
+ },
+ secondaryButton: {
+ backgroundColor: '#FFFFFF',
+ color: '#000000',
+ border: '1px solid #000000',
+ padding: '8px 16px',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ fontFamily: 'Roboto Mono, monospace',
+ fontSize: '14px',
+ },
+ disabledButton: {
+ backgroundColor: '#CCCCCC',
+ color: '#666666',
+ border: 'none',
+ padding: '8px 16px',
+ borderRadius: '4px',
+ cursor: 'not-allowed',
+ fontFamily: 'Roboto Mono, monospace',
+ fontSize: '14px',
+ },
+ transactionCard: {
+ marginBottom: '20px',
+ padding: '15px',
+ border: '1px solid #E0E0E0',
+ borderRadius: '4px',
+ backgroundColor: '#FFFFFF',
+ },
+ transactionHeader: {
+ display: 'flex',
+ justifyContent: 'space-between',
+ marginBottom: '10px',
+ padding: '5px 0',
+ borderBottom: '1px solid #E0E0E0',
+ },
+ txid: {
+ fontFamily: 'monospace',
+ cursor: 'pointer',
+ },
+ inputOutputContainer: {
+ display: 'flex',
+ gap: '20px',
+ },
+ column: {
+ flex: 1,
+ padding: '10px',
+ backgroundColor: '#F5F5F5',
+ borderRadius: '4px',
+ },
+ columnHeader: {
+ fontSize: '16px',
+ fontWeight: 'bold',
+ marginBottom: '10px',
+ textAlign: 'center',
+ },
+ item: {
+ display: 'flex',
+ justifyContent: 'space-between',
+ alignItems: 'center',
+ padding: '8px',
+ marginBottom: '5px',
+ backgroundColor: '#FFFFFF',
+ borderRadius: '4px',
+ border: '1px solid #E0E0E0',
+ },
+ itemAddress: {
+ display: 'flex',
+ alignItems: 'center',
+ gap: '5px',
+ },
+ address: {
+ fontFamily: 'monospace',
+ cursor: 'pointer',
+ },
+ itemValue: {
+ fontWeight: 'bold',
+ },
+ redCircle: {
+ color: '#F44336',
+ fontSize: '12px',
+ },
+ greenCircle: {
+ color: '#4CAF50',
+ fontSize: '12px',
+ },
+ totalRow: {
+ padding: '10px',
+ textAlign: 'right',
+ fontWeight: 'bold',
+ borderTop: '1px solid #E0E0E0',
+ marginTop: '10px',
+ },
+ feeRow: {
+ padding: '10px',
+ textAlign: 'right',
+ fontWeight: 'bold',
+ borderTop: '1px solid #E0E0E0',
+ marginTop: '10px',
+ },
+ pagination: {
+ display: 'flex',
+ justifyContent: 'center',
+ alignItems: 'center',
+ gap: '10px',
+ marginTop: '20px',
+ },
+ paginationButton: {
+ padding: '5px 10px',
+ backgroundColor: '#000000',
+ color: '#FFFFFF',
+ border: 'none',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ },
+ pageInfo: {
+ fontFamily: 'Roboto Mono, monospace',
+ },
+ opReturn: {
+ fontFamily: 'monospace',
+ backgroundColor: '#E0E0E0',
+ padding: '2px 5px',
+ borderRadius: '4px',
+ },
+ runestone: {
+ backgroundColor: '#9C27B0',
+ color: '#FFFFFF',
+ padding: '2px 5px',
+ borderRadius: '4px',
+ marginLeft: '5px',
+ fontSize: '12px',
+ },
+ detailsButton: {
+ backgroundColor: '#2196F3',
+ color: '#FFFFFF',
+ border: 'none',
+ padding: '5px 10px',
+ borderRadius: '4px',
+ cursor: 'pointer',
+ fontSize: '12px',
+ },
+ progressContainer: {
+ marginTop: '20px',
+ marginBottom: '20px',
+ textAlign: 'center',
+ },
+ progressBar: {
+ width: '100%',
+ height: '20px',
+ backgroundColor: '#E0E0E0',
+ borderRadius: '4px',
+ margin: '10px 0',
+ overflow: 'hidden',
+ },
+ progressBarFill: {
+ height: '100%',
+ backgroundColor: '#4CAF50',
+ borderRadius: '4px',
+ transition: 'width 0.3s ease',
+ },
+ transactionStats: {
+ marginBottom: '15px',
+ padding: '10px',
+ backgroundColor: '#F5F5F5',
+ borderRadius: '4px',
+ textAlign: 'center',
+ },
+ };
+
+ return (
+
+
+
Transaction Inputs & Outputs Explorer
+
+ Search for a Bitcoin address to view all its transactions with inputs and outputs.
+
+
+
+
+ {error && (
+
+ Error: {error}
+
+ )}
+
+
+ {address && (
+
+
Transactions for {address}
+
+ {loading ? (
+
Loading transactions...
+ ) : fetchingAllTransactions ? (
+
+
Fetching transactions... {fetchProgress}% complete
+
+
This may take a while for addresses with many transactions.
+
+ ) : processedTransactions.length > 0 ? (
+
+
+
Showing {getCurrentPageTransactions().length} of {processedTransactions.length} transactions (Total: {totalTransactions})
+
+
+ {getCurrentPageTransactions().map((tx, index) => (
+
+
+
+ Transaction:
+ copyToClipboard(tx.txid)}
+ title="Click to copy"
+ >
+ {tx.txid}
+
+
+
+ Date: {formatDate(tx.status?.block_time)}
+
+
+
+
+
Inputs & Outputs
+
+
+
+
+ {/* Inputs Column */}
+
+
Inputs
+ {tx.inputs.map((input, i) => (
+
+
+ ●
+ {input.isCoinbase ? (
+ Coinbase (New Coins)
+ ) : (
+ copyToClipboard(input.address)}
+ title="Click to copy"
+ >
+ {shortenAddress(input.address)}
+
+ )}
+
+
+ {input.valueBTC.toFixed(8)} BTC
+
+
+ ))}
+
+ Total: {tx.totalInputBTC.toFixed(8)} BTC
+
+
+
+ {/* Outputs Column */}
+
+
Outputs
+ {tx.outputs.map((output, i) => (
+
+
+
●
+ {output.isOpReturn ? (
+
+ OP_RETURN
+ Runestone
+
+ ) : (
+
copyToClipboard(output.address)}
+ title="Click to copy"
+ >
+ {shortenAddress(output.address)}
+
+ )}
+
+
+ {output.valueBTC.toFixed(8)} BTC
+
+
+ ))}
+
+ Total: {tx.totalOutputBTC.toFixed(8)} BTC
+
+
+
+
+
+ Fee: {tx.feeBTC.toFixed(8)} BTC
+
+
+ ))}
+
+ {/* Pagination */}
+ {totalPages > 1 && (
+
+
+
+ Page {page} of {totalPages} ({processedTransactions.length} of {totalTransactions} transactions)
+
+
+
+ )}
+
+ ) : (
+
No transactions found for this address.
+ )}
+
+ )}
+
+ );
+};
+
+export default TransactionInputsOutputsExplorer;
\ No newline at end of file
diff --git a/src/routes.jsx b/src/routes.jsx
index 3e4506c..9e48584 100644
--- a/src/routes.jsx
+++ b/src/routes.jsx
@@ -9,6 +9,7 @@ import AlkanesBalanceExplorer from './pages/AlkanesBalanceExplorer';
import AlkanesTokensExplorer from './pages/AlkanesTokensExplorer';
import AlkanesTemplatesExplorer from './pages/AlkanesTemplatesExplorer';
import BitcoinAddressExplorer from './pages/BitcoinAddressExplorer';
+import TransactionInputsOutputsExplorer from './pages/TransactionInputsOutputsExplorer';
import TraceBlockStatusForm from './components/methods/TraceBlockStatusForm';
import SimulateForm from './components/methods/SimulateForm';
import TraceForm from './components/methods/TraceForm';
@@ -67,6 +68,10 @@ const router = createBrowserRouter([
path: 'explorer/address',
element:
},
+ {
+ path: 'explorer/transaction-io',
+ element:
+ },
// Not found route
{
path: '*',
diff --git a/src/sdk/esplora.js b/src/sdk/esplora.js
index c3f2ac1..b6d26aa 100644
--- a/src/sdk/esplora.js
+++ b/src/sdk/esplora.js
@@ -143,44 +143,111 @@ export const getTransactionOutspends = async (txid, endpoint = 'regtest') => {
};
/**
- * Gets transactions for a specific Bitcoin address with pagination support
+ * Gets address information including transaction count using the Esplora API
* @param {string} address - Bitcoin address to query
* @param {string} endpoint - API endpoint to use ('regtest', 'mainnet', 'oylnet')
- * @param {number} limit - Maximum number of transactions to return (default: 10)
- * @param {number} offset - Number of transactions to skip (default: 0)
- * @returns {Promise