diff --git a/ExplorerFrontend/app/tx/[query]/transaction-view.tsx b/ExplorerFrontend/app/tx/[query]/transaction-view.tsx
index c740db0..6d85b99 100644
--- a/ExplorerFrontend/app/tx/[query]/transaction-view.tsx
+++ b/ExplorerFrontend/app/tx/[query]/transaction-view.tsx
@@ -46,18 +46,24 @@ const formatTimestamp = (timestamp: number): string => {
return `${month} ${day}, ${year}, ${hours}:${minutes}:${seconds} UTC`;
};
+const isZeroAddress = (addr: string): boolean => {
+ if (!addr) return false;
+ const stripped = addr.replace(/^[Zz]/, '').replace(/^0x/, '');
+ return /^0*$/.test(stripped);
+};
+
const AddressDisplay = ({ address, isMobile }: { address: string, isMobile: boolean }): JSX.Element => {
const displayAddress = isMobile ? `${address.slice(0, 8)}...${address.slice(-6)}` : address;
-
+
return (
-
{displayAddress}
-
@@ -266,27 +272,46 @@ export default function TransactionView({ transaction }: TransactionViewProps):
diff --git a/ExplorerFrontend/package-lock.json b/ExplorerFrontend/package-lock.json
index 550dd38..8857562 100644
--- a/ExplorerFrontend/package-lock.json
+++ b/ExplorerFrontend/package-lock.json
@@ -29,14 +29,14 @@
"@visx/scale": "^3.2.0",
"@visx/shape": "^3.2.0",
"@visx/tooltip": "^3.12.0",
- "axios": "^1.13.2",
+ "axios": "^1.13.6",
"buffer": "^6.0.3",
"d3": "^7.4.4",
"encoding": "^0.1.13",
- "eslint-config-next": "^16.1.1",
+ "eslint-config-next": "^16.1.6",
"ethereumjs-util": "^7.1.5",
"json-formatter-js": "^2.3.4",
- "next": "^16.1.1",
+ "next": "^16.1.6",
"qrcode.react": "^4.2.0",
"react": "^19.2.3",
"react-dom": "^19.2.3",
@@ -87,12 +87,12 @@
}
},
"node_modules/@babel/code-frame": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
- "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz",
+ "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-validator-identifier": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.28.5",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
@@ -101,29 +101,29 @@
}
},
"node_modules/@babel/compat-data": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz",
- "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz",
+ "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/core": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz",
- "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.28.5",
- "@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-module-transforms": "^7.28.3",
- "@babel/helpers": "^7.28.4",
- "@babel/parser": "^7.28.5",
- "@babel/template": "^7.27.2",
- "@babel/traverse": "^7.28.5",
- "@babel/types": "^7.28.5",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz",
+ "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-module-transforms": "^7.28.6",
+ "@babel/helpers": "^7.28.6",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/traverse": "^7.29.0",
+ "@babel/types": "^7.29.0",
"@jridgewell/remapping": "^2.3.5",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
@@ -139,12 +139,6 @@
"url": "https://opencollective.com/babel"
}
},
- "node_modules/@babel/core/node_modules/convert-source-map": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
- "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
- "license": "MIT"
- },
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
@@ -155,13 +149,13 @@
}
},
"node_modules/@babel/generator": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz",
- "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==",
+ "version": "7.29.1",
+ "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz",
+ "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==",
"license": "MIT",
"dependencies": {
- "@babel/parser": "^7.28.5",
- "@babel/types": "^7.28.5",
+ "@babel/parser": "^7.29.0",
+ "@babel/types": "^7.29.0",
"@jridgewell/gen-mapping": "^0.3.12",
"@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
@@ -171,12 +165,12 @@
}
},
"node_modules/@babel/helper-compilation-targets": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
- "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz",
+ "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==",
"license": "MIT",
"dependencies": {
- "@babel/compat-data": "^7.27.2",
+ "@babel/compat-data": "^7.28.6",
"@babel/helper-validator-option": "^7.27.1",
"browserslist": "^4.24.0",
"lru-cache": "^5.1.1",
@@ -205,27 +199,27 @@
}
},
"node_modules/@babel/helper-module-imports": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
- "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz",
+ "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==",
"license": "MIT",
"dependencies": {
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/traverse": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-transforms": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
- "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz",
+ "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-imports": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1",
- "@babel/traverse": "^7.28.3"
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -262,25 +256,25 @@
}
},
"node_modules/@babel/helpers": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
- "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz",
+ "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
"license": "MIT",
"dependencies": {
- "@babel/template": "^7.27.2",
- "@babel/types": "^7.28.4"
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
- "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz",
+ "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==",
"license": "MIT",
"dependencies": {
- "@babel/types": "^7.28.5"
+ "@babel/types": "^7.29.0"
},
"bin": {
"parser": "bin/babel-parser.js"
@@ -299,31 +293,31 @@
}
},
"node_modules/@babel/template": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
- "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz",
+ "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
"license": "MIT",
"dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/parser": "^7.27.2",
- "@babel/types": "^7.27.1"
+ "@babel/code-frame": "^7.28.6",
+ "@babel/parser": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz",
- "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz",
+ "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==",
"license": "MIT",
"dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.28.5",
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
"@babel/helper-globals": "^7.28.0",
- "@babel/parser": "^7.28.5",
- "@babel/template": "^7.27.2",
- "@babel/types": "^7.28.5",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.29.0",
"debug": "^4.3.1"
},
"engines": {
@@ -331,9 +325,9 @@
}
},
"node_modules/@babel/types": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
- "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz",
+ "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==",
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
@@ -379,9 +373,9 @@
}
},
"node_modules/@cypress/request": {
- "version": "3.0.9",
- "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.9.tgz",
- "integrity": "sha512-I3l7FdGRXluAS44/0NguwWlO83J18p0vlr2FYHrJkWdNYhgVoiYo61IXPqaOsL+vNxU1ZqMACzItGK3/KKDsdw==",
+ "version": "3.0.10",
+ "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.10.tgz",
+ "integrity": "sha512-hauBrOdvu08vOsagkZ/Aju5XuiZx6ldsLfByg1htFeldhex+PeMrYauANzFsMJeAA0+dyPLbDoX2OYuvVoLDkQ==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -398,7 +392,7 @@
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"performance-now": "^2.1.0",
- "qs": "6.14.0",
+ "qs": "~6.14.1",
"safe-buffer": "^5.1.2",
"tough-cookie": "^5.0.0",
"tunnel-agent": "^0.6.0",
@@ -430,9 +424,9 @@
}
},
"node_modules/@emnapi/core": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.0.tgz",
- "integrity": "sha512-ryJnSmj4UhrGLZZPJ6PKVb4wNPAIkW6iyLy+0TRwazd3L1u0wzMe8RfqevAh2HbcSkoeLiSYnOVDOys4JSGYyg==",
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz",
+ "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==",
"license": "MIT",
"optional": true,
"dependencies": {
@@ -441,9 +435,9 @@
}
},
"node_modules/@emnapi/runtime": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.0.tgz",
- "integrity": "sha512-Z82FDl1ByxqPEPrAYYeTQVlx2FSHPe1qwX465c+96IRS3fTdSYRoJcRxg3g2fEG5I69z1dSEWQlNRRr0/677mg==",
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz",
+ "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==",
"license": "MIT",
"optional": true,
"dependencies": {
@@ -478,6 +472,18 @@
"eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
}
},
+ "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": {
+ "version": "3.4.3",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
+ "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@eslint-community/regexpp": {
"version": "4.12.2",
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
@@ -502,30 +508,6 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
- "node_modules/@eslint/config-array/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@eslint/config-array/node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/@eslint/config-helpers": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz",
@@ -576,30 +558,6 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/@eslint/js": {
"version": "9.39.3",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.3.tgz",
@@ -1052,21 +1010,21 @@
}
},
"node_modules/@floating-ui/core": {
- "version": "1.7.3",
- "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz",
- "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==",
+ "version": "1.7.4",
+ "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz",
+ "integrity": "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==",
"license": "MIT",
"dependencies": {
"@floating-ui/utils": "^0.2.10"
}
},
"node_modules/@floating-ui/dom": {
- "version": "1.7.4",
- "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz",
- "integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==",
+ "version": "1.7.5",
+ "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.5.tgz",
+ "integrity": "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==",
"license": "MIT",
"dependencies": {
- "@floating-ui/core": "^1.7.3",
+ "@floating-ui/core": "^1.7.4",
"@floating-ui/utils": "^0.2.10"
}
},
@@ -1086,12 +1044,12 @@
}
},
"node_modules/@floating-ui/react-dom": {
- "version": "2.1.6",
- "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz",
- "integrity": "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==",
+ "version": "2.1.7",
+ "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.7.tgz",
+ "integrity": "sha512-0tLRojf/1Go2JgEVm+3Frg9A3IW8bJgKgdO0BN5RkF//ufuz2joZM63Npau2ff3J6lUVYgDSNzNkR+aH3IVfjg==",
"license": "MIT",
"dependencies": {
- "@floating-ui/dom": "^1.7.4"
+ "@floating-ui/dom": "^1.7.5"
},
"peerDependencies": {
"react": ">=16.8.0",
@@ -1186,9 +1144,9 @@
}
},
"node_modules/@img/colour": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz",
- "integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.1.0.tgz",
+ "integrity": "sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ==",
"license": "MIT",
"optional": true,
"engines": {
@@ -1753,24 +1711,24 @@
}
},
"node_modules/@next/env": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/env/-/env-16.1.1.tgz",
- "integrity": "sha512-3oxyM97Sr2PqiVyMyrZUtrtM3jqqFxOQJVuKclDsgj/L728iZt/GyslkN4NwarledZATCenbk4Offjk1hQmaAA==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/env/-/env-16.1.6.tgz",
+ "integrity": "sha512-N1ySLuZjnAtN3kFnwhAwPvZah8RJxKasD7x1f8shFqhncnWZn4JMfg37diLNuoHsLAlrDfM3g4mawVdtAG8XLQ==",
"license": "MIT"
},
"node_modules/@next/eslint-plugin-next": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-16.1.1.tgz",
- "integrity": "sha512-Ovb/6TuLKbE1UiPcg0p39Ke3puyTCIKN9hGbNItmpQsp+WX3qrjO3WaMVSi6JHr9X1NrmthqIguVHodMJbh/dw==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-16.1.6.tgz",
+ "integrity": "sha512-/Qq3PTagA6+nYVfryAtQ7/9FEr/6YVyvOtl6rZnGsbReGLf0jZU6gkpr1FuChAQpvV46a78p4cmHOVP8mbfSMQ==",
"license": "MIT",
"dependencies": {
"fast-glob": "3.3.1"
}
},
"node_modules/@next/swc-darwin-arm64": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-16.1.1.tgz",
- "integrity": "sha512-JS3m42ifsVSJjSTzh27nW+Igfha3NdBOFScr9C80hHGrWx55pTrVL23RJbqir7k7/15SKlrLHhh/MQzqBBYrQA==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-16.1.6.tgz",
+ "integrity": "sha512-wTzYulosJr/6nFnqGW7FrG3jfUUlEf8UjGA0/pyypJl42ExdVgC6xJgcXQ+V8QFn6niSG2Pb8+MIG1mZr2vczw==",
"cpu": [
"arm64"
],
@@ -1784,9 +1742,9 @@
}
},
"node_modules/@next/swc-darwin-x64": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-16.1.1.tgz",
- "integrity": "sha512-hbyKtrDGUkgkyQi1m1IyD3q4I/3m9ngr+V93z4oKHrPcmxwNL5iMWORvLSGAf2YujL+6HxgVvZuCYZfLfb4bGw==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-16.1.6.tgz",
+ "integrity": "sha512-BLFPYPDO+MNJsiDWbeVzqvYd4NyuRrEYVB5k2N3JfWncuHAy2IVwMAOlVQDFjj+krkWzhY2apvmekMkfQR0CUQ==",
"cpu": [
"x64"
],
@@ -1800,9 +1758,9 @@
}
},
"node_modules/@next/swc-linux-arm64-gnu": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-16.1.1.tgz",
- "integrity": "sha512-/fvHet+EYckFvRLQ0jPHJCUI5/B56+2DpI1xDSvi80r/3Ez+Eaa2Yq4tJcRTaB1kqj/HrYKn8Yplm9bNoMJpwQ==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-16.1.6.tgz",
+ "integrity": "sha512-OJYkCd5pj/QloBvoEcJ2XiMnlJkRv9idWA/j0ugSuA34gMT6f5b7vOiCQHVRpvStoZUknhl6/UxOXL4OwtdaBw==",
"cpu": [
"arm64"
],
@@ -1816,9 +1774,9 @@
}
},
"node_modules/@next/swc-linux-arm64-musl": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-16.1.1.tgz",
- "integrity": "sha512-MFHrgL4TXNQbBPzkKKur4Fb5ICEJa87HM7fczFs2+HWblM7mMLdco3dvyTI+QmLBU9xgns/EeeINSZD6Ar+oLg==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-16.1.6.tgz",
+ "integrity": "sha512-S4J2v+8tT3NIO9u2q+S0G5KdvNDjXfAv06OhfOzNDaBn5rw84DGXWndOEB7d5/x852A20sW1M56vhC/tRVbccQ==",
"cpu": [
"arm64"
],
@@ -1832,9 +1790,9 @@
}
},
"node_modules/@next/swc-linux-x64-gnu": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-16.1.1.tgz",
- "integrity": "sha512-20bYDfgOQAPUkkKBnyP9PTuHiJGM7HzNBbuqmD0jiFVZ0aOldz+VnJhbxzjcSabYsnNjMPsE0cyzEudpYxsrUQ==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-16.1.6.tgz",
+ "integrity": "sha512-2eEBDkFlMMNQnkTyPBhQOAyn2qMxyG2eE7GPH2WIDGEpEILcBPI/jdSv4t6xupSP+ot/jkfrCShLAa7+ZUPcJQ==",
"cpu": [
"x64"
],
@@ -1848,9 +1806,9 @@
}
},
"node_modules/@next/swc-linux-x64-musl": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-16.1.1.tgz",
- "integrity": "sha512-9pRbK3M4asAHQRkwaXwu601oPZHghuSC8IXNENgbBSyImHv/zY4K5udBusgdHkvJ/Tcr96jJwQYOll0qU8+fPA==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-16.1.6.tgz",
+ "integrity": "sha512-oicJwRlyOoZXVlxmIMaTq7f8pN9QNbdes0q2FXfRsPhfCi8n8JmOZJm5oo1pwDaFbnnD421rVU409M3evFbIqg==",
"cpu": [
"x64"
],
@@ -1864,9 +1822,9 @@
}
},
"node_modules/@next/swc-win32-arm64-msvc": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-16.1.1.tgz",
- "integrity": "sha512-bdfQkggaLgnmYrFkSQfsHfOhk/mCYmjnrbRCGgkMcoOBZ4n+TRRSLmT/CU5SATzlBJ9TpioUyBW/vWFXTqQRiA==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-16.1.6.tgz",
+ "integrity": "sha512-gQmm8izDTPgs+DCWH22kcDmuUp7NyiJgEl18bcr8irXA5N2m2O+JQIr6f3ct42GOs9c0h8QF3L5SzIxcYAAXXw==",
"cpu": [
"arm64"
],
@@ -1880,9 +1838,9 @@
}
},
"node_modules/@next/swc-win32-x64-msvc": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-16.1.1.tgz",
- "integrity": "sha512-Ncwbw2WJ57Al5OX0k4chM68DKhEPlrXBaSXDCi2kPi5f4d8b3ejr3RRJGfKBLrn2YJL5ezNS7w2TZLHSti8CMw==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-16.1.6.tgz",
+ "integrity": "sha512-NRfO39AIrzBnixKbjuo2YiYhB6o9d8v/ymU9m/Xk8cyVk+k7XylniXkHwjs4s70wedVffc6bQNbufk5v0xEm0A==",
"cpu": [
"x64"
],
@@ -1907,7 +1865,7 @@
"url": "https://paulmillr.com/funding/"
}
},
- "node_modules/@noble/curves/node_modules/@noble/hashes": {
+ "node_modules/@noble/hashes": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
@@ -1964,14 +1922,14 @@
}
},
"node_modules/@react-aria/focus": {
- "version": "3.21.3",
- "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.3.tgz",
- "integrity": "sha512-FsquWvjSCwC2/sBk4b+OqJyONETUIXQ2vM0YdPAuC+QFQh2DT6TIBo6dOZVSezlhudDla69xFBd6JvCFq1AbUw==",
+ "version": "3.21.4",
+ "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.4.tgz",
+ "integrity": "sha512-6gz+j9ip0/vFRTKJMl3R30MHopn4i19HqqLfSQfElxJD+r9hBnYG1Q6Wd/kl/WRR1+CALn2F+rn06jUnf5sT8Q==",
"license": "Apache-2.0",
"dependencies": {
- "@react-aria/interactions": "^3.26.0",
- "@react-aria/utils": "^3.32.0",
- "@react-types/shared": "^3.32.1",
+ "@react-aria/interactions": "^3.27.0",
+ "@react-aria/utils": "^3.33.0",
+ "@react-types/shared": "^3.33.0",
"@swc/helpers": "^0.5.0",
"clsx": "^2.0.0"
},
@@ -1981,15 +1939,15 @@
}
},
"node_modules/@react-aria/interactions": {
- "version": "3.26.0",
- "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.26.0.tgz",
- "integrity": "sha512-AAEcHiltjfbmP1i9iaVw34Mb7kbkiHpYdqieWufldh4aplWgsF11YQZOfaCJW4QoR2ML4Zzoa9nfFwLXA52R7Q==",
+ "version": "3.27.0",
+ "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.27.0.tgz",
+ "integrity": "sha512-D27pOy+0jIfHK60BB26AgqjjRFOYdvVSkwC31b2LicIzRCSPOSP06V4gMHuGmkhNTF4+YWDi1HHYjxIvMeiSlA==",
"license": "Apache-2.0",
"dependencies": {
"@react-aria/ssr": "^3.9.10",
- "@react-aria/utils": "^3.32.0",
+ "@react-aria/utils": "^3.33.0",
"@react-stately/flags": "^3.1.2",
- "@react-types/shared": "^3.32.1",
+ "@react-types/shared": "^3.33.0",
"@swc/helpers": "^0.5.0"
},
"peerDependencies": {
@@ -2013,15 +1971,15 @@
}
},
"node_modules/@react-aria/utils": {
- "version": "3.32.0",
- "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.32.0.tgz",
- "integrity": "sha512-/7Rud06+HVBIlTwmwmJa2W8xVtgxgzm0+kLbuFooZRzKDON6hhozS1dOMR/YLMxyJOaYOTpImcP4vRR9gL1hEg==",
+ "version": "3.33.0",
+ "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.33.0.tgz",
+ "integrity": "sha512-yvz7CMH8d2VjwbSa5nGXqjU031tYhD8ddax95VzJsHSPyqHDEGfxul8RkhGV6oO7bVqZxVs6xY66NIgae+FHjw==",
"license": "Apache-2.0",
"dependencies": {
"@react-aria/ssr": "^3.9.10",
"@react-stately/flags": "^3.1.2",
"@react-stately/utils": "^3.11.0",
- "@react-types/shared": "^3.32.1",
+ "@react-types/shared": "^3.33.0",
"@swc/helpers": "^0.5.0",
"clsx": "^2.0.0"
},
@@ -2052,9 +2010,9 @@
}
},
"node_modules/@react-types/shared": {
- "version": "3.32.1",
- "resolved": "https://registry.npmjs.org/@react-types/shared/-/shared-3.32.1.tgz",
- "integrity": "sha512-famxyD5emrGGpFuUlgOP6fVW2h/ZaF405G5KDi3zPHzyjAWys/8W6NAVJtNbkCkhedmvL0xOhvt8feGXyXaw5w==",
+ "version": "3.33.0",
+ "resolved": "https://registry.npmjs.org/@react-types/shared/-/shared-3.33.0.tgz",
+ "integrity": "sha512-xuUpP6MyuPmJtzNOqF5pzFUIHH2YogyOQfUQHag54PRmWB7AbjuGWBUv0l1UDmz6+AbzAYGmDVAzcRDOu2PFpw==",
"license": "Apache-2.0",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1"
@@ -2089,18 +2047,6 @@
"url": "https://paulmillr.com/funding/"
}
},
- "node_modules/@scure/bip32/node_modules/@noble/hashes": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
- "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
- "license": "MIT",
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- }
- },
"node_modules/@scure/bip39": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/@scure/bip39/-/bip39-1.3.0.tgz",
@@ -2114,38 +2060,26 @@
"url": "https://paulmillr.com/funding/"
}
},
- "node_modules/@scure/bip39/node_modules/@noble/hashes": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
- "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
- "license": "MIT",
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- }
- },
"node_modules/@sinclair/typebox": {
- "version": "0.27.8",
- "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz",
- "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==",
+ "version": "0.27.10",
+ "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.10.tgz",
+ "integrity": "sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==",
"dev": true,
"license": "MIT"
},
"node_modules/@swc/helpers": {
- "version": "0.5.18",
- "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.18.tgz",
- "integrity": "sha512-TXTnIcNJQEKwThMMqBXsZ4VGAza6bvN4pa41Rkqoio6QBKMvo+5lexeTMScGCIxtzgQJzElcvIltani+adC5PQ==",
+ "version": "0.5.19",
+ "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.19.tgz",
+ "integrity": "sha512-QamiFeIK3txNjgUTNppE6MiG3p7TdninpZu0E0PbqVh1a9FNLT2FRhisaa4NcaX52XVhA5l7Pk58Ft7Sqi/2sA==",
"license": "Apache-2.0",
"dependencies": {
"tslib": "^2.8.0"
}
},
"node_modules/@tanstack/query-core": {
- "version": "5.90.16",
- "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.16.tgz",
- "integrity": "sha512-MvtWckSVufs/ja463/K4PyJeqT+HMlJWtw6PrCpywznd2NSgO3m4KwO9RqbFqGg6iDE8vVMFWMeQI4Io3eEYww==",
+ "version": "5.90.20",
+ "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz",
+ "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==",
"license": "MIT",
"funding": {
"type": "github",
@@ -2153,12 +2087,12 @@
}
},
"node_modules/@tanstack/react-query": {
- "version": "5.90.16",
- "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.16.tgz",
- "integrity": "sha512-bpMGOmV4OPmif7TNMteU/Ehf/hoC0Kf98PDc0F4BZkFrEapRMEqI/V6YS0lyzwSV6PQpY1y4xxArUIfBW5LVxQ==",
+ "version": "5.90.21",
+ "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz",
+ "integrity": "sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==",
"license": "MIT",
"dependencies": {
- "@tanstack/query-core": "5.90.16"
+ "@tanstack/query-core": "5.90.20"
},
"funding": {
"type": "github",
@@ -2190,12 +2124,12 @@
}
},
"node_modules/@tanstack/react-virtual": {
- "version": "3.13.16",
- "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.13.16.tgz",
- "integrity": "sha512-y4xLKvLu6UZWiGdNcgk3yYlzCznYIV0m8dSyUzr3eAC0dHLos5V74qhUHxutYddFGgGU8sWLkp6H5c2RCrsrXw==",
+ "version": "3.13.19",
+ "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.13.19.tgz",
+ "integrity": "sha512-KzwmU1IbE0IvCZSm6OXkS+kRdrgW2c2P3Ho3NC+zZXWK6oObv/L+lcV/2VuJ+snVESRlMJ+w/fg4WXI/JzoNGQ==",
"license": "MIT",
"dependencies": {
- "@tanstack/virtual-core": "3.13.16"
+ "@tanstack/virtual-core": "3.13.19"
},
"funding": {
"type": "github",
@@ -2221,9 +2155,9 @@
}
},
"node_modules/@tanstack/virtual-core": {
- "version": "3.13.16",
- "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.13.16.tgz",
- "integrity": "sha512-njazUC8mDkrxWmyZmn/3eXrDcP8Msb3chSr4q6a65RmwdSbMlMCdnOphv6/8mLO7O3Fuza5s4M4DclmvAO5w0w==",
+ "version": "3.13.19",
+ "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.13.19.tgz",
+ "integrity": "sha512-/BMP7kNhzKOd7wnDeB8NrIRNLwkf5AhCYCvtfZV2GXWbBieFm/el0n6LOAXlTi6ZwHICSNnQcIxRCWHrLzDY+g==",
"license": "MIT",
"funding": {
"type": "github",
@@ -2250,9 +2184,9 @@
}
},
"node_modules/@testing-library/react": {
- "version": "16.3.1",
- "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.1.tgz",
- "integrity": "sha512-gr4KtAWqIOQoucWYD/f6ki+j5chXfcPc74Col/6poTyqTmn7zRmodWahWRCp8tYd+GMqBonw6hstNzqjbs6gjw==",
+ "version": "16.3.2",
+ "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.2.tgz",
+ "integrity": "sha512-XU5/SytQM+ykqMnAnvB2umaJNIOsLF3PVv//1Ew4CTcpz0/BRyy/af40qqrt7SjKpDdT1saBMc42CUok5gaw+g==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.12.5"
@@ -2861,15 +2795,15 @@
"license": "MIT"
},
"node_modules/@types/lodash": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-FOvQ0YPD5NOfPgMzJihoT+Za5pdkDJWcbpuj1DjaKZIr/gxodQjY/uWEFlTNqW2ugXHUiL8lRQgw63dzKHZdeQ==",
+ "version": "4.17.24",
+ "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.24.tgz",
+ "integrity": "sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ==",
"license": "MIT"
},
"node_modules/@types/node": {
- "version": "20.19.27",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.27.tgz",
- "integrity": "sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==",
+ "version": "20.19.35",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.35.tgz",
+ "integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
@@ -2885,9 +2819,9 @@
}
},
"node_modules/@types/react": {
- "version": "19.2.7",
- "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz",
- "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==",
+ "version": "19.2.14",
+ "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz",
+ "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==",
"license": "MIT",
"dependencies": {
"csstype": "^3.2.2"
@@ -2969,15 +2903,76 @@
"@types/node": "*"
}
},
+ "node_modules/@typescript-eslint/eslint-plugin": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz",
+ "integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==",
+ "license": "MIT",
+ "dependencies": {
+ "@eslint-community/regexpp": "^4.12.2",
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/type-utils": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
+ "ignore": "^7.0.5",
+ "natural-compare": "^1.4.0",
+ "ts-api-utils": "^2.4.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "@typescript-eslint/parser": "^8.56.1",
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": {
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
+ "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 4"
+ }
+ },
+ "node_modules/@typescript-eslint/parser": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz",
+ "integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
+ "debug": "^4.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.0.0"
+ }
+ },
"node_modules/@typescript-eslint/project-service": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.51.0.tgz",
- "integrity": "sha512-Luv/GafO07Z7HpiI7qeEW5NW8HUtZI/fo/kE0YbtQEFpJRUuR0ajcWfCE5bnMvL7QQFrmT/odMe8QZww8X2nfQ==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz",
+ "integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==",
"license": "MIT",
"dependencies": {
- "@typescript-eslint/tsconfig-utils": "^8.51.0",
- "@typescript-eslint/types": "^8.51.0",
- "debug": "^4.3.4"
+ "@typescript-eslint/tsconfig-utils": "^8.56.1",
+ "@typescript-eslint/types": "^8.56.1",
+ "debug": "^4.4.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2990,11 +2985,15 @@
"typescript": ">=4.8.4 <6.0.0"
}
},
- "node_modules/@typescript-eslint/project-service/node_modules/@typescript-eslint/types": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.51.0.tgz",
- "integrity": "sha512-TizAvWYFM6sSscmEakjY3sPqGwxZRSywSsPEiuZF6d5GmGD9Gvlsv0f6N8FvAAA0CD06l3rIcWNbsN1e5F/9Ag==",
+ "node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz",
+ "integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==",
"license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1"
+ },
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3004,10 +3003,96 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.51.0.tgz",
- "integrity": "sha512-Qi5bSy/vuHeWyir2C8u/uqGMIlIDu8fuiYWv48ZGlZ/k+PRPHtaAu7erpc7p5bzw2WNNSniuxoMSO4Ar6V9OXw==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz",
+ "integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==",
+ "license": "MIT",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <6.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz",
+ "integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1",
+ "debug": "^4.4.3",
+ "ts-api-utils": "^2.4.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/types": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz",
+ "integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==",
+ "license": "MIT",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/typescript-estree": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz",
+ "integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/project-service": "8.56.1",
+ "@typescript-eslint/tsconfig-utils": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
+ "debug": "^4.4.3",
+ "minimatch": "^10.2.2",
+ "semver": "^7.7.3",
+ "tinyglobby": "^0.2.15",
+ "ts-api-utils": "^2.4.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <6.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/utils": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz",
+ "integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==",
"license": "MIT",
+ "dependencies": {
+ "@eslint-community/eslint-utils": "^4.9.1",
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1"
+ },
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -3016,9 +3101,39 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
+ "node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz",
+ "integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/types": "8.56.1",
+ "eslint-visitor-keys": "^5.0.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
+ "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@unrs/resolver-binding-android-arm-eabi": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz",
@@ -3541,9 +3656,9 @@
}
},
"node_modules/acorn": {
- "version": "8.15.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
- "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz",
+ "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==",
"dev": true,
"license": "MIT",
"bin": {
@@ -3564,9 +3679,9 @@
}
},
"node_modules/acorn-walk": {
- "version": "8.3.4",
- "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
- "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
+ "version": "8.3.5",
+ "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.5.tgz",
+ "integrity": "sha512-HEHNfbars9v4pgpW6SO1KSPkfoS0xVOM/9UzkJltjlsHZmJasxg8aXkuZa7SMf8vKGIBhpUsPluQSqhJFCqebw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3633,19 +3748,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/ansi-escapes/node_modules/type-fest": {
- "version": "0.21.3",
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
- "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
- "dev": true,
- "license": "(MIT OR CC0-1.0)",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -3958,9 +4060,9 @@
}
},
"node_modules/autoprefixer": {
- "version": "10.4.23",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz",
- "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==",
+ "version": "10.4.27",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz",
+ "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==",
"dev": true,
"funding": [
{
@@ -3979,7 +4081,7 @@
"license": "MIT",
"dependencies": {
"browserslist": "^4.28.1",
- "caniuse-lite": "^1.0.30001760",
+ "caniuse-lite": "^1.0.30001774",
"fraction.js": "^5.3.4",
"picocolors": "^1.1.1",
"postcss-value-parser": "^4.2.0"
@@ -4027,22 +4129,22 @@
"license": "MIT"
},
"node_modules/axe-core": {
- "version": "4.11.0",
- "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.11.0.tgz",
- "integrity": "sha512-ilYanEU8vxxBexpJd8cWM4ElSQq4QctCLKih0TSfjIfCQTeyH/6zVrmIJfLPrKTKJRbiG+cfnZbQIjAlJmF1jQ==",
+ "version": "4.11.1",
+ "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.11.1.tgz",
+ "integrity": "sha512-BASOg+YwO2C+346x3LZOeoovTIoTrRqEsqMa6fmfAV0P+U9mFr9NsyOEpiYvFjbc64NMrSswhV50WdXzdb/Z5A==",
"license": "MPL-2.0",
"engines": {
"node": ">=4"
}
},
"node_modules/axios": {
- "version": "1.13.2",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz",
- "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==",
+ "version": "1.13.6",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.6.tgz",
+ "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==",
"license": "MIT",
"dependencies": {
- "follow-redirects": "^1.15.6",
- "form-data": "^4.0.4",
+ "follow-redirects": "^1.15.11",
+ "form-data": "^4.0.5",
"proxy-from-env": "^1.1.0"
}
},
@@ -4091,12 +4193,15 @@
"license": "MIT"
},
"node_modules/baseline-browser-mapping": {
- "version": "2.9.11",
- "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz",
- "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==",
+ "version": "2.10.0",
+ "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz",
+ "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==",
"license": "Apache-2.0",
"bin": {
- "baseline-browser-mapping": "dist/cli.js"
+ "baseline-browser-mapping": "dist/cli.cjs"
+ },
+ "engines": {
+ "node": ">=6.0.0"
}
},
"node_modules/bcrypt-pbkdf": {
@@ -4143,18 +4248,19 @@
"license": "MIT"
},
"node_modules/bn.js": {
- "version": "5.2.2",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.2.tgz",
- "integrity": "sha512-v2YAxEmKaBLahNwE1mjp4WON6huMNeuDvagFZW+ASCuA/ku0bXR9hSMw0XpiqMoA3+rmnyck/tPRSFQkoC9Cuw==",
+ "version": "5.2.3",
+ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.3.tgz",
+ "integrity": "sha512-EAcmnPkxpntVL+DS7bO1zhcZNvCkxqtkd0ZY53h06GNQ3DEkkGZ/gKgmDv6DdZQGj9BgfSPKtJJ7Dp1GPP8f7w==",
"license": "MIT"
},
"node_modules/brace-expansion": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
- "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "version": "1.1.12",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"license": "MIT",
"dependencies": {
- "balanced-match": "^1.0.0"
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
}
},
"node_modules/braces": {
@@ -4479,9 +4585,9 @@
}
},
"node_modules/ci-info": {
- "version": "4.3.1",
- "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz",
- "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==",
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.4.0.tgz",
+ "integrity": "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==",
"dev": true,
"funding": [
{
@@ -4650,6 +4756,12 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"license": "MIT"
},
+ "node_modules/convert-source-map": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
+ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
+ "license": "MIT"
+ },
"node_modules/core-util-is": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
@@ -5444,9 +5556,9 @@
"license": "Apache-2.0"
},
"node_modules/diff": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
- "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+ "version": "8.0.3",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz",
+ "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
@@ -5470,6 +5582,18 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/doctrine": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
+ "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "esutils": "^2.0.2"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
"node_modules/dom-accessibility-api": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz",
@@ -5502,9 +5626,9 @@
}
},
"node_modules/electron-to-chromium": {
- "version": "1.5.267",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz",
- "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==",
+ "version": "1.5.302",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz",
+ "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==",
"license": "ISC"
},
"node_modules/elliptic": {
@@ -5522,12 +5646,6 @@
"minimalistic-crypto-utils": "^1.0.1"
}
},
- "node_modules/elliptic/node_modules/bn.js": {
- "version": "4.12.2",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.2.tgz",
- "integrity": "sha512-n4DSx829VRTRByMRGdjQ9iqsN0Bh4OolPsFnaZBLcbi8iXcB+kJ9s7EnRt4wILZNV3kPLHkRVfOc/HvhC3ovDw==",
- "license": "MIT"
- },
"node_modules/emoji-regex": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
@@ -5819,12 +5937,12 @@
}
},
"node_modules/eslint-config-next": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-16.1.1.tgz",
- "integrity": "sha512-55nTpVWm3qeuxoQKLOjQVciKZJUphKrNM0fCcQHAIOGl6VFXgaqeMfv0aKJhs7QtcnlAPhNVqsqRfRjeKBPIUA==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-16.1.6.tgz",
+ "integrity": "sha512-vKq40io2B0XtkkNDYyleATwblNt8xuh3FWp8SpSz3pt7P01OkBFlKsJZ2mWt5WsCySlDQLckb1zMY9yE9Qy0LA==",
"license": "MIT",
"dependencies": {
- "@next/eslint-plugin-next": "16.1.1",
+ "@next/eslint-plugin-next": "16.1.6",
"eslint-import-resolver-node": "^0.3.6",
"eslint-import-resolver-typescript": "^3.5.2",
"eslint-plugin-import": "^2.32.0",
@@ -5969,16 +6087,6 @@
"eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9"
}
},
- "node_modules/eslint-plugin-import/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
"node_modules/eslint-plugin-import/node_modules/debug": {
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
@@ -5988,30 +6096,6 @@
"ms": "^2.1.1"
}
},
- "node_modules/eslint-plugin-import/node_modules/doctrine": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
- "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
- "license": "Apache-2.0",
- "dependencies": {
- "esutils": "^2.0.2"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/eslint-plugin-import/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/eslint-plugin-import/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
@@ -6050,28 +6134,6 @@
"eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9"
}
},
- "node_modules/eslint-plugin-jsx-a11y/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/eslint-plugin-jsx-a11y/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/eslint-plugin-react": {
"version": "7.37.5",
"resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz",
@@ -6123,53 +6185,25 @@
"eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0"
}
},
- "node_modules/eslint-plugin-react/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/eslint-plugin-react/node_modules/doctrine": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
- "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
- "license": "Apache-2.0",
- "dependencies": {
- "esutils": "^2.0.2"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/eslint-plugin-react/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/eslint-plugin-react/node_modules/resolve": {
- "version": "2.0.0-next.5",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz",
- "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==",
+ "version": "2.0.0-next.6",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.6.tgz",
+ "integrity": "sha512-3JmVl5hMGtJ3kMmB3zi3DL25KfkCEyy3Tw7Gmw7z5w8M9WlwoPFnIvwChzu1+cF3iaK3sp18hhPz8ANeimdJfA==",
"license": "MIT",
"dependencies": {
- "is-core-module": "^2.13.0",
+ "es-errors": "^1.3.0",
+ "is-core-module": "^2.16.1",
+ "node-exports-info": "^1.6.0",
+ "object-keys": "^1.1.1",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
"bin": {
"resolve": "bin/resolve"
},
+ "engines": {
+ "node": ">= 0.4"
+ },
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -6201,29 +6235,6 @@
}
},
"node_modules/eslint-visitor-keys": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
- "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
- "license": "Apache-2.0",
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/eslint/node_modules/eslint-visitor-keys": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
@@ -6236,19 +6247,6 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/eslint/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
"node_modules/espree": {
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz",
@@ -6267,19 +6265,6 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/espree/node_modules/eslint-visitor-keys": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
- "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
"node_modules/esquery": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
@@ -6336,18 +6321,6 @@
"@scure/bip39": "1.3.0"
}
},
- "node_modules/ethereum-cryptography/node_modules/@noble/hashes": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
- "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
- "license": "MIT",
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- }
- },
"node_modules/ethereumjs-util": {
"version": "7.1.5",
"resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.5.tgz",
@@ -6903,9 +6876,9 @@
}
},
"node_modules/get-tsconfig": {
- "version": "4.13.0",
- "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz",
- "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==",
+ "version": "4.13.6",
+ "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz",
+ "integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==",
"license": "MIT",
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
@@ -8076,12 +8049,6 @@
"node": ">=10.0.0"
}
},
- "node_modules/keccak/node_modules/node-addon-api": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.2.tgz",
- "integrity": "sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==",
- "license": "MIT"
- },
"node_modules/keccak/node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
@@ -8213,9 +8180,9 @@
}
},
"node_modules/lodash": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
+ "version": "4.17.23",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
+ "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
"license": "MIT"
},
"node_modules/lodash.merge": {
@@ -8436,6 +8403,18 @@
"integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==",
"license": "MIT"
},
+ "node_modules/minimatch": {
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/minimist": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
@@ -8503,12 +8482,12 @@
"license": "MIT"
},
"node_modules/next": {
- "version": "16.1.1",
- "resolved": "https://registry.npmjs.org/next/-/next-16.1.1.tgz",
- "integrity": "sha512-QI+T7xrxt1pF6SQ/JYFz95ro/mg/1Znk5vBebsWwbpejj1T0A23hO7GYEaVac9QUOT2BIMiuzm0L99ooq7k0/w==",
+ "version": "16.1.6",
+ "resolved": "https://registry.npmjs.org/next/-/next-16.1.6.tgz",
+ "integrity": "sha512-hkyRkcu5x/41KoqnROkfTm2pZVbKxvbZRuNvKXLRXxs3VfyO0WhY50TQS40EuKO9SW3rBj/sF3WbVwDACeMZyw==",
"license": "MIT",
"dependencies": {
- "@next/env": "16.1.1",
+ "@next/env": "16.1.6",
"@swc/helpers": "0.5.15",
"baseline-browser-mapping": "^2.8.3",
"caniuse-lite": "^1.0.30001579",
@@ -8522,14 +8501,14 @@
"node": ">=20.9.0"
},
"optionalDependencies": {
- "@next/swc-darwin-arm64": "16.1.1",
- "@next/swc-darwin-x64": "16.1.1",
- "@next/swc-linux-arm64-gnu": "16.1.1",
- "@next/swc-linux-arm64-musl": "16.1.1",
- "@next/swc-linux-x64-gnu": "16.1.1",
- "@next/swc-linux-x64-musl": "16.1.1",
- "@next/swc-win32-arm64-msvc": "16.1.1",
- "@next/swc-win32-x64-msvc": "16.1.1",
+ "@next/swc-darwin-arm64": "16.1.6",
+ "@next/swc-darwin-x64": "16.1.6",
+ "@next/swc-linux-arm64-gnu": "16.1.6",
+ "@next/swc-linux-arm64-musl": "16.1.6",
+ "@next/swc-linux-x64-gnu": "16.1.6",
+ "@next/swc-linux-x64-musl": "16.1.6",
+ "@next/swc-win32-arm64-msvc": "16.1.6",
+ "@next/swc-win32-x64-msvc": "16.1.6",
"sharp": "^0.34.4"
},
"peerDependencies": {
@@ -8593,11 +8572,38 @@
}
},
"node_modules/node-addon-api": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
- "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.2.tgz",
+ "integrity": "sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==",
"license": "MIT"
},
+ "node_modules/node-exports-info": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/node-exports-info/-/node-exports-info-1.6.0.tgz",
+ "integrity": "sha512-pyFS63ptit/P5WqUkt+UUfe+4oevH+bFeIiPPdfb0pFeYEu/1ELnJu5l+5EcTKYL5M7zaAa7S8ddywgXypqKCw==",
+ "license": "MIT",
+ "dependencies": {
+ "array.prototype.flatmap": "^1.3.3",
+ "es-errors": "^1.3.0",
+ "object.entries": "^1.1.9",
+ "semver": "^6.3.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/node-exports-info/node_modules/semver": {
+ "version": "6.3.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+ "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ }
+ },
"node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
@@ -8618,28 +8624,6 @@
}
}
},
- "node_modules/node-fetch/node_modules/tr46": {
- "version": "0.0.3",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
- "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
- "license": "MIT"
- },
- "node_modules/node-fetch/node_modules/webidl-conversions": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
- "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
- "license": "BSD-2-Clause"
- },
- "node_modules/node-fetch/node_modules/whatwg-url": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
- "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
- "license": "MIT",
- "dependencies": {
- "tr46": "~0.0.3",
- "webidl-conversions": "^3.0.0"
- }
- },
"node_modules/node-gyp-build": {
"version": "4.8.4",
"resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz",
@@ -9252,13 +9236,6 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
- "node_modules/pretty-format/node_modules/react-is": {
- "version": "18.3.1",
- "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
- "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
@@ -9299,9 +9276,9 @@
"license": "MIT"
},
"node_modules/pump": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz",
- "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==",
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz",
+ "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -9329,9 +9306,9 @@
}
},
"node_modules/qs": {
- "version": "6.14.0",
- "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
- "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
+ "version": "6.15.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.15.0.tgz",
+ "integrity": "sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
@@ -9374,26 +9351,33 @@
}
},
"node_modules/react": {
- "version": "19.2.3",
- "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz",
- "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==",
+ "version": "19.2.4",
+ "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz",
+ "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-dom": {
- "version": "19.2.3",
- "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz",
- "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==",
+ "version": "19.2.4",
+ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz",
+ "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==",
"license": "MIT",
"dependencies": {
"scheduler": "^0.27.0"
},
"peerDependencies": {
- "react": "^19.2.3"
+ "react": "^19.2.4"
}
},
+ "node_modules/react-is": {
+ "version": "18.3.1",
+ "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
+ "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/react-tabs": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/react-tabs/-/react-tabs-6.1.0.tgz",
@@ -9814,10 +9798,16 @@
"node": ">=18.0.0"
}
},
+ "node_modules/secp256k1/node_modules/node-addon-api": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
+ "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
+ "license": "MIT"
+ },
"node_modules/semver": {
- "version": "7.7.3",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
- "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
+ "version": "7.7.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz",
+ "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
@@ -10658,6 +10648,12 @@
"node": ">=16"
}
},
+ "node_modules/tr46": {
+ "version": "0.0.3",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+ "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
+ "license": "MIT"
+ },
"node_modules/tree-kill": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz",
@@ -10668,6 +10664,18 @@
"tree-kill": "cli.js"
}
},
+ "node_modules/ts-api-utils": {
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
+ "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18.12"
+ },
+ "peerDependencies": {
+ "typescript": ">=4.8.4"
+ }
+ },
"node_modules/ts-interface-checker": {
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
@@ -10789,6 +10797,19 @@
"node": ">= 0.8.0"
}
},
+ "node_modules/type-fest": {
+ "version": "0.21.3",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
+ "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
+ "dev": true,
+ "license": "(MIT OR CC0-1.0)",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/typed-array-buffer": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
@@ -10878,149 +10899,15 @@
}
},
"node_modules/typescript-eslint": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.51.0.tgz",
- "integrity": "sha512-jh8ZuM5oEh2PSdyQG9YAEM1TCGuWenLSuSUhf/irbVUNW9O5FhbFVONviN2TgMTBnUmyHv7E56rYnfLZK6TkiA==",
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/eslint-plugin": "8.51.0",
- "@typescript-eslint/parser": "8.51.0",
- "@typescript-eslint/typescript-estree": "8.51.0",
- "@typescript-eslint/utils": "8.51.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <6.0.0"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.51.0.tgz",
- "integrity": "sha512-XtssGWJvypyM2ytBnSnKtHYOGT+4ZwTnBVl36TA4nRO2f4PRNGz5/1OszHzcZCvcBMh+qb7I06uoCmLTRdR9og==",
- "license": "MIT",
- "dependencies": {
- "@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.51.0",
- "@typescript-eslint/type-utils": "8.51.0",
- "@typescript-eslint/utils": "8.51.0",
- "@typescript-eslint/visitor-keys": "8.51.0",
- "ignore": "^7.0.0",
- "natural-compare": "^1.4.0",
- "ts-api-utils": "^2.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "@typescript-eslint/parser": "^8.51.0",
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <6.0.0"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/parser": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.51.0.tgz",
- "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==",
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/scope-manager": "8.51.0",
- "@typescript-eslint/types": "8.51.0",
- "@typescript-eslint/typescript-estree": "8.51.0",
- "@typescript-eslint/visitor-keys": "8.51.0",
- "debug": "^4.3.4"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <6.0.0"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/scope-manager": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.51.0.tgz",
- "integrity": "sha512-JhhJDVwsSx4hiOEQPeajGhCWgBMBwVkxC/Pet53EpBVs7zHHtayKefw1jtPaNRXpI9RA2uocdmpdfE7T+NrizA==",
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/types": "8.51.0",
- "@typescript-eslint/visitor-keys": "8.51.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/type-utils": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.51.0.tgz",
- "integrity": "sha512-0XVtYzxnobc9K0VU7wRWg1yiUrw4oQzexCG2V2IDxxCxhqBMSMbjB+6o91A+Uc0GWtgjCa3Y8bi7hwI0Tu4n5Q==",
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/types": "8.51.0",
- "@typescript-eslint/typescript-estree": "8.51.0",
- "@typescript-eslint/utils": "8.51.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^2.2.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <6.0.0"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/types": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.51.0.tgz",
- "integrity": "sha512-TizAvWYFM6sSscmEakjY3sPqGwxZRSywSsPEiuZF6d5GmGD9Gvlsv0f6N8FvAAA0CD06l3rIcWNbsN1e5F/9Ag==",
- "license": "MIT",
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.51.0.tgz",
- "integrity": "sha512-1qNjGqFRmlq0VW5iVlcyHBbCjPB7y6SxpBkrbhNWMy/65ZoncXCEPJxkRZL8McrseNH6lFhaxCIaX+vBuFnRng==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.1.tgz",
+ "integrity": "sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==",
"license": "MIT",
"dependencies": {
- "@typescript-eslint/project-service": "8.51.0",
- "@typescript-eslint/tsconfig-utils": "8.51.0",
- "@typescript-eslint/types": "8.51.0",
- "@typescript-eslint/visitor-keys": "8.51.0",
- "debug": "^4.3.4",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "tinyglobby": "^0.2.15",
- "ts-api-utils": "^2.2.0"
+ "@typescript-eslint/eslint-plugin": "8.56.1",
+ "@typescript-eslint/parser": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -11030,97 +10917,10 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/utils": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.51.0.tgz",
- "integrity": "sha512-11rZYxSe0zabiKaCP2QAwRf/dnmgFgvTmeDTtZvUvXG3UuAdg/GU02NExmmIXzz3vLGgMdtrIosI84jITQOxUA==",
- "license": "MIT",
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.7.0",
- "@typescript-eslint/scope-manager": "8.51.0",
- "@typescript-eslint/types": "8.51.0",
- "@typescript-eslint/typescript-estree": "8.51.0"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <6.0.0"
- }
- },
- "node_modules/typescript-eslint/node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.51.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.51.0.tgz",
- "integrity": "sha512-mM/JRQOzhVN1ykejrvwnBRV3+7yTKK8tVANVN3o1O0t0v7o+jqdVu9crPy5Y9dov15TJk/FTIgoUGHrTOVL3Zg==",
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/types": "8.51.0",
- "eslint-visitor-keys": "^4.2.1"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/typescript-eslint/node_modules/eslint-visitor-keys": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
- "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
- "license": "Apache-2.0",
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/typescript-eslint/node_modules/ignore": {
- "version": "7.0.5",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
- "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
- "license": "MIT",
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/typescript-eslint/node_modules/minimatch": {
- "version": "9.0.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
- "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/typescript-eslint/node_modules/ts-api-utils": {
- "version": "2.4.0",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
- "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
- "license": "MIT",
- "engines": {
- "node": ">=18.12"
- },
- "peerDependencies": {
- "typescript": ">=4.8.4"
- }
- },
"node_modules/unbox-primitive": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz",
@@ -11320,6 +11120,22 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/webidl-conversions": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+ "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
+ "license": "BSD-2-Clause"
+ },
+ "node_modules/whatwg-url": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+ "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
+ "license": "MIT",
+ "dependencies": {
+ "tr46": "~0.0.3",
+ "webidl-conversions": "^3.0.0"
+ }
+ },
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -11407,9 +11223,9 @@
}
},
"node_modules/which-typed-array": {
- "version": "1.1.19",
- "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
- "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
+ "version": "1.1.20",
+ "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz",
+ "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==",
"license": "MIT",
"dependencies": {
"available-typed-arrays": "^1.0.7",
@@ -11463,9 +11279,9 @@
"license": "ISC"
},
"node_modules/ws": {
- "version": "8.18.3",
- "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
- "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
+ "version": "8.19.0",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
+ "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
diff --git a/ExplorerFrontend/package.json b/ExplorerFrontend/package.json
index bcee79e..b68baca 100644
--- a/ExplorerFrontend/package.json
+++ b/ExplorerFrontend/package.json
@@ -24,14 +24,14 @@
"@visx/scale": "^3.2.0",
"@visx/shape": "^3.2.0",
"@visx/tooltip": "^3.12.0",
- "axios": "^1.13.2",
+ "axios": "^1.13.6",
"buffer": "^6.0.3",
"d3": "^7.4.4",
"encoding": "^0.1.13",
- "eslint-config-next": "^16.1.1",
+ "eslint-config-next": "^16.1.6",
"ethereumjs-util": "^7.1.5",
"json-formatter-js": "^2.3.4",
- "next": "^16.1.1",
+ "next": "^16.1.6",
"qrcode.react": "^4.2.0",
"react": "^19.2.3",
"react-dom": "^19.2.3",
@@ -43,7 +43,26 @@
"start": "next start",
"lint": "eslint ."
},
-"browserslist": {
+ "overrides": {
+ "minimatch": {
+ ".": "^3.1.5",
+ "typescript-eslint": {
+ "@typescript-eslint/typescript-estree": {
+ "minimatch": "^9.0.9"
+ }
+ }
+ },
+ "qs": "^6.15.0",
+ "lodash": "^4.17.23",
+ "diff": "^8.0.3",
+ "bn.js": {
+ ".": "^5.2.3",
+ "elliptic": {
+ "bn.js": "^4.12.3"
+ }
+ }
+ },
+ "browserslist": {
"production": [
">0.2%",
"not dead",
diff --git a/Zond2mongoDB/configs/setup.go b/Zond2mongoDB/configs/setup.go
index 2f5a4a0..fd13847 100644
--- a/Zond2mongoDB/configs/setup.go
+++ b/Zond2mongoDB/configs/setup.go
@@ -353,6 +353,24 @@ func initializeCollections(db *mongo.Database) {
// Create and set up the rest of the collections
ensureCollection(db, "blocks", nil)
+
+ // Add index on blockNumberInt for efficient numeric range queries on blocks.
+ // This replaces the old pattern of doing hex string $gte/$lte which produced
+ // incorrect lexicographic ordering (e.g. "0x9" > "0x10").
+ blocksCollection := db.Collection("blocks")
+ _, err = blocksCollection.Indexes().CreateOne(
+ ctx,
+ mongo.IndexModel{
+ Keys: bson.D{{Key: "blockNumberInt", Value: -1}},
+ Options: options.Index().SetName("blockNumberInt_desc_idx"),
+ },
+ )
+ if err != nil {
+ Logger.Error("Failed to create blockNumberInt index for blocks collection", zap.Error(err))
+ } else {
+ Logger.Info("Blocks collection initialized with blockNumberInt index")
+ }
+
ensureCollection(db, "validators", nil)
ensureCollection(db, "contractCode", nil)
ensureCollection(db, "transactionByAddress", nil)
@@ -364,6 +382,26 @@ func initializeCollections(db *mongo.Database) {
ensureCollection(db, "totalCirculatingSupply", nil)
ensureCollection(db, "sync_state", nil)
+ // Create indexes on the validators collection for per-document lookup.
+ validatorsCollection := db.Collection("validators")
+ _, err = validatorsCollection.Indexes().CreateMany(ctx, []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "publicKeyHex", Value: 1}},
+ Options: options.Index().SetName("validators_pubkey_idx"),
+ },
+ {
+ Keys: bson.D{{Key: "status", Value: 1}},
+ Options: options.Index().SetName("validators_status_idx"),
+ },
+ {
+ Keys: bson.D{{Key: "effectiveBalance", Value: -1}},
+ Options: options.Index().SetName("validators_balance_desc_idx"),
+ },
+ })
+ if err != nil {
+ Logger.Warn("Could not create validators collection indexes", zap.Error(err))
+ }
+
Logger.Info("All collections initialized successfully")
}
@@ -403,10 +441,10 @@ func GetTokenTransfersCollection() *mongo.Collection {
}
func GetListCollectionNames(client *mongo.Client) []string {
- result, err := client.Database("qrldata-z").ListCollectionNames(
- context.TODO(),
- bson.D{})
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+ result, err := client.Database("qrldata-z").ListCollectionNames(ctx, bson.D{})
if err != nil {
log.Fatal(err)
}
diff --git a/Zond2mongoDB/db/blocks.go b/Zond2mongoDB/db/blocks.go
index 6b49158..9a7aea4 100644
--- a/Zond2mongoDB/db/blocks.go
+++ b/Zond2mongoDB/db/blocks.go
@@ -5,6 +5,7 @@ import (
"Zond2mongoDB/models"
"Zond2mongoDB/utils"
"context"
+ "strconv"
"strings"
"time"
@@ -14,6 +15,20 @@ import (
"go.uber.org/zap"
)
+// HexToInt64 parses a hex block number string (e.g. "0x1a2b") to int64.
+// Returns 0 on any parse error.
+func HexToInt64(hex string) int64 {
+ s := strings.TrimPrefix(hex, "0x")
+ if s == "" {
+ return 0
+ }
+ n, err := strconv.ParseInt(s, 16, 64)
+ if err != nil {
+ return 0
+ }
+ return n
+}
+
// Collection name constants for consistency
const (
// SyncStateCollection is the collection for tracking sync state
@@ -185,11 +200,14 @@ func StoreLastKnownBlockNumber(blockNumber string) error {
err := syncColl.FindOne(ctx, bson.M{"_id": lastSyncedBlockID}).Decode(&existingDoc)
+ blockNumberIntVal := HexToInt64(blockNumber)
+
if err == mongo.ErrNoDocuments {
// Document doesn't exist, create it
_, err = syncColl.InsertOne(ctx, bson.M{
- "_id": lastSyncedBlockID,
- "block_number": blockNumber,
+ "_id": lastSyncedBlockID,
+ "block_number": blockNumber,
+ "block_number_int": blockNumberIntVal,
})
if err != nil {
@@ -217,15 +235,20 @@ func StoreLastKnownBlockNumber(blockNumber string) error {
}
}
- // Document exists or was just created by another goroutine
- // Only update if the new block number is higher
+ // Document exists or was just created by another goroutine.
+ // Use the integer field for the $lt guard so the comparison is numeric,
+ // not the lexicographic hex string comparison that would produce wrong
+ // ordering (e.g. "0x9" sorts after "0x10" lexicographically).
result, err := syncColl.UpdateOne(
ctx,
bson.M{
- "_id": lastSyncedBlockID,
- "block_number": bson.M{"$lt": blockNumber},
+ "_id": lastSyncedBlockID,
+ "block_number_int": bson.M{"$lt": blockNumberIntVal},
},
- bson.M{"$set": bson.M{"block_number": blockNumber}},
+ bson.M{"$set": bson.M{
+ "block_number": blockNumber,
+ "block_number_int": blockNumberIntVal,
+ }},
)
if err != nil {
@@ -370,11 +393,19 @@ func InsertBlockDocument(block models.ZondDatabaseBlock) {
return
}
- // Block doesn't exist, insert it
+ // Block doesn't exist, insert it. Use the wrapper struct so blockNumberInt
+ // is written alongside the hex number field for efficient range queries.
ctx, cancel := context.WithTimeout(context.Background(), dbTimeout)
defer cancel()
- result, err := configs.BlocksCollections.InsertOne(ctx, block)
+ doc := models.ZondDatabaseBlockWithInt{
+ Jsonrpc: block.Jsonrpc,
+ ID: block.ID,
+ Result: block.Result,
+ BlockNumberInt: HexToInt64(block.Result.Number),
+ }
+
+ result, err := configs.BlocksCollections.InsertOne(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insert block",
zap.String("blockNumber", block.Result.Number),
@@ -386,8 +417,10 @@ func InsertBlockDocument(block models.ZondDatabaseBlock) {
}
}
-// InsertManyBlockDocuments inserts multiple block documents into the database
-// Filters out blocks that already exist before inserting
+// InsertManyBlockDocuments inserts multiple block documents into the database.
+// Instead of calling BlockExists() for each block individually (N round-trips),
+// it issues a single Find with $in to identify all already-stored block numbers
+// and then performs one InsertMany for the remainder.
func InsertManyBlockDocuments(blocks []interface{}) {
if len(blocks) == 0 {
return
@@ -396,55 +429,97 @@ func InsertManyBlockDocuments(blocks []interface{}) {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- // Create a slice for unique blocks
- var uniqueBlocks []interface{}
-
- // Track which block numbers we've already processed
- processedBlockNumbers := make(map[string]bool)
+ // --- Step 1: de-duplicate within the incoming batch ---
+ // Map block number → first occurrence to preserve insertion order.
+ seenInBatch := make(map[string]bool, len(blocks))
+ type entry struct {
+ number string
+ block models.ZondDatabaseBlock
+ }
+ var candidates []entry
- // For each block in the input
for _, blockInterface := range blocks {
- // Cast to the correct type
block, ok := blockInterface.(models.ZondDatabaseBlock)
if !ok {
configs.Logger.Warn("Failed to cast block to ZondDatabaseBlock, skipping")
continue
}
-
- blockNumber := block.Result.Number
-
- // Skip if we've already processed this block number in this batch
- if _, exists := processedBlockNumbers[blockNumber]; exists {
- configs.Logger.Info("Skipping duplicate block in batch",
- zap.String("blockNumber", blockNumber))
+ num := block.Result.Number
+ if seenInBatch[num] {
+ configs.Logger.Debug("Skipping duplicate block in batch",
+ zap.String("blockNumber", num))
continue
}
+ seenInBatch[num] = true
+ candidates = append(candidates, entry{number: num, block: block})
+ }
- // Check if this block exists in the database
- if BlockExists(blockNumber) {
- configs.Logger.Info("Block already exists in DB, skipping insertion",
- zap.String("blockNumber", blockNumber))
- continue
- }
+ if len(candidates) == 0 {
+ return
+ }
- // Block is unique, add it to our list
- uniqueBlocks = append(uniqueBlocks, blockInterface)
- processedBlockNumbers[blockNumber] = true
+ // --- Step 2: single $in query to find which block numbers already exist in DB ---
+ numbers := make([]string, len(candidates))
+ for i, c := range candidates {
+ numbers[i] = c.number
}
- // Only insert if we have unique blocks
- if len(uniqueBlocks) > 0 {
- configs.Logger.Info("Inserting unique blocks",
- zap.Int("originalCount", len(blocks)),
- zap.Int("uniqueCount", len(uniqueBlocks)))
+ findOpts := options.Find().SetProjection(bson.M{"result.number": 1})
+ cursor, err := configs.BlocksCollections.Find(
+ ctx,
+ bson.M{"result.number": bson.M{"$in": numbers}},
+ findOpts,
+ )
- _, err := configs.BlocksCollections.InsertMany(ctx, uniqueBlocks)
- if err != nil {
- configs.Logger.Warn("Failed to insert many block documents", zap.Error(err))
- }
+ existsInDB := make(map[string]bool)
+ if err != nil {
+ configs.Logger.Warn("Failed to query existing blocks, will attempt insertion anyway",
+ zap.Error(err))
} else {
+ defer cursor.Close(ctx)
+ var existing []struct {
+ Result struct {
+ Number string `bson:"number"`
+ } `bson:"result"`
+ }
+ if decodeErr := cursor.All(ctx, &existing); decodeErr != nil {
+ configs.Logger.Warn("Failed to decode existing block numbers", zap.Error(decodeErr))
+ }
+ for _, e := range existing {
+ existsInDB[e.Result.Number] = true
+ }
+ }
+
+ // --- Step 3: build the final insert list ---
+ var uniqueBlocks []interface{}
+ for _, c := range candidates {
+ if existsInDB[c.number] {
+ configs.Logger.Debug("Block already exists in DB, skipping insertion",
+ zap.String("blockNumber", c.number))
+ continue
+ }
+ doc := models.ZondDatabaseBlockWithInt{
+ Jsonrpc: c.block.Jsonrpc,
+ ID: c.block.ID,
+ Result: c.block.Result,
+ BlockNumberInt: HexToInt64(c.number),
+ }
+ uniqueBlocks = append(uniqueBlocks, doc)
+ }
+
+ if len(uniqueBlocks) == 0 {
configs.Logger.Info("No unique blocks to insert",
zap.Int("originalCount", len(blocks)))
+ return
+ }
+
+ configs.Logger.Info("Inserting unique blocks",
+ zap.Int("originalCount", len(blocks)),
+ zap.Int("uniqueCount", len(uniqueBlocks)))
+
+ _, err = configs.BlocksCollections.InsertMany(ctx, uniqueBlocks)
+ if err != nil {
+ configs.Logger.Warn("Failed to insert many block documents", zap.Error(err))
}
}
diff --git a/Zond2mongoDB/db/circulating.go b/Zond2mongoDB/db/circulating.go
index d5ee061..e43a2d1 100644
--- a/Zond2mongoDB/db/circulating.go
+++ b/Zond2mongoDB/db/circulating.go
@@ -7,7 +7,6 @@ import (
"time"
"go.mongodb.org/mongo-driver/bson/primitive"
- "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"go.uber.org/zap"
)
@@ -18,28 +17,12 @@ type Address struct {
}
func UpdateTotalBalance() {
- clientOptions := options.Client().ApplyURI("mongodb://localhost:27017")
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
- defer cancel() // Properly cancel the context
+ // Use the shared DB connection and collection references instead of opening
+ // a separate connection to localhost:27017.
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
- client, err := mongo.Connect(ctx, clientOptions)
- if err != nil {
- configs.Logger.Error("Failed to connect to MongoDB", zap.Error(err))
- return
- }
- defer func() {
- if err := client.Disconnect(ctx); err != nil {
- configs.Logger.Error("Failed to disconnect from MongoDB", zap.Error(err))
- }
- }()
-
- // Check the connection
- if err = client.Ping(ctx, nil); err != nil {
- configs.Logger.Error("Failed to ping MongoDB", zap.Error(err))
- return
- }
-
- destCollection := client.Database("qrldata-z").Collection("totalCirculatingSupply")
+ destCollection := configs.GetCollection(configs.DB, "totalCirculatingSupply")
// Get initial total balance
total := big.NewInt(0)
diff --git a/Zond2mongoDB/db/coinbase.go b/Zond2mongoDB/db/coinbase.go
index a54510e..3ce677a 100644
--- a/Zond2mongoDB/db/coinbase.go
+++ b/Zond2mongoDB/db/coinbase.go
@@ -2,8 +2,9 @@ package db
import (
"Zond2mongoDB/configs"
+ "Zond2mongoDB/validation"
"context"
- "strings"
+ "time"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
@@ -11,15 +12,18 @@ import (
)
func InsertManyCoinbase(doc []interface{}) {
- _, err := configs.CoinbaseCollections.InsertMany(context.TODO(), doc)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ _, err := configs.CoinbaseCollections.InsertMany(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insertMany in the coinbase collection: ", zap.Error(err))
}
}
func InsertCoinbaseDocument(blockHash string, blockNumber uint64, from string, hash string, nonce uint64, transactionIndex uint64, blockproposerReward uint64, attestorReward uint64, feeReward uint64, txType uint8, chainId uint8, signature string, pk string) (*mongo.InsertOneResult, error) {
- // Normalize address to lowercase for consistent storage
- from = strings.ToLower(from)
+ // Normalize address to canonical Z-prefix form
+ from = validation.ConvertToZAddress(from)
doc := primitive.D{
{Key: "blockhash", Value: blockHash},
@@ -37,7 +41,10 @@ func InsertCoinbaseDocument(blockHash string, blockNumber uint64, from string, h
{Key: "pk", Value: pk},
}
- result, err := configs.CoinbaseCollections.InsertOne(context.TODO(), doc)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ result, err := configs.CoinbaseCollections.InsertOne(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insert in the coinbase collection: ", zap.Error(err))
}
diff --git a/Zond2mongoDB/db/contracts.go b/Zond2mongoDB/db/contracts.go
index 09f9a00..0d5bc41 100644
--- a/Zond2mongoDB/db/contracts.go
+++ b/Zond2mongoDB/db/contracts.go
@@ -4,10 +4,10 @@ import (
"Zond2mongoDB/configs"
"Zond2mongoDB/models"
"Zond2mongoDB/rpc"
+ "Zond2mongoDB/validation"
"context"
"errors"
"fmt"
- "strings"
"time"
"go.mongodb.org/mongo-driver/bson"
@@ -21,9 +21,9 @@ func StoreContract(contract models.ContractInfo) error {
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
defer cancel()
- // Normalize addresses to lowercase for consistent storage
- contract.Address = strings.ToLower(contract.Address)
- contract.CreatorAddress = strings.ToLower(contract.CreatorAddress)
+ // Normalize addresses to canonical Z-prefix form
+ contract.Address = validation.ConvertToZAddress(contract.Address)
+ contract.CreatorAddress = validation.ConvertToZAddress(contract.CreatorAddress)
collection := configs.GetContractsCollection()
filter := bson.M{"address": contract.Address}
@@ -114,8 +114,8 @@ func GetContract(address string) (*models.ContractInfo, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- // Normalize address to lowercase for consistent lookup
- address = strings.ToLower(address)
+ // Normalize address to canonical Z-prefix form
+ address = validation.ConvertToZAddress(address)
var contract models.ContractInfo
err := configs.GetContractsCollection().FindOne(ctx, bson.M{"address": address}).Decode(&contract)
@@ -222,8 +222,8 @@ func processContracts(tx *models.Transaction) (string, string, string, bool) {
// IsAddressContract checks if an address is a contract by querying the contractCode collection
// and falling back to RPC getCode call if not found
func IsAddressContract(address string) bool {
- // Normalize address to lowercase for consistent lookup
- address = strings.ToLower(address)
+ // Normalize address to canonical Z-prefix form
+ address = validation.ConvertToZAddress(address)
// First check our database
contract := getContractFromDB(address)
@@ -316,7 +316,11 @@ func getContractFromDB(address string) *models.ContractInfo {
// If not found in main collection, check the contractCode collection
collection := configs.GetCollection(configs.DB, "contractCode")
var contract models.ContractInfo
- err = collection.FindOne(context.Background(), bson.M{"address": address}).Decode(&contract)
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ err = collection.FindOne(ctx, bson.M{"address": address}).Decode(&contract)
if err != nil {
return nil
}
diff --git a/Zond2mongoDB/db/tokenbalances.go b/Zond2mongoDB/db/tokenbalances.go
index 38762c9..303d58d 100644
--- a/Zond2mongoDB/db/tokenbalances.go
+++ b/Zond2mongoDB/db/tokenbalances.go
@@ -4,9 +4,9 @@ import (
"Zond2mongoDB/configs"
"Zond2mongoDB/models"
"Zond2mongoDB/rpc"
+ "Zond2mongoDB/validation"
"context"
"fmt"
- "strings"
"time"
"go.mongodb.org/mongo-driver/bson"
@@ -16,24 +16,25 @@ import (
// StoreTokenBalance updates the token balance for a given address
func StoreTokenBalance(contractAddress string, holderAddress string, amount string, blockNumber string) error {
- // Normalize contract address to lowercase to match contractCode collection
- contractAddress = strings.ToLower(contractAddress)
+ // Normalize addresses to canonical Z-prefix form
+ contractAddress = validation.ConvertToZAddress(contractAddress)
- configs.Logger.Info("Attempting to store token balance",
+ // Debug-level log for per-record operations; Info is reserved for batch summaries.
+ configs.Logger.Debug("Attempting to store token balance",
zap.String("contractAddress", contractAddress),
zap.String("holderAddress", holderAddress),
zap.String("transferAmount", amount),
zap.String("blockNumber", blockNumber))
- // Normalize holder address to lowercase for consistent storage
- holderAddress = strings.ToLower(holderAddress)
+ // Normalize holder address to canonical Z-prefix form
+ holderAddress = validation.ConvertToZAddress(holderAddress)
// Special handling for zero address (QRL uses Z prefix)
- if holderAddress == "z0" ||
- holderAddress == strings.ToLower(configs.QRLZeroAddress) ||
+ if holderAddress == "Z0" ||
+ holderAddress == configs.QRLZeroAddress ||
holderAddress == "0x0" ||
holderAddress == "0x0000000000000000000000000000000000000000" {
- configs.Logger.Info("Skipping token balance update for zero address",
+ configs.Logger.Debug("Skipping token balance update for zero address",
zap.String("holderAddress", holderAddress))
return nil
}
@@ -54,10 +55,10 @@ func StoreTokenBalance(contractAddress string, holderAddress string, amount stri
zap.Error(err))
// Continue with a zero balance if we can't get the actual balance
// This allows us to at least record that we tried to update this token balance
- configs.Logger.Info("Using default zero balance after RPC failure")
+ configs.Logger.Debug("Using default zero balance after RPC failure")
balance = "0"
} else {
- configs.Logger.Info("Retrieved current token balance",
+ configs.Logger.Debug("Retrieved current token balance",
zap.String("contractAddress", contractAddress),
zap.String("holderAddress", holderAddress),
zap.String("balance", balance))
@@ -83,9 +84,11 @@ func StoreTokenBalance(contractAddress string, holderAddress string, amount stri
"holderAddress": holderAddress,
}
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
// Perform upsert
- configs.Logger.Debug("Performing upsert operation for token balance")
- result, err := collection.UpdateOne(context.Background(), filter, update, opts)
+ result, err := collection.UpdateOne(ctx, filter, update, opts)
if err != nil {
configs.Logger.Error("Failed to update token balance in database",
zap.String("contractAddress", contractAddress),
@@ -94,7 +97,7 @@ func StoreTokenBalance(contractAddress string, holderAddress string, amount stri
return fmt.Errorf("failed to update token balance: %v", err)
}
- configs.Logger.Info("Token balance update completed",
+ configs.Logger.Debug("Token balance update completed",
zap.String("contractAddress", contractAddress),
zap.String("holderAddress", holderAddress),
zap.Int64("matchedCount", result.MatchedCount),
@@ -114,7 +117,10 @@ func GetTokenBalance(contractAddress string, holderAddress string) (*models.Toke
"holderAddress": holderAddress,
}
- err := collection.FindOne(context.Background(), filter).Decode(&balance)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ err := collection.FindOne(ctx, filter).Decode(&balance)
if err != nil {
return nil, err
}
@@ -128,13 +134,17 @@ func GetTokenHolders(contractAddress string) ([]models.TokenBalance, error) {
var balances []models.TokenBalance
filter := bson.M{"contractAddress": contractAddress}
- cursor, err := collection.Find(context.Background(), filter)
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ cursor, err := collection.Find(ctx, filter)
if err != nil {
return nil, err
}
- defer cursor.Close(context.Background())
+ defer cursor.Close(ctx)
- err = cursor.All(context.Background(), &balances)
+ err = cursor.All(ctx, &balances)
if err != nil {
return nil, err
}
diff --git a/Zond2mongoDB/db/tokentransfers.go b/Zond2mongoDB/db/tokentransfers.go
index 0ae0415..bb382b3 100644
--- a/Zond2mongoDB/db/tokentransfers.go
+++ b/Zond2mongoDB/db/tokentransfers.go
@@ -4,8 +4,10 @@ import (
"Zond2mongoDB/configs"
"Zond2mongoDB/models"
"Zond2mongoDB/rpc"
+ "Zond2mongoDB/validation"
"context"
"strings"
+ "time"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
@@ -17,21 +19,9 @@ import (
func StoreTokenTransfer(transfer models.TokenTransfer) error {
// Get explicit reference to the tokenTransfers collection
collection := configs.GetTokenTransfersCollection()
- ctx := context.Background()
- // Log the collection name
- configs.Logger.Info("Using collection for token transfers",
- zap.String("collection", "tokenTransfers"))
-
- // Note: indexes are created once during initialization in InitializeTokenTransfersCollection()
- // We don't create indexes here to avoid "IndexKeySpecsConflict" errors on high-frequency calls
-
- // Store the transfer
- configs.Logger.Info("Inserting token transfer document",
- zap.String("token", transfer.TokenSymbol),
- zap.String("from", transfer.From),
- zap.String("to", transfer.To),
- zap.String("txHash", transfer.TxHash))
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
// Additional validation and normalization before inserting
if transfer.From == "" {
@@ -42,10 +32,17 @@ func StoreTokenTransfer(transfer models.TokenTransfer) error {
transfer.To = configs.QRLZeroAddress // Normalize empty to address to zero address
}
- // Normalize addresses to lowercase for consistent storage
- transfer.From = strings.ToLower(transfer.From)
- transfer.To = strings.ToLower(transfer.To)
- transfer.ContractAddress = strings.ToLower(transfer.ContractAddress)
+ // Normalize addresses to canonical Z-prefix form
+ transfer.From = validation.ConvertToZAddress(transfer.From)
+ transfer.To = validation.ConvertToZAddress(transfer.To)
+ transfer.ContractAddress = validation.ConvertToZAddress(transfer.ContractAddress)
+
+ // Debug-level log for per-record operations; Info is reserved for batch summaries.
+ configs.Logger.Debug("Inserting token transfer document",
+ zap.String("token", transfer.TokenSymbol),
+ zap.String("from", transfer.From),
+ zap.String("to", transfer.To),
+ zap.String("txHash", transfer.TxHash))
_, err := collection.InsertOne(ctx, transfer)
if err != nil {
@@ -56,7 +53,7 @@ func StoreTokenTransfer(transfer models.TokenTransfer) error {
return err
}
- configs.Logger.Info("Successfully stored token transfer in database",
+ configs.Logger.Debug("Successfully stored token transfer in database",
zap.String("token", transfer.TokenSymbol),
zap.String("txHash", transfer.TxHash))
return nil
@@ -65,7 +62,9 @@ func StoreTokenTransfer(transfer models.TokenTransfer) error {
// GetTokenTransfersByContract retrieves all transfers for a specific token contract
func GetTokenTransfersByContract(contractAddress string, skip, limit int64) ([]models.TokenTransfer, error) {
collection := configs.GetCollection(configs.DB, "tokenTransfers")
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
opts := options.Find().
SetSort(bson.D{{Key: "blockNumber", Value: -1}}).
@@ -92,7 +91,9 @@ func GetTokenTransfersByContract(contractAddress string, skip, limit int64) ([]m
// GetTokenTransfersByAddress retrieves all transfers involving a specific address (as sender or receiver)
func GetTokenTransfersByAddress(address string, skip, limit int64) ([]models.TokenTransfer, error) {
collection := configs.GetCollection(configs.DB, "tokenTransfers")
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
opts := options.Find().
SetSort(bson.D{{Key: "blockNumber", Value: -1}}).
@@ -124,7 +125,9 @@ func GetTokenTransfersByAddress(address string, skip, limit int64) ([]models.Tok
// TokenTransferExists checks if a token transfer already exists in the database
func TokenTransferExists(txHash string, contractAddress string, from string, to string) (bool, error) {
collection := configs.GetCollection(configs.DB, "tokenTransfers")
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
filter := bson.M{
"txHash": txHash,
@@ -196,9 +199,11 @@ func ProcessBlockTokenTransfers(blockNumber string, blockTimestamp string) error
continue
}
- // Extract from and to addresses (use Z prefix for QRL addresses)
- from := "Z" + rpc.TrimLeftZeros(log.Topics[1][26:])
- to := "Z" + rpc.TrimLeftZeros(log.Topics[2][26:])
+ // Extract from and to addresses using canonical Z-prefix form.
+ // topics[1] and topics[2] are 32-byte padded addresses; strip the 12-byte
+ // zero-padding (24 hex chars) to recover the 20-byte address.
+ from := "Z" + strings.ToLower(rpc.TrimLeftZeros(log.Topics[1][26:]))
+ to := "Z" + strings.ToLower(rpc.TrimLeftZeros(log.Topics[2][26:]))
configs.Logger.Debug("Token transfer details",
zap.String("from", from),
@@ -224,17 +229,17 @@ func ProcessBlockTokenTransfers(blockNumber string, blockTimestamp string) error
continue
}
- // Normalize addresses to ensure consistency
- if from == "" || from == "Z" {
+ // Normalize addresses to ensure consistency.
+ if from == "" || from == "z" || from == "Z" {
from = configs.QRLZeroAddress
}
- if to == "" || to == "Z" {
+ if to == "" || to == "z" || to == "Z" {
to = configs.QRLZeroAddress
}
- // Log token transfer identified
- configs.Logger.Info("Identified token transfer",
+ // Debug-level log for per-record operations
+ configs.Logger.Debug("Identified token transfer",
zap.String("token", contract.Symbol),
zap.String("from", from),
zap.String("to", to),
@@ -263,48 +268,26 @@ func ProcessBlockTokenTransfers(blockNumber string, blockTimestamp string) error
zap.String("txHash", log.TransactionHash),
zap.Error(err))
continue
- } else {
- tokenTransfersFound++
- configs.Logger.Info("Successfully stored token transfer",
- zap.String("txHash", log.TransactionHash),
- zap.String("token", contract.Symbol),
- zap.String("from", from),
- zap.String("to", to))
}
+ tokenTransfersFound++
// Update token balances
- configs.Logger.Info("Attempting to update token balances for transfer",
- zap.String("txHash", log.TransactionHash),
- zap.String("contractAddress", contractAddress),
- zap.String("from", from),
- zap.String("to", to),
- zap.String("amount", amount))
-
- err = StoreTokenBalance(contractAddress, from, amount, blockNumber)
- if err != nil {
+ if err = StoreTokenBalance(contractAddress, from, amount, blockNumber); err != nil {
configs.Logger.Error("Failed to update sender token balance",
zap.String("address", from),
zap.String("contractAddress", contractAddress),
zap.Error(err))
- } else {
- configs.Logger.Info("Successfully updated sender token balance",
- zap.String("address", from),
- zap.String("contractAddress", contractAddress))
}
- err = StoreTokenBalance(contractAddress, to, amount, blockNumber)
- if err != nil {
+ if err = StoreTokenBalance(contractAddress, to, amount, blockNumber); err != nil {
configs.Logger.Error("Failed to update recipient token balance",
zap.String("address", to),
zap.String("contractAddress", contractAddress),
zap.Error(err))
- } else {
- configs.Logger.Info("Successfully updated recipient token balance",
- zap.String("address", to),
- zap.String("contractAddress", contractAddress))
}
}
+ // Batch summary at Info level
configs.Logger.Info("Finished processing token transfers",
zap.String("blockNumber", blockNumber),
zap.Int("transfersProcessed", tokenTransfersFound))
@@ -312,14 +295,18 @@ func ProcessBlockTokenTransfers(blockNumber string, blockTimestamp string) error
return nil
}
-// InitializeTokenTransfersCollection ensures the token transfers collection is set up with proper indexes
+// InitializeTokenTransfersCollection ensures the token transfers collection is set up with proper indexes.
+// Uses CreateMany which is a no-op for indexes that already exist — safe to call on every restart.
func InitializeTokenTransfersCollection() error {
collection := configs.GetTokenTransfersCollection()
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
configs.Logger.Info("Initializing tokenTransfers collection and indexes")
- // Create indexes for token transfers collection
+ // Create indexes for token transfers collection.
+ // CreateMany does not drop existing indexes and is idempotent.
indexes := []mongo.IndexModel{
{
Keys: bson.D{
@@ -348,15 +335,7 @@ func InitializeTokenTransfersCollection() error {
},
}
- // First drop any existing indexes to avoid conflicts
- _, err := collection.Indexes().DropAll(ctx)
- if err != nil {
- configs.Logger.Warn("Failed to drop existing indexes, attempting to continue",
- zap.Error(err))
- }
-
- // Create the new indexes
- _, err = collection.Indexes().CreateMany(ctx, indexes)
+ _, err := collection.Indexes().CreateMany(ctx, indexes)
if err != nil {
configs.Logger.Error("Failed to create indexes for token transfers",
zap.Error(err))
@@ -367,14 +346,18 @@ func InitializeTokenTransfersCollection() error {
return nil
}
-// InitializeTokenBalancesCollection ensures the token balances collection is set up with proper indexes
+// InitializeTokenBalancesCollection ensures the token balances collection is set up with proper indexes.
+// Uses CreateMany which is a no-op for indexes that already exist — safe to call on every restart.
func InitializeTokenBalancesCollection() error {
collection := configs.GetTokenBalancesCollection()
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
configs.Logger.Info("Initializing tokenBalances collection and indexes")
- // Create indexes for token balances collection
+ // Create indexes for token balances collection.
+ // CreateMany does not drop existing indexes and is idempotent.
indexes := []mongo.IndexModel{
{
Keys: bson.D{
@@ -397,15 +380,7 @@ func InitializeTokenBalancesCollection() error {
},
}
- // First drop any existing indexes to avoid conflicts
- _, err := collection.Indexes().DropAll(ctx)
- if err != nil {
- configs.Logger.Warn("Failed to drop existing indexes for token balances, attempting to continue",
- zap.Error(err))
- }
-
- // Create the new indexes
- _, err = collection.Indexes().CreateMany(ctx, indexes)
+ _, err := collection.Indexes().CreateMany(ctx, indexes)
if err != nil {
configs.Logger.Error("Failed to create indexes for token balances",
zap.Error(err))
diff --git a/Zond2mongoDB/db/transactions.go b/Zond2mongoDB/db/transactions.go
index e8400f7..e6d7bc5 100644
--- a/Zond2mongoDB/db/transactions.go
+++ b/Zond2mongoDB/db/transactions.go
@@ -4,10 +4,10 @@ import (
"Zond2mongoDB/configs"
"Zond2mongoDB/models"
"Zond2mongoDB/rpc"
+ "Zond2mongoDB/validation"
"context"
"fmt"
"math/big"
- "strings"
"time"
"go.mongodb.org/mongo-driver/bson"
@@ -51,7 +51,8 @@ func QueuePotentialTokenContract(address string, tx *models.Transaction, blockTi
return
}
- ctx := context.Background()
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
// Create the document to insert
doc := bson.M{
@@ -84,47 +85,40 @@ func QueuePotentialTokenContract(address string, tx *models.Transaction, blockTi
}
// ProcessTokenTransfersFromTransactions processes token transfers for queued contracts
-// This should be called after transaction processing is complete
+// This should be called after transaction processing is complete.
+// Uses FindOneAndUpdate to atomically claim each work item, preventing duplicate
+// processing if multiple goroutines call this function concurrently.
func ProcessTokenTransfersFromTransactions() {
configs.Logger.Info("Processing of queued token contracts")
collection := configs.GetCollection(configs.DB, "pending_token_contracts")
- ctx := context.Background()
- // Find unprocessed contract addresses
- filter := bson.M{"processed": false}
-
- // Diagnostic logging - add timestamp to help identify potential race conditions
- queryStartTime := time.Now().UnixNano()
- configs.Logger.Debug("Querying for unprocessed contracts",
- zap.Int64("queryTimestamp", queryStartTime))
-
- count, err := collection.CountDocuments(ctx, filter)
+ // Count unprocessed items for logging
+ countCtx, countCancel := context.WithTimeout(context.Background(), 30*time.Second)
+ count, err := collection.CountDocuments(countCtx, bson.M{"processed": false})
+ countCancel()
if err != nil {
configs.Logger.Error("Failed to count pending token contracts", zap.Error(err))
return
}
configs.Logger.Info("Found pending token contracts to process", zap.Int64("count", count))
-
if count == 0 {
configs.Logger.Info("No pending token contracts to process")
return
}
- // Add find options to help reduce race conditions by sorting consistently
- findOptions := options.Find().SetSort(bson.D{{Key: "contractAddress", Value: 1}, {Key: "txHash", Value: 1}})
-
- cursor, err := collection.Find(ctx, filter, findOptions)
- if err != nil {
- configs.Logger.Error("Failed to query pending token contracts", zap.Error(err))
- return
- }
- defer cursor.Close(ctx)
-
- // Process each pending contract
+ // Process each item by atomically claiming it with FindOneAndUpdate.
+ // This prevents race conditions: only the goroutine that successfully flips
+ // processed=false→true will execute processTokenContract for that item.
processed := 0
- for cursor.Next(ctx) {
+ claimFilter := bson.M{"processed": false}
+ claimUpdate := bson.M{"$set": bson.M{"processed": true}}
+ findOneOpts := options.FindOneAndUpdate().
+ SetSort(bson.D{{Key: "contractAddress", Value: 1}, {Key: "txHash", Value: 1}}).
+ SetReturnDocument(options.Before)
+
+ for {
var pending struct {
ContractAddress string `bson:"contractAddress"`
TxHash string `bson:"txHash"`
@@ -132,51 +126,26 @@ func ProcessTokenTransfersFromTransactions() {
BlockTimestamp string `bson:"blockTimestamp"`
}
- if err := cursor.Decode(&pending); err != nil {
- configs.Logger.Error("Failed to decode pending token contract", zap.Error(err))
- continue
+ claimCtx, claimCancel := context.WithTimeout(context.Background(), 30*time.Second)
+ err := collection.FindOneAndUpdate(claimCtx, claimFilter, claimUpdate, findOneOpts).Decode(&pending)
+ claimCancel()
+
+ if err == mongo.ErrNoDocuments {
+ // No more unprocessed items
+ break
+ }
+ if err != nil {
+ configs.Logger.Error("Failed to claim pending token contract", zap.Error(err))
+ break
}
- // Process the token contract
- configs.Logger.Info("Processing token contract",
+ configs.Logger.Debug("Processing token contract",
zap.String("address", pending.ContractAddress),
zap.String("txHash", pending.TxHash),
- zap.String("blockNumber", pending.BlockNumber),
- zap.Int64("processingTimestamp", time.Now().UnixNano()),
- zap.Int64("queryTimestamp", queryStartTime))
+ zap.String("blockNumber", pending.BlockNumber))
processTokenContract(pending.ContractAddress, pending.TxHash, pending.BlockNumber, pending.BlockTimestamp)
processed++
-
- // Mark as processed
- updateFilter := bson.M{
- "contractAddress": pending.ContractAddress,
- "txHash": pending.TxHash,
- }
-
- // Use an additional filter to ensure we only update if it's still unprocessed
- // This helps detect race conditions
- updateFilter["processed"] = false
-
- result, err := collection.UpdateOne(ctx, updateFilter, bson.M{"$set": bson.M{"processed": true}})
-
- if err != nil {
- configs.Logger.Error("Failed to mark token contract as processed",
- zap.String("address", pending.ContractAddress),
- zap.String("txHash", pending.TxHash),
- zap.Error(err))
- } else if result.ModifiedCount == 0 {
- // This indicates a potential race condition - another process marked it as processed already
- configs.Logger.Warn("Race condition detected: contract was already marked as processed by another process",
- zap.String("address", pending.ContractAddress),
- zap.String("txHash", pending.TxHash),
- zap.Int64("processingTimestamp", time.Now().UnixNano()),
- zap.Int64("queryTimestamp", queryStartTime))
- } else {
- configs.Logger.Debug("Successfully marked contract as processed",
- zap.String("address", pending.ContractAddress),
- zap.String("txHash", pending.TxHash))
- }
}
configs.Logger.Info("Completed batch processing of token contracts", zap.Int("processed", processed))
@@ -359,9 +328,24 @@ func processTransactionData(tx *models.Transaction, blockTimestamp string, to st
}
}
- transactionType, callType, fromInternal, toInternal, inputInternal, outputInternal, InternalTracerAddress, valueInternal, gasInternal, gasUsedInternal, addressFunctionIdentifier, amountFunctionIdentifier := rpc.CallDebugTraceTransaction(tx.Hash)
- if transactionType == "CALL" || InternalTracerAddress != nil {
- InternalTransactionByAddressCollection(transactionType, callType, txHash, fromInternal, toInternal, fmt.Sprintf("0x%x", inputInternal), fmt.Sprintf("0x%x", outputInternal), InternalTracerAddress, float64(valueInternal), fmt.Sprintf("0x%x", gasInternal), fmt.Sprintf("0x%x", gasUsedInternal), addressFunctionIdentifier, fmt.Sprintf("0x%x", amountFunctionIdentifier), blockTimestamp)
+ trace := rpc.CallDebugTraceTransaction(tx.Hash)
+ if trace.TransactionType == "CALL" || trace.TraceAddress != nil {
+ InternalTransactionByAddressCollection(
+ trace.TransactionType,
+ trace.CallType,
+ txHash,
+ trace.From,
+ trace.To,
+ fmt.Sprintf("0x%x", trace.Input),
+ fmt.Sprintf("0x%x", trace.Output),
+ trace.TraceAddress,
+ float64(trace.Value),
+ fmt.Sprintf("0x%x", trace.Gas),
+ fmt.Sprintf("0x%x", trace.GasUsed),
+ trace.AddressFunctionIdentifier,
+ fmt.Sprintf("0x%x", trace.AmountFunctionIdentifier),
+ blockTimestamp,
+ )
}
// Calculate fees using hex strings
@@ -369,8 +353,8 @@ func processTransactionData(tx *models.Transaction, blockTimestamp string, to st
gasPriceBig.SetString(gasPrice[2:], 16)
gasUsedBig := new(big.Int)
- // If gasUsedInternal is 0, try to use gasUsed from the transaction receipt
- if gasUsedInternal == 0 {
+ // If trace.GasUsed is 0, try to use gasUsed from the transaction receipt
+ if trace.GasUsed == 0 {
// Get transaction receipt to obtain actual gas used
receipt, err := rpc.GetTransactionReceipt(txHash)
if err == nil && receipt != nil && receipt.Result.GasUsed != "" && len(receipt.Result.GasUsed) > 2 {
@@ -387,11 +371,11 @@ func processTransactionData(tx *models.Transaction, blockTimestamp string, to st
zap.String("txHash", txHash),
zap.String("gas", tx.Gas))
} else {
- gasUsedBig.SetString(fmt.Sprintf("%x", gasUsedInternal), 16)
+ gasUsedBig.SetString(fmt.Sprintf("%x", trace.GasUsed), 16)
}
}
} else {
- gasUsedBig.SetString(fmt.Sprintf("%x", gasUsedInternal), 16)
+ gasUsedBig.SetString(fmt.Sprintf("%x", trace.GasUsed), 16)
}
feesBig := new(big.Int).Mul(gasPriceBig, gasUsedBig)
@@ -414,10 +398,14 @@ func processTransactionData(tx *models.Transaction, blockTimestamp string, to st
}
func TransferCollection(blockNumber string, blockTimestamp string, from string, to string, hash string, pk string, signature string, nonce string, value float64, data string, contractAddress string, status string, size string, paidFees float64) (*mongo.InsertOneResult, error) {
- // Normalize addresses to lowercase for consistent storage
- from = strings.ToLower(from)
- to = strings.ToLower(to)
- contractAddress = strings.ToLower(contractAddress)
+ // Normalize addresses to canonical Z-prefix form
+ from = validation.ConvertToZAddress(from)
+ if to != "" {
+ to = validation.ConvertToZAddress(to)
+ }
+ if contractAddress != "" {
+ contractAddress = validation.ConvertToZAddress(contractAddress)
+ }
var doc bson.D
@@ -447,7 +435,10 @@ func TransferCollection(blockNumber string, blockTimestamp string, from string,
}
}
- result, err := configs.TransferCollections.InsertOne(context.TODO(), doc)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ result, err := configs.TransferCollections.InsertOne(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insert in the transactionByAddress collection: ", zap.Error(err))
}
@@ -456,10 +447,16 @@ func TransferCollection(blockNumber string, blockTimestamp string, from string,
}
func InternalTransactionByAddressCollection(transactionType string, callType string, hash string, from string, to string, input string, output string, traceAddress []int, value float64, gas string, gasUsed string, addressFunctionIdentifier string, amountFunctionIdentifier string, blockTimestamp string) (*mongo.InsertOneResult, error) {
- // Normalize addresses to lowercase for consistent storage
- from = strings.ToLower(from)
- to = strings.ToLower(to)
- addressFunctionIdentifier = strings.ToLower(addressFunctionIdentifier)
+ // Normalize addresses to canonical Z-prefix form
+ if from != "" {
+ from = validation.ConvertToZAddress(from)
+ }
+ if to != "" {
+ to = validation.ConvertToZAddress(to)
+ }
+ if addressFunctionIdentifier != "" {
+ addressFunctionIdentifier = validation.ConvertToZAddress(addressFunctionIdentifier)
+ }
doc := bson.D{
{Key: "type", Value: transactionType},
@@ -478,7 +475,10 @@ func InternalTransactionByAddressCollection(transactionType string, callType str
{Key: "blockTimestamp", Value: blockTimestamp},
}
- result, err := configs.InternalTransactionByAddressCollections.InsertOne(context.TODO(), doc)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ result, err := configs.InternalTransactionByAddressCollections.InsertOne(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insert in the internalTransactionByAddress collection:", zap.Error(err))
return nil, err
@@ -488,9 +488,11 @@ func InternalTransactionByAddressCollection(transactionType string, callType str
}
func TransactionByAddressCollection(timeStamp string, txType string, from string, to string, hash string, amount float64, paidFees float64, blockNumber string) (*mongo.InsertOneResult, error) {
- // Normalize addresses to lowercase for consistent storage
- from = strings.ToLower(from)
- to = strings.ToLower(to)
+ // Normalize addresses to canonical Z-prefix form
+ from = validation.ConvertToZAddress(from)
+ if to != "" {
+ to = validation.ConvertToZAddress(to)
+ }
doc := bson.D{
{Key: "txType", Value: txType},
@@ -503,7 +505,10 @@ func TransactionByAddressCollection(timeStamp string, txType string, from string
{Key: "blockNumber", Value: blockNumber},
}
- result, err := configs.TransactionByAddressCollections.InsertOne(context.TODO(), doc)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ result, err := configs.TransactionByAddressCollections.InsertOne(ctx, doc)
if err != nil {
configs.Logger.Warn("Failed to insert in the transactionByAddress collection: ", zap.Error(err))
}
@@ -512,11 +517,13 @@ func TransactionByAddressCollection(timeStamp string, txType string, from string
}
func UpsertTransactions(address string, value float64, isContract bool) (*mongo.UpdateResult, error) {
- // Normalize address to lowercase to ensure consistent storage
- // This matches the backend API's normalization in ReturnSingleAddress
- address = strings.ToLower(address)
+ // Normalize address to canonical Z-prefix form
+ address = validation.ConvertToZAddress(address)
filter := bson.D{{Key: "id", Value: address}}
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
// If this is flagged as a contract, update with that information
if isContract {
update := bson.D{
@@ -527,7 +534,7 @@ func UpsertTransactions(address string, value float64, isContract bool) (*mongo.
}},
}
opts := options.Update().SetUpsert(true)
- result, err := configs.AddressesCollections.UpdateOne(context.TODO(), filter, update, opts)
+ result, err := configs.AddressesCollections.UpdateOne(ctx, filter, update, opts)
if err != nil {
configs.Logger.Warn("Failed to update address collection: ", zap.Error(err))
}
@@ -540,7 +547,7 @@ func UpsertTransactions(address string, value float64, isContract bool) (*mongo.
IsContract bool `bson:"isContract"`
}
- err := configs.AddressesCollections.FindOne(context.TODO(), filter).Decode(&existingDoc)
+ err := configs.AddressesCollections.FindOne(ctx, filter).Decode(&existingDoc)
if err == nil && existingDoc.IsContract {
// It's already marked as a contract, so keep that information
update := bson.D{
@@ -551,7 +558,7 @@ func UpsertTransactions(address string, value float64, isContract bool) (*mongo.
}},
}
opts := options.Update().SetUpsert(true)
- result, err := configs.AddressesCollections.UpdateOne(context.TODO(), filter, update, opts)
+ result, err := configs.AddressesCollections.UpdateOne(ctx, filter, update, opts)
if err != nil {
configs.Logger.Warn("Failed to update address collection: ", zap.Error(err))
}
@@ -567,7 +574,7 @@ func UpsertTransactions(address string, value float64, isContract bool) (*mongo.
}},
}
opts := options.Update().SetUpsert(true)
- result, err := configs.AddressesCollections.UpdateOne(context.TODO(), filter, update, opts)
+ result, err := configs.AddressesCollections.UpdateOne(ctx, filter, update, opts)
if err != nil {
configs.Logger.Warn("Failed to update address collection: ", zap.Error(err))
}
@@ -577,21 +584,29 @@ func UpsertTransactions(address string, value float64, isContract bool) (*mongo.
func GetContractByAddress(address string) *models.ContractInfo {
collection := configs.GetCollection(configs.DB, "contractCode")
var contract models.ContractInfo
- err := collection.FindOne(context.Background(), bson.M{"address": address}).Decode(&contract)
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ err := collection.FindOne(ctx, bson.M{"address": address}).Decode(&contract)
if err != nil {
return nil
}
return &contract
}
-// InitializePendingTokenContractsCollection ensures the pending token contracts collection is set up with proper indexes
+// InitializePendingTokenContractsCollection ensures the pending token contracts collection is set up with proper indexes.
+// Uses CreateMany, which is a no-op when an identical index already exists, avoiding the destructive DropAll.
func InitializePendingTokenContractsCollection() error {
collection := configs.GetCollection(configs.DB, "pending_token_contracts")
- ctx := context.Background()
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
configs.Logger.Info("Initializing pending_token_contracts collection and indexes")
- // Create indexes for pending token contracts collection
+ // Create indexes for pending token contracts collection.
+ // CreateMany is a no-op when an identical index already exists, so this is safe to call on every restart.
indexes := []mongo.IndexModel{
{
Keys: bson.D{
@@ -608,15 +623,7 @@ func InitializePendingTokenContractsCollection() error {
},
}
- // First drop any existing indexes to avoid conflicts
- _, err := collection.Indexes().DropAll(ctx)
- if err != nil {
- configs.Logger.Warn("Failed to drop existing indexes, attempting to continue",
- zap.Error(err))
- }
-
- // Create the new indexes
- _, err = collection.Indexes().CreateMany(ctx, indexes)
+ _, err := collection.Indexes().CreateMany(ctx, indexes)
if err != nil {
configs.Logger.Error("Failed to create indexes for pending token contracts",
zap.Error(err))
diff --git a/Zond2mongoDB/db/validators.go b/Zond2mongoDB/db/validators.go
index f84585a..65c0bd7 100644
--- a/Zond2mongoDB/db/validators.go
+++ b/Zond2mongoDB/db/validators.go
@@ -8,10 +8,12 @@ import (
"time"
"go.mongodb.org/mongo-driver/bson"
+ "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"go.uber.org/zap"
)
+// UpdateValidators updates the previousHash field on a block document.
func UpdateValidators(blockNumber string, previousHash string) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -25,99 +27,172 @@ func UpdateValidators(blockNumber string, previousHash string) {
}
}
+// InsertValidators stores each validator as its own document using BulkWrite upserts.
+// The document _id is the validator index string. This replaces the legacy single
+// mega-document approach and avoids MongoDB's 16 MB document size limit.
func InsertValidators(beaconResponse models.BeaconValidatorResponse, currentEpoch string) error {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ if len(beaconResponse.ValidatorList) == 0 {
+ return nil
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- // Convert beacon response to storage format
- storage := models.ValidatorStorage{
- ID: "validators", // Single document ID for easy updates
- Epoch: currentEpoch,
- UpdatedAt: fmt.Sprintf("%d", time.Now().Unix()),
- Validators: make([]models.ValidatorRecord, 0, len(beaconResponse.ValidatorList)),
- }
+ updatedAt := fmt.Sprintf("%d", time.Now().Unix())
- // Convert each validator
+ writeModels := make([]mongo.WriteModel, 0, len(beaconResponse.ValidatorList))
for _, v := range beaconResponse.ValidatorList {
- record := models.ValidatorRecord{
- Index: v.Index,
- PublicKeyHex: models.Base64ToHex(v.Validator.PublicKey),
- WithdrawalCredentialsHex: models.Base64ToHex(v.Validator.WithdrawalCredentials),
- EffectiveBalance: v.Validator.EffectiveBalance,
- Slashed: v.Validator.Slashed,
- ActivationEligibilityEpoch: v.Validator.ActivationEligibilityEpoch,
- ActivationEpoch: v.Validator.ActivationEpoch,
- ExitEpoch: v.Validator.ExitEpoch,
- WithdrawableEpoch: v.Validator.WithdrawableEpoch,
- SlotNumber: v.Index, // Using index as slot number
- IsLeader: true, // Set based on your leader selection logic
- }
- storage.Validators = append(storage.Validators, record)
+ doc := buildValidatorDocument(v, currentEpoch, updatedAt)
+ filter := bson.M{"_id": doc.ID}
+ update := bson.M{"$set": doc}
+ writeModels = append(writeModels, mongo.NewUpdateOneModel().
+ SetFilter(filter).
+ SetUpdate(update).
+ SetUpsert(true))
}
- // Upsert the document
- opts := options.Update().SetUpsert(true)
- filter := bson.M{"_id": "validators"}
- update := bson.M{"$set": storage}
-
- _, err := configs.ValidatorsCollections.UpdateOne(ctx, filter, update, opts)
+ opts := options.BulkWrite().SetOrdered(false)
+ result, err := configs.ValidatorsCollections.BulkWrite(ctx, writeModels, opts)
if err != nil {
- configs.Logger.Error("Failed to update validator document", zap.Error(err))
+ configs.Logger.Error("Failed to bulk-write validator documents", zap.Error(err))
return err
}
- configs.Logger.Info("Successfully updated validators",
- zap.Int("count", len(storage.Validators)),
+ configs.Logger.Info("Successfully upserted validators",
+ zap.Int64("upserted", result.UpsertedCount),
+ zap.Int64("modified", result.ModifiedCount),
zap.String("epoch", currentEpoch))
return nil
}
+// buildValidatorDocument converts a BeaconValidator into a ValidatorDocument.
+func buildValidatorDocument(v models.BeaconValidator, epoch, updatedAt string) models.ValidatorDocument {
+ slotNum := v.Index
+ isLeader := false
+ // Simplified leader selection: every 128th index slot is a leader.
+ var idx int64
+ fmt.Sscanf(v.Index, "%d", &idx)
+ isLeader = idx%128 == 0
+
+ return models.ValidatorDocument{
+ ID: v.Index,
+ PublicKeyHex: models.Base64ToHex(v.Validator.PublicKey),
+ WithdrawalCredentialsHex: models.Base64ToHex(v.Validator.WithdrawalCredentials),
+ EffectiveBalance: v.Validator.EffectiveBalance,
+ Slashed: v.Validator.Slashed,
+ ActivationEligibilityEpoch: v.Validator.ActivationEligibilityEpoch,
+ ActivationEpoch: v.Validator.ActivationEpoch,
+ ExitEpoch: v.Validator.ExitEpoch,
+ WithdrawableEpoch: v.Validator.WithdrawableEpoch,
+ SlotNumber: slotNum,
+ IsLeader: isLeader,
+ Epoch: epoch,
+ UpdatedAt: updatedAt,
+ }
+}
+
+// GetValidators retrieves all validator documents from the collection and assembles them
+// into the legacy ValidatorStorage shape so the rest of the syncer pipeline is unaffected.
func GetValidators() (*models.ValidatorStorage, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
+ cursor, err := configs.ValidatorsCollections.Find(ctx, bson.M{})
if err != nil {
- configs.Logger.Error("Failed to get validator document", zap.Error(err))
+ configs.Logger.Error("Failed to find validator documents", zap.Error(err))
return nil, err
}
+ defer cursor.Close(ctx)
- return &storage, nil
+ var docs []models.ValidatorDocument
+ if err := cursor.All(ctx, &docs); err != nil {
+ configs.Logger.Error("Failed to decode validator documents", zap.Error(err))
+ return nil, err
+ }
+
+ // Convert []ValidatorDocument → []ValidatorRecord for callers that still use ValidatorStorage.
+ records := make([]models.ValidatorRecord, 0, len(docs))
+ epoch := ""
+ updatedAt := ""
+ for _, d := range docs {
+ records = append(records, validatorDocToRecord(d))
+ if epoch == "" {
+ epoch = d.Epoch
+ updatedAt = d.UpdatedAt
+ }
+ }
+
+ return &models.ValidatorStorage{
+ ID: "validators",
+ Epoch: epoch,
+ UpdatedAt: updatedAt,
+ Validators: records,
+ }, nil
}
+// GetValidatorByPublicKey retrieves a single validator by its hex public key.
func GetValidatorByPublicKey(publicKeyHex string) (*models.ValidatorRecord, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{
- "validators.publicKeyHex": publicKeyHex,
- }).Decode(&storage)
-
+ var doc models.ValidatorDocument
+ err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"publicKeyHex": publicKeyHex}).Decode(&doc)
if err != nil {
+ if err == mongo.ErrNoDocuments {
+ return nil, fmt.Errorf("validator not found")
+ }
return nil, err
}
- // Find the matching validator
- for _, v := range storage.Validators {
- if v.PublicKeyHex == publicKeyHex {
- return &v, nil
+ record := validatorDocToRecord(doc)
+ return &record, nil
+}
+
+// GetValidatorByIndex retrieves a validator by its index string.
+func GetValidatorByIndex(index string) (*models.ValidatorRecord, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+
+ var doc models.ValidatorDocument
+ err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": index}).Decode(&doc)
+ if err != nil {
+ if err == mongo.ErrNoDocuments {
+ return nil, fmt.Errorf("validator not found")
}
+ return nil, err
}
- return nil, fmt.Errorf("validator not found")
+ record := validatorDocToRecord(doc)
+ return &record, nil
}
+// validatorDocToRecord maps a ValidatorDocument to the legacy ValidatorRecord type.
+func validatorDocToRecord(d models.ValidatorDocument) models.ValidatorRecord {
+ return models.ValidatorRecord{
+ Index: d.ID,
+ PublicKeyHex: d.PublicKeyHex,
+ WithdrawalCredentialsHex: d.WithdrawalCredentialsHex,
+ EffectiveBalance: d.EffectiveBalance,
+ Slashed: d.Slashed,
+ ActivationEligibilityEpoch: d.ActivationEligibilityEpoch,
+ ActivationEpoch: d.ActivationEpoch,
+ ExitEpoch: d.ExitEpoch,
+ WithdrawableEpoch: d.WithdrawableEpoch,
+ SlotNumber: d.SlotNumber,
+ IsLeader: d.IsLeader,
+ }
+}
+
+// GetBlockNumberFromHash returns the block number for a given block hash.
func GetBlockNumberFromHash(hash string) string {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
filter := bson.M{"result.hash": hash}
- options := options.FindOne().SetProjection(bson.M{"result.number": 1})
+ findOpts := options.FindOne().SetProjection(bson.M{"result.number": 1})
var block models.ZondDatabaseBlock
- err := configs.BlocksCollections.FindOne(ctx, filter, options).Decode(&block)
+ err := configs.BlocksCollections.FindOne(ctx, filter, findOpts).Decode(&block)
if err != nil {
configs.Logger.Info("Failed to get block number from hash", zap.Error(err))
return "0x0"
@@ -126,7 +201,7 @@ func GetBlockNumberFromHash(hash string) string {
return block.Result.Number
}
-// UpsertEpochInfo stores or updates the current epoch information
+// UpsertEpochInfo stores or updates the current epoch information.
func UpsertEpochInfo(epochInfo *models.EpochInfo) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -150,7 +225,7 @@ func UpsertEpochInfo(epochInfo *models.EpochInfo) error {
return nil
}
-// GetEpochInfo retrieves the current epoch information
+// GetEpochInfo retrieves the current epoch information.
func GetEpochInfo() (*models.EpochInfo, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -165,12 +240,11 @@ func GetEpochInfo() (*models.EpochInfo, error) {
return &epochInfo, nil
}
-// InsertValidatorHistory inserts a validator history record for a specific epoch
+// InsertValidatorHistory inserts a validator history record for a specific epoch.
func InsertValidatorHistory(record *models.ValidatorHistoryRecord) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- // Use epoch as unique identifier to prevent duplicate entries
opts := options.Update().SetUpsert(true)
filter := bson.M{"epoch": record.Epoch}
update := bson.M{"$set": record}
@@ -187,7 +261,7 @@ func InsertValidatorHistory(record *models.ValidatorHistoryRecord) error {
return nil
}
-// GetValidatorHistory retrieves historical validator data, optionally limited
+// GetValidatorHistory retrieves historical validator data, optionally limited.
func GetValidatorHistory(limit int) ([]models.ValidatorHistoryRecord, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -211,27 +285,3 @@ func GetValidatorHistory(limit int) ([]models.ValidatorHistoryRecord, error) {
return history, nil
}
-
-// GetValidatorByIndex retrieves a validator by their index
-func GetValidatorByIndex(index string) (*models.ValidatorRecord, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
- defer cancel()
-
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{
- "validators.index": index,
- }).Decode(&storage)
-
- if err != nil {
- return nil, err
- }
-
- // Find the matching validator
- for _, v := range storage.Validators {
- if v.Index == index {
- return &v, nil
- }
- }
-
- return nil, fmt.Errorf("validator not found")
-}
diff --git a/Zond2mongoDB/main.go b/Zond2mongoDB/main.go
index d6470c8..21e149b 100644
--- a/Zond2mongoDB/main.go
+++ b/Zond2mongoDB/main.go
@@ -3,10 +3,12 @@ package main
import (
"Zond2mongoDB/configs"
"Zond2mongoDB/synchroniser"
+ "context"
"net/http"
"os"
"os/signal"
"syscall"
+ "time"
"go.uber.org/zap"
)
@@ -18,14 +20,45 @@ func main() {
configs.Logger.Info("Initializing QRL to MongoDB synchronizer...")
configs.Logger.Info("Connecting to MongoDB and RPC node...")
- // Create a buffered channel to avoid signal notification drops
- c := make(chan os.Signal, 1)
- signal.Notify(c, os.Interrupt, syscall.SIGTERM)
+ // stopCh is closed when a termination signal is received. Sync() and other
+ // long-running loops should watch this channel so they can finish their current
+ // unit of work and exit cleanly.
+ stopCh := make(chan struct{})
+
+ // doneCh is closed by the main sync goroutine once it has finished.
+ doneCh := make(chan struct{})
+
+ // Create a buffered channel to avoid signal notification drops.
+ sigCh := make(chan os.Signal, 1)
+ signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM)
+
go func() {
- <-c
- configs.Logger.Info("Gracefully shutting down synchronizer...")
- configs.Logger.Info("Stopped syncing")
- os.Exit(1)
+ sig := <-sigCh
+ configs.Logger.Info("Received shutdown signal, initiating graceful shutdown...",
+ zap.String("signal", sig.String()))
+
+ // Signal all workers to stop accepting new work.
+ close(stopCh)
+
+ // Wait up to 30 seconds for in-flight processing to complete.
+ select {
+ case <-doneCh:
+ configs.Logger.Info("All sync work completed, shutting down cleanly")
+ case <-time.After(30 * time.Second):
+ configs.Logger.Warn("Graceful shutdown timed out after 30s, forcing exit")
+ }
+
+ // Disconnect MongoDB cleanly.
+ disconnectCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ defer cancel()
+ if err := configs.DB.Disconnect(disconnectCtx); err != nil {
+ configs.Logger.Error("Error disconnecting from MongoDB", zap.Error(err))
+ } else {
+ configs.Logger.Info("MongoDB disconnected cleanly")
+ }
+
+ configs.Logger.Info("Synchronizer stopped")
+ os.Exit(0)
}()
configs.Logger.Info("Starting blockchain synchronization process...")
@@ -40,7 +73,7 @@ func main() {
})
healthPort := os.Getenv("HEALTH_PORT")
if healthPort == "" {
- healthPort = "8081"
+ healthPort = "8083"
}
configs.Logger.Info("Starting health check server on port " + healthPort)
if err := http.ListenAndServe(":"+healthPort, nil); err != nil {
@@ -51,7 +84,21 @@ func main() {
// Start pending transaction sync (this is not started in sync.go)
configs.Logger.Info("Starting pending transaction sync service...")
synchroniser.StartPendingTransactionSync()
- // Sync will now handle starting wallet count and contract reprocessing
- // services after initial sync is complete
- synchroniser.Sync()
+
+ // Run the main sync in a goroutine so the signal handler above can observe doneCh.
+ go func() {
+ defer close(doneCh)
+ // Sync will now handle starting wallet count and contract reprocessing
+ // services after initial sync is complete
+ synchroniser.Sync()
+ }()
+
+ // Block until either sync finishes naturally or a shutdown signal arrives.
+ select {
+ case <-doneCh:
+ configs.Logger.Info("Sync completed, exiting normally")
+ case <-stopCh:
+ // Signal was received; the signal-handler goroutine exits the process after doneCh closes or its 30s timeout elapses.
+ <-doneCh
+ }
}
diff --git a/Zond2mongoDB/models/validators.go b/Zond2mongoDB/models/validators.go
index f144a7b..baf1af1 100644
--- a/Zond2mongoDB/models/validators.go
+++ b/Zond2mongoDB/models/validators.go
@@ -160,6 +160,24 @@ type ValidatorHistoryRecord struct {
TotalStaked string `bson:"totalStaked" json:"totalStaked"` // Sum of effective balances
}
+// ValidatorDocument is the per-validator MongoDB document.
+// Each validator is stored as its own document with _id == validator index.
+type ValidatorDocument struct {
+ ID string `bson:"_id" json:"_id"`
+ PublicKeyHex string `bson:"publicKeyHex" json:"publicKeyHex"`
+ WithdrawalCredentialsHex string `bson:"withdrawalCredentialsHex" json:"withdrawalCredentialsHex"`
+ EffectiveBalance string `bson:"effectiveBalance" json:"effectiveBalance"`
+ Slashed bool `bson:"slashed" json:"slashed"`
+ ActivationEligibilityEpoch string `bson:"activationEligibilityEpoch" json:"activationEligibilityEpoch"`
+ ActivationEpoch string `bson:"activationEpoch" json:"activationEpoch"`
+ ExitEpoch string `bson:"exitEpoch" json:"exitEpoch"`
+ WithdrawableEpoch string `bson:"withdrawableEpoch" json:"withdrawableEpoch"`
+ SlotNumber string `bson:"slotNumber" json:"slotNumber"`
+ IsLeader bool `bson:"isLeader" json:"isLeader"`
+ Epoch string `bson:"epoch" json:"epoch"`
+ UpdatedAt string `bson:"updatedAt" json:"updatedAt"`
+}
+
// GetValidatorStatus computes the validator status based on current epoch
func GetValidatorStatus(activationEpoch, exitEpoch string, slashed bool, currentEpoch int64) string {
activation, _ := strconv.ParseInt(activationEpoch, 10, 64)
diff --git a/Zond2mongoDB/models/zond.go b/Zond2mongoDB/models/zond.go
index 36e89fa..f41c44c 100644
--- a/Zond2mongoDB/models/zond.go
+++ b/Zond2mongoDB/models/zond.go
@@ -24,6 +24,16 @@ type ZondDatabaseBlock struct {
Result Result `json:"result"`
}
+// ZondDatabaseBlockWithInt is a wrapper used when inserting blocks into MongoDB.
+// It adds a blockNumberInt field so range queries can use proper numeric comparison
+// instead of lexicographic hex string comparison (which sorts "0x9" after "0x10").
+type ZondDatabaseBlockWithInt struct {
+ Jsonrpc string `json:"jsonrpc" bson:"jsonrpc"`
+ ID int `json:"id" bson:"id"`
+ Result Result `json:"result" bson:"result"`
+ BlockNumberInt int64 `bson:"blockNumberInt"`
+}
+
type Withdrawal struct {
Index string `json:"index"`
ValidatorIndex string `json:"validatorIndex"`
diff --git a/Zond2mongoDB/rpc/calls.go b/Zond2mongoDB/rpc/calls.go
index feaa723..ee1619d 100644
--- a/Zond2mongoDB/rpc/calls.go
+++ b/Zond2mongoDB/rpc/calls.go
@@ -10,7 +10,6 @@ import (
"encoding/json"
"fmt"
"io"
- "io/ioutil"
"math/big"
"net/http"
"os"
@@ -224,11 +223,51 @@ func GetContractAddress(txHash string) (string, string, error) {
return ContractAddress.Result.ContractAddress, ContractAddress.Result.Status, nil
}
-func CallDebugTraceTransaction(hash string) (transactionType string, callType string, from string, to string, input uint64, output uint64, traceAddress []int, value float32, gas uint64, gasUsed uint64, addressFunctionidentifier string, amountFunctionIdentifier uint64) {
+// DebugTraceResult holds all data returned by CallDebugTraceTransaction.
+// Using a struct avoids the fragility of 12 positional return values and
+// makes call-sites readable without relying on positional assignment.
+type DebugTraceResult struct {
+ // TransactionType is the EVM call type string (e.g. "CALL", "CREATE").
+ TransactionType string
+ // CallType is the sub-call type (e.g. "call", "delegatecall").
+ CallType string
+ // From is the caller address in Z-prefix format.
+ From string
+ // To is the callee address in Z-prefix format.
+ To string
+ // Input is always 0 in the current implementation (field reserved for future use).
+ Input uint64
+ // Output is a uint64 representation of the call output (1 = success/non-empty, 0 = empty/failure).
+ Output uint64
+ // TraceAddress is the call-tree position of this call frame.
+ TraceAddress []int
+ // Value is the native QRL value transferred, converted by dividing the raw amount by QUANTA.
+ Value float32
+ // Gas is the gas supplied to the call frame.
+ Gas uint64
+ // GasUsed is the gas consumed by the call frame.
+ GasUsed uint64
+ // AddressFunctionIdentifier is the recipient address decoded from the transfer() input data.
+ AddressFunctionIdentifier string
+ // AmountFunctionIdentifier is the transfer amount decoded from the transfer() input data.
+ AmountFunctionIdentifier uint64
+ // Err is non-nil when the RPC call itself failed (network error, unmarshal error, etc.).
+ // A nil Err with empty TransactionType means the trace has no relevant call data.
+ Err error
+}
+
+// emptyTrace returns a zero-value DebugTraceResult, optionally carrying an error.
+func emptyTrace(err error) DebugTraceResult {
+ return DebugTraceResult{Err: err}
+}
+
+// CallDebugTraceTransaction calls debug_traceTransaction and returns the parsed
+// result as a DebugTraceResult struct.
+func CallDebugTraceTransaction(hash string) DebugTraceResult {
// Validate transaction hash
if err := validation.ValidateHexString(hash, validation.HashLength); err != nil {
zap.L().Error("Invalid transaction hash", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
var tracerResponse models.TraceResponse
@@ -248,46 +287,42 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
b, err := json.Marshal(group)
if err != nil {
zap.L().Error("Failed JSON marshal", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
req, err := http.NewRequest("POST", os.Getenv("NODE_URL"), bytes.NewBuffer([]byte(b)))
if err != nil {
zap.L().Error("Failed to create request", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
req.Header.Set("Content-Type", "application/json")
resp, err := GetHTTPClient().Do(req)
if err != nil {
zap.L().Error("Failed to execute request", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
zap.L().Error("Failed to read response body", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
- err = json.Unmarshal([]byte(string(body)), &tracerResponse)
- if err != nil {
+ if err = json.Unmarshal(body, &tracerResponse); err != nil {
zap.L().Error("Failed to unmarshal response", zap.Error(err))
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(err)
}
- // Initialize default values for gas and gasUsed
- gas = 0
- gasUsed = 0
- value = 0 // Initialize value to 0
+ var res DebugTraceResult
// Validate and parse gas values
if tracerResponse.Result.Gas != "" {
if !validation.IsValidHexString(tracerResponse.Result.Gas) {
zap.L().Error("Invalid gas format", zap.String("gas", tracerResponse.Result.Gas))
} else if parsed, err := strconv.ParseUint(tracerResponse.Result.Gas[2:], 16, 64); err == nil {
- gas = parsed
+ res.Gas = parsed
} else {
zap.L().Warn("Failed to parse gas value", zap.Error(err))
}
@@ -297,7 +332,7 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
if !validation.IsValidHexString(tracerResponse.Result.GasUsed) {
zap.L().Error("Invalid gasUsed format", zap.String("gasUsed", tracerResponse.Result.GasUsed))
} else if parsed, err := strconv.ParseUint(tracerResponse.Result.GasUsed[2:], 16, 64); err == nil {
- gasUsed = parsed
+ res.GasUsed = parsed
} else {
zap.L().Warn("Failed to parse gasUsed value", zap.Error(err))
}
@@ -308,20 +343,18 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
if !validation.IsValidHexString(tracerResponse.Result.Value) {
zap.L().Error("Invalid value format", zap.String("value", tracerResponse.Result.Value))
} else {
- // Convert hex value to big.Int
valueBigInt := new(big.Int)
valueBigInt.SetString(tracerResponse.Result.Value[2:], 16)
- // Convert to float32 (with proper scaling)
divisor := new(big.Float).SetFloat64(float64(configs.QUANTA))
bigIntAsFloat := new(big.Float).SetInt(valueBigInt)
resultBigFloat := new(big.Float).Quo(bigIntAsFloat, divisor)
valueFloat64, _ := resultBigFloat.Float64()
- value = float32(valueFloat64)
+ res.Value = float32(valueFloat64)
zap.L().Debug("Parsed transaction value",
zap.String("hex_value", tracerResponse.Result.Value),
- zap.Float32("parsed_value", value))
+ zap.Float32("parsed_value", res.Value))
}
}
@@ -334,61 +367,54 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
tracerResponse.Result.Type == "CALL"
if !hasValidCallData {
- return "", "", "", "", 0, 0, nil, 0, 0, 0, "", 0
+ return emptyTrace(nil)
}
// Validate addresses and convert to Z format
if tracerResponse.Result.From != "" {
if err := validation.ValidateAddress(tracerResponse.Result.From); err != nil {
zap.L().Error("Invalid from address", zap.Error(err))
- // Continue processing despite error
}
- from = validation.ConvertToZAddress(tracerResponse.Result.From)
+ res.From = validation.ConvertToZAddress(tracerResponse.Result.From)
}
if tracerResponse.Result.To != "" {
if err := validation.ValidateAddress(tracerResponse.Result.To); err != nil {
zap.L().Error("Invalid to address", zap.Error(err))
- // Continue processing despite error
}
- to = validation.ConvertToZAddress(tracerResponse.Result.To)
+ res.To = validation.ConvertToZAddress(tracerResponse.Result.To)
}
// Validate and process output
- output = 1
+ res.Output = 1
if tracerResponse.Result.Output != "" {
if !validation.IsValidHexString(tracerResponse.Result.Output) {
zap.L().Error("Invalid output format", zap.String("output", tracerResponse.Result.Output))
- output = 0
+ res.Output = 0
} else if tracerResponse.Result.Output != "0x" && len(tracerResponse.Result.Output) > 2 {
- // Remove "0x" prefix and leading zeros
hexStr := strings.TrimPrefix(tracerResponse.Result.Output, "0x")
hexStr = strings.TrimLeft(hexStr, "0")
- // If it's an address (40 characters), just store 1 to indicate success
- if len(tracerResponse.Result.Output) == 42 { // "0x" + 40 chars
- output = 1
+ if len(tracerResponse.Result.Output) == 42 { // "0x" + 40 chars — an address
+ res.Output = 1
} else if hexStr == "" {
- output = 0
+ res.Output = 0
} else {
- // Try to parse as uint64 if it's a small enough number
if parsed, err := strconv.ParseUint(hexStr, 16, 64); err == nil {
- output = parsed
+ res.Output = parsed
} else {
- // For larger numbers, just store 1 to indicate success
zap.L().Debug("Output value too large for uint64, storing 1",
zap.String("output", tracerResponse.Result.Output))
- output = 1
+ res.Output = 1
}
}
}
}
// Safely handle TraceAddress
- traceAddress = nil
if tracerResponse.Result.TraceAddress != nil {
- traceAddress = make([]int, len(tracerResponse.Result.TraceAddress))
- copy(traceAddress, tracerResponse.Result.TraceAddress)
+ res.TraceAddress = make([]int, len(tracerResponse.Result.TraceAddress))
+ copy(res.TraceAddress, tracerResponse.Result.TraceAddress)
}
// Process input data if it exists and has sufficient length
@@ -397,28 +423,21 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
const addressLength = 64
const minimumLength = prefixLength + methodIDLength + addressLength
- addressFunctionidentifier = ""
- amountFunctionIdentifier = 0
-
if len(tracerResponse.Result.Input) > minimumLength {
- // Validate input format
if !validation.IsValidHexString(tracerResponse.Result.Input) {
zap.L().Error("Invalid input format", zap.String("input", tracerResponse.Result.Input))
} else {
- // Strip the '0x' prefix and method ID
+ // Strip the '0x' prefix and method ID (first 4 bytes = 8 hex chars)
data := tracerResponse.Result.Input[10:]
- // Extract and validate address
if len(data) >= 64 {
extractedAddr := "0x" + data[24:64]
if err := validation.ValidateAddress(extractedAddr); err == nil {
- // Convert to Z format before returning
- addressFunctionidentifier = validation.ConvertToZAddress(extractedAddr)
+ res.AddressFunctionIdentifier = validation.ConvertToZAddress(extractedAddr)
} else {
zap.L().Error("Invalid extracted address", zap.Error(err))
}
- // Extract and validate amount
if len(data) >= 128 {
amountHex := data[64:128]
if !validation.IsValidHexString("0x" + amountHex) {
@@ -428,7 +447,7 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
return ok
}() {
if amountBigInt.IsUint64() {
- amountFunctionIdentifier = amountBigInt.Uint64()
+ res.AmountFunctionIdentifier = amountBigInt.Uint64()
} else {
zap.L().Warn("Amount exceeds uint64 range")
}
@@ -440,18 +459,12 @@ func CallDebugTraceTransaction(hash string) (transactionType string, callType st
}
}
- return tracerResponse.Result.Type,
- tracerResponse.Result.CallType,
- from,
- to,
- 0, // input is not used in the current implementation
- output,
- traceAddress,
- value, // Now using the parsed value instead of hardcoded 0
- gas,
- gasUsed,
- addressFunctionidentifier,
- amountFunctionIdentifier
+ res.TransactionType = tracerResponse.Result.Type
+ res.CallType = tracerResponse.Result.CallType
+ // Input field is reserved but not populated in the current implementation.
+ res.Input = 0
+
+ return res
}
func GetBalance(address string) (string, error) {
@@ -724,7 +737,7 @@ func ZondCall(contractAddress string) (*models.ZondResponse, error) {
}
defer resp.Body.Close()
- body, err := ioutil.ReadAll(resp.Body)
+ body, err := io.ReadAll(resp.Body)
if err != nil {
zap.L().Info("Failed to read response body", zap.Error(err))
return nil, err
diff --git a/Zond2mongoDB/rpc/tokenscalls.go b/Zond2mongoDB/rpc/tokenscalls.go
index 828a49e..eff65ea 100644
--- a/Zond2mongoDB/rpc/tokenscalls.go
+++ b/Zond2mongoDB/rpc/tokenscalls.go
@@ -46,8 +46,10 @@ func CallContractMethod(contractAddress string, methodSig string) (string, error
zap.String("contractAddress", contractAddress),
zap.String("methodSig", methodSig[:10]+"...")) // Log just the beginning of the signature for brevity
- // Ensure contract address has Z prefix for Zond blockchain
- if !strings.HasPrefix(contractAddress, "Z") {
+ // Ensure contract address has Z prefix for Zond RPC
+ if strings.HasPrefix(contractAddress, "0x") {
+ contractAddress = "Z" + contractAddress[2:]
+ } else if !strings.HasPrefix(contractAddress, "Z") {
contractAddress = "Z" + contractAddress
}
@@ -329,30 +331,25 @@ func GetTokenBalance(contractAddress string, holderAddress string) (string, erro
if holderAddress == "Z0" ||
holderAddress == "Z0000000000000000000000000000000000000000" ||
holderAddress == "0x0" ||
- holderAddress == "0x0000000000000000000000000000000000000000" ||
- strings.ToLower(holderAddress) == "z0000000000000000000000000000000000000000" ||
- strings.ToLower(holderAddress) == "0x0000000000000000000000000000000000000000" {
+ holderAddress == "0x0000000000000000000000000000000000000000" {
zap.L().Info("Zero address detected, returning zero balance",
zap.String("contractAddress", contractAddress),
zap.String("holderAddress", holderAddress))
return "0", nil
}
- // Ensure contract address has Z prefix for Zond blockchain
- if !strings.HasPrefix(contractAddress, "Z") {
- if strings.HasPrefix(contractAddress, "0x") {
- contractAddress = "Z" + strings.TrimPrefix(contractAddress, "0x")
- } else {
- contractAddress = "Z" + contractAddress
- }
+ // Ensure contract address has Z prefix for Zond RPC
+ if strings.HasPrefix(contractAddress, "0x") {
+ contractAddress = "Z" + contractAddress[2:]
+ } else if !strings.HasPrefix(contractAddress, "Z") {
+ contractAddress = "Z" + contractAddress
}
- // First, normalize the holder address
+ // Ensure holder address has Z prefix for RPC
originalHolderAddress := holderAddress // Keep original for logging
- // Convert 0x prefix to Z prefix if present
if strings.HasPrefix(holderAddress, "0x") {
- holderAddress = "Z" + strings.TrimPrefix(holderAddress, "0x")
+ holderAddress = "Z" + holderAddress[2:]
} else if !strings.HasPrefix(holderAddress, "Z") {
holderAddress = "Z" + holderAddress
}
@@ -437,8 +434,8 @@ func DecodeTransferEvent(data string) (string, string, string) {
return "", "", ""
}
- // Extract recipient address (remove leading zeros)
- recipient := "Z" + TrimLeftZeros(data[34:74])
+ // Extract recipient address, canonical Z-prefix form. NOTE(review): TrimLeftZeros shortens any address whose first hex nibble is zero, so the length-41 check below will reject those otherwise-valid addresses — confirm this is intended.
+ recipient := "Z" + strings.ToLower(TrimLeftZeros(data[34:74]))
if len(recipient) != 41 { // Check if it's a valid address length (Z + 40 hex chars)
return "", "", ""
}
@@ -551,20 +548,12 @@ func IsValidRecipient(recipient string) bool {
return validation.IsValidAddress(recipient)
}
-// ParseTransferEvent parses a transfer event log
+// ParseTransferEvent parses a transfer event log.
+// Addresses are returned in canonical Z-prefix form.
func ParseTransferEvent(log models.Log) (string, string, *big.Int, error) {
- // Extract addresses from topics
- from := log.Topics[1]
- to := log.Topics[2]
-
- // Ensure addresses have proper format with Z prefix (not 0x)
- if !strings.HasPrefix(from, "Z") {
- from = "Z" + TrimLeftZeros(from)
- }
-
- if !strings.HasPrefix(to, "Z") {
- to = "Z" + TrimLeftZeros(to)
- }
+ // Extract addresses from topics (32-byte padded: slice off the 24-char pad after "0x"). NOTE(review): the additional TrimLeftZeros call shortens addresses beginning with a zero nibble, which IsValidAddress below may then reject — confirm intended.
+ from := "Z" + strings.ToLower(TrimLeftZeros(log.Topics[1][26:]))
+ to := "Z" + strings.ToLower(TrimLeftZeros(log.Topics[2][26:]))
// Validate addresses
if !validation.IsValidAddress(from) {
@@ -626,8 +615,8 @@ func GetCustomTokenInfo(contractAddress string) (map[string]string, error) {
if err == nil && owner != "" && owner != "0x" && len(owner) >= 42 {
// Extract address - typically format is 0x + 32 bytes (64 chars) with address in last 20 bytes
if len(owner) >= 66 {
- // Extract the address from the last 40 characters (20 bytes)
- addressHex := owner[len(owner)-40:]
+ // Extract the address from the last 40 characters (20 bytes), canonical Z-prefix
+ addressHex := strings.ToLower(owner[len(owner)-40:])
result["tokenOwner"] = "Z" + addressHex
}
}
diff --git a/Zond2mongoDB/services/validator_service.go b/Zond2mongoDB/services/validator_service.go
index 0398aee..dee24b0 100644
--- a/Zond2mongoDB/services/validator_service.go
+++ b/Zond2mongoDB/services/validator_service.go
@@ -15,147 +15,233 @@ import (
"go.uber.org/zap"
)
-// StoreValidators stores validator data from the beacon chain response
+// StoreValidators stores validator data from the beacon chain response.
+// Each validator is written as its own MongoDB document keyed by its index.
func StoreValidators(beaconResponse models.BeaconValidatorResponse, currentEpoch string) error {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
- defer cancel()
+ if err := bulkUpsertValidators(beaconResponse, currentEpoch); err != nil {
+ return err
+ }
- // Parse current epoch for status calculation
currentEpochInt, _ := strconv.ParseInt(currentEpoch, 10, 64)
+ if err := storeValidatorHistoryFromDB(currentEpoch, currentEpochInt); err != nil {
+ configs.Logger.Warn("Failed to store validator history", zap.Error(err))
+ // Do not fail the main operation for history errors.
+ }
+
+ configs.Logger.Info("Successfully updated validators",
+ zap.Int("count", len(beaconResponse.ValidatorList)),
+ zap.String("epoch", currentEpoch))
+ return nil
+}
- // Convert each validator
- newValidators := make([]models.ValidatorRecord, 0, len(beaconResponse.ValidatorList))
+// bulkUpsertValidators writes each validator as its own document using BulkWrite upserts.
+func bulkUpsertValidators(beaconResponse models.BeaconValidatorResponse, currentEpoch string) error {
+ if len(beaconResponse.ValidatorList) == 0 {
+ return nil
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+
+ updatedAt := fmt.Sprintf("%d", time.Now().Unix())
+
+ writeModels := make([]mongo.WriteModel, 0, len(beaconResponse.ValidatorList))
for _, v := range beaconResponse.ValidatorList {
- // Determine if this validator is the leader for their slot (simplified: based on index mod)
- slotNum, _ := strconv.ParseInt(v.Index, 10, 64)
- isLeader := slotNum%128 == 0 // Simplified leader selection
-
- record := models.ValidatorRecord{
- Index: v.Index,
- PublicKeyHex: models.Base64ToHex(v.Validator.PublicKey),
- WithdrawalCredentialsHex: models.Base64ToHex(v.Validator.WithdrawalCredentials),
- EffectiveBalance: v.Validator.EffectiveBalance,
- Slashed: v.Validator.Slashed,
- ActivationEligibilityEpoch: v.Validator.ActivationEligibilityEpoch,
- ActivationEpoch: v.Validator.ActivationEpoch,
- ExitEpoch: v.Validator.ExitEpoch,
- WithdrawableEpoch: v.Validator.WithdrawableEpoch,
- SlotNumber: v.Index,
- IsLeader: isLeader,
- }
- newValidators = append(newValidators, record)
+ doc := buildValidatorDocument(v, currentEpoch, updatedAt)
+ filter := bson.M{"_id": doc.ID}
+ update := bson.M{"$set": doc}
+ writeModels = append(writeModels, mongo.NewUpdateOneModel().
+ SetFilter(filter).
+ SetUpdate(update).
+ SetUpsert(true))
}
- // First try to get existing document
- var storage models.ValidatorStorage
- err := configs.GetValidatorCollection().FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
- if err != nil && err != mongo.ErrNoDocuments {
- configs.Logger.Error("Failed to get existing validator document", zap.Error(err))
+ opts := options.BulkWrite().SetOrdered(false)
+ result, err := configs.ValidatorsCollections.BulkWrite(ctx, writeModels, opts)
+ if err != nil {
+ configs.Logger.Error("Failed to bulk-write validator documents", zap.Error(err))
return err
}
- if err == mongo.ErrNoDocuments {
- // Create new document if it doesn't exist
- storage = models.ValidatorStorage{
- ID: "validators",
- Epoch: currentEpoch,
- UpdatedAt: fmt.Sprintf("%d", time.Now().Unix()),
- Validators: newValidators,
- }
- } else {
- // Update existing validators and add new ones
- // Create a map of existing validators by public key for quick lookup
- existingValidatorIndex := make(map[string]int)
- for i, v := range storage.Validators {
- existingValidatorIndex[v.PublicKeyHex] = i
- }
+ configs.Logger.Info("Bulk-upserted validators",
+ zap.Int64("upserted", result.UpsertedCount),
+ zap.Int64("modified", result.ModifiedCount),
+ zap.String("epoch", currentEpoch))
+ return nil
+}
+
+// buildValidatorDocument converts a BeaconValidator into a ValidatorDocument.
+func buildValidatorDocument(v models.BeaconValidator, epoch, updatedAt string) models.ValidatorDocument {
+ var idx int64
+ fmt.Sscanf(v.Index, "%d", &idx)
+ isLeader := idx%128 == 0 // Simplified leader selection
+
+ return models.ValidatorDocument{
+ ID: v.Index,
+ PublicKeyHex: models.Base64ToHex(v.Validator.PublicKey),
+ WithdrawalCredentialsHex: models.Base64ToHex(v.Validator.WithdrawalCredentials),
+ EffectiveBalance: v.Validator.EffectiveBalance,
+ Slashed: v.Validator.Slashed,
+ ActivationEligibilityEpoch: v.Validator.ActivationEligibilityEpoch,
+ ActivationEpoch: v.Validator.ActivationEpoch,
+ ExitEpoch: v.Validator.ExitEpoch,
+ WithdrawableEpoch: v.Validator.WithdrawableEpoch,
+ SlotNumber: v.Index,
+ IsLeader: isLeader,
+ Epoch: epoch,
+ UpdatedAt: updatedAt,
+ }
+}
+
+// storeValidatorHistoryFromDB computes validator statistics by scanning the
+// per-document collection and persists them to validator_history.
+func storeValidatorHistoryFromDB(epoch string, currentEpochInt int64) error {
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
- // Update existing validators or append new ones
- for _, v := range newValidators {
- if idx, exists := existingValidatorIndex[v.PublicKeyHex]; exists {
- // Update existing validator's mutable fields
- storage.Validators[idx].EffectiveBalance = v.EffectiveBalance
- storage.Validators[idx].Slashed = v.Slashed
- storage.Validators[idx].ExitEpoch = v.ExitEpoch
- storage.Validators[idx].WithdrawableEpoch = v.WithdrawableEpoch
- storage.Validators[idx].SlotNumber = v.SlotNumber
- storage.Validators[idx].IsLeader = v.IsLeader
- } else {
- // Add new validator
- storage.Validators = append(storage.Validators, v)
- }
+ totalCount, err := configs.ValidatorsCollections.CountDocuments(ctx, bson.M{})
+ if err != nil {
+ return fmt.Errorf("count validators: %w", err)
+ }
+
+ // Project only the fields needed for status calculation and balance sum.
+ cursor, err := configs.ValidatorsCollections.Find(ctx, bson.M{},
+ options.Find().SetProjection(bson.M{
+ "slashed": 1,
+ "activationEpoch": 1,
+ "exitEpoch": 1,
+ "effectiveBalance": 1,
+ }))
+ if err != nil {
+ return fmt.Errorf("find validators for history: %w", err)
+ }
+ defer cursor.Close(ctx)
+
+ var docs []models.ValidatorDocument
+ if err := cursor.All(ctx, &docs); err != nil {
+ return fmt.Errorf("decode validators for history: %w", err)
+ }
+
+ var activeCount, pendingCount, exitedCount, slashedCount int
+ totalStaked := big.NewInt(0)
+
+ for _, d := range docs {
+ status := models.GetValidatorStatus(d.ActivationEpoch, d.ExitEpoch, d.Slashed, currentEpochInt)
+ switch status {
+ case "active":
+ activeCount++
+ case "pending":
+ pendingCount++
+ case "exited":
+ exitedCount++
+ case "slashed":
+ slashedCount++
}
+ if balance, ok := new(big.Int).SetString(d.EffectiveBalance, 10); ok {
+ totalStaked.Add(totalStaked, balance)
+ }
+ }
- // Update epoch and timestamp
- storage.Epoch = currentEpoch
- storage.UpdatedAt = fmt.Sprintf("%d", time.Now().Unix())
+ record := &models.ValidatorHistoryRecord{
+ Epoch: epoch,
+ Timestamp: time.Now().Unix(),
+ ValidatorsCount: int(totalCount),
+ ActiveCount: activeCount,
+ PendingCount: pendingCount,
+ ExitedCount: exitedCount,
+ SlashedCount: slashedCount,
+ TotalStaked: totalStaked.String(),
}
- // Upsert the document
opts := options.Update().SetUpsert(true)
- filter := bson.M{"_id": "validators"}
- update := bson.M{"$set": storage}
+ filter := bson.M{"epoch": record.Epoch}
+ update := bson.M{"$set": record}
- _, err = configs.GetValidatorCollection().UpdateOne(ctx, filter, update, opts)
+ _, err = configs.ValidatorHistoryCollections.UpdateOne(ctx, filter, update, opts)
if err != nil {
- configs.Logger.Error("Failed to update validator document", zap.Error(err))
- return err
- }
-
- // Store validator history for this epoch AFTER updating storage
- // Use the accumulated validators (storage.Validators) not just the new ones
- if err := StoreValidatorHistory(storage.Validators, currentEpoch, currentEpochInt); err != nil {
- configs.Logger.Warn("Failed to store validator history", zap.Error(err))
- // Don't fail the main operation
+ return fmt.Errorf("insert validator history: %w", err)
}
- configs.Logger.Info("Successfully updated validators",
- zap.Int("newCount", len(newValidators)),
- zap.Int("totalCount", len(storage.Validators)),
- zap.String("epoch", currentEpoch))
+ configs.Logger.Debug("Stored validator history",
+ zap.String("epoch", record.Epoch),
+ zap.Int("validatorsCount", record.ValidatorsCount))
return nil
}
-// GetValidators retrieves all validators from storage
+// GetValidators retrieves all validators from the per-document collection
+// and returns them assembled in the legacy ValidatorStorage shape.
func GetValidators() (*models.ValidatorStorage, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.GetValidatorCollection().FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
+ cursor, err := configs.ValidatorsCollections.Find(ctx, bson.M{})
if err != nil {
- configs.Logger.Error("Failed to get validator document", zap.Error(err))
+ configs.Logger.Error("Failed to find validator documents", zap.Error(err))
return nil, err
}
+ defer cursor.Close(ctx)
- return &storage, nil
+ var docs []models.ValidatorDocument
+ if err := cursor.All(ctx, &docs); err != nil {
+ configs.Logger.Error("Failed to decode validator documents", zap.Error(err))
+ return nil, err
+ }
+
+ records := make([]models.ValidatorRecord, 0, len(docs))
+ epoch := ""
+ updatedAt := ""
+ for _, d := range docs {
+ records = append(records, validatorDocToRecord(d))
+ if epoch == "" {
+ epoch = d.Epoch
+ updatedAt = d.UpdatedAt
+ }
+ }
+
+ return &models.ValidatorStorage{
+ ID: "validators",
+ Epoch: epoch,
+ UpdatedAt: updatedAt,
+ Validators: records,
+ }, nil
}
-// GetValidatorByPublicKey retrieves a specific validator by their public key
+// GetValidatorByPublicKey retrieves a specific validator by their public key hex.
func GetValidatorByPublicKey(publicKeyHex string) (*models.ValidatorRecord, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.GetValidatorCollection().FindOne(ctx, bson.M{
- "validators.publicKeyHex": publicKeyHex,
- }).Decode(&storage)
-
+ var doc models.ValidatorDocument
+ err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"publicKeyHex": publicKeyHex}).Decode(&doc)
if err != nil {
+ if err == mongo.ErrNoDocuments {
+ return nil, fmt.Errorf("validator not found")
+ }
return nil, err
}
- // Find the matching validator
- for _, v := range storage.Validators {
- if v.PublicKeyHex == publicKeyHex {
- return &v, nil
- }
- }
+ record := validatorDocToRecord(doc)
+ return &record, nil
+}
- return nil, fmt.Errorf("validator not found")
+// validatorDocToRecord maps a ValidatorDocument to the legacy ValidatorRecord type.
+func validatorDocToRecord(d models.ValidatorDocument) models.ValidatorRecord {
+ return models.ValidatorRecord{
+ Index: d.ID,
+ PublicKeyHex: d.PublicKeyHex,
+ WithdrawalCredentialsHex: d.WithdrawalCredentialsHex,
+ EffectiveBalance: d.EffectiveBalance,
+ Slashed: d.Slashed,
+ ActivationEligibilityEpoch: d.ActivationEligibilityEpoch,
+ ActivationEpoch: d.ActivationEpoch,
+ ExitEpoch: d.ExitEpoch,
+ WithdrawableEpoch: d.WithdrawableEpoch,
+ SlotNumber: d.SlotNumber,
+ IsLeader: d.IsLeader,
+ }
}
-// StoreEpochInfo stores the current epoch information from beacon chain head
+// StoreEpochInfo stores the current epoch information from beacon chain head.
func StoreEpochInfo(chainHead *models.BeaconChainHeadResponse) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -188,6 +274,7 @@ func StoreEpochInfo(chainHead *models.BeaconChainHeadResponse) error {
}
// StoreValidatorHistory computes and stores validator statistics for the current epoch
+// from a supplied []ValidatorRecord slice (kept for callers that already have the data).
func StoreValidatorHistory(validators []models.ValidatorRecord, epoch string, currentEpochInt int64) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -196,9 +283,7 @@ func StoreValidatorHistory(validators []models.ValidatorRecord, epoch string, cu
totalStaked := big.NewInt(0)
for _, v := range validators {
- // Calculate status
status := models.GetValidatorStatus(v.ActivationEpoch, v.ExitEpoch, v.Slashed, currentEpochInt)
-
switch status {
case "active":
activeCount++
@@ -209,8 +294,6 @@ func StoreValidatorHistory(validators []models.ValidatorRecord, epoch string, cu
case "slashed":
slashedCount++
}
-
- // Sum effective balance
if balance, ok := new(big.Int).SetString(v.EffectiveBalance, 10); ok {
totalStaked.Add(totalStaked, balance)
}
@@ -227,7 +310,6 @@ func StoreValidatorHistory(validators []models.ValidatorRecord, epoch string, cu
TotalStaked: totalStaked.String(),
}
- // Use epoch as unique identifier to prevent duplicate entries
opts := options.Update().SetUpsert(true)
filter := bson.M{"epoch": record.Epoch}
update := bson.M{"$set": record}
diff --git a/Zond2mongoDB/synchroniser/gap_detection.go b/Zond2mongoDB/synchroniser/gap_detection.go
index 4077155..bf3484b 100644
--- a/Zond2mongoDB/synchroniser/gap_detection.go
+++ b/Zond2mongoDB/synchroniser/gap_detection.go
@@ -71,14 +71,16 @@ func detectGaps(fromBlock, toBlock string) []string {
zap.String("adjusted_from", fromBlock))
}
- // Get all existing block numbers in the range
+ // Get all existing block numbers in the range.
+ // Query on blockNumberInt (int64) so the $gte/$lte comparison is numeric
+ // rather than lexicographic hex string ordering.
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
filter := bson.M{
- "result.number": bson.M{
- "$gte": fromBlock,
- "$lte": toBlock,
+ "blockNumberInt": bson.M{
+ "$gte": fromNum,
+ "$lte": toNum,
},
}
diff --git a/Zond2mongoDB/synchroniser/producer_consumer.go b/Zond2mongoDB/synchroniser/producer_consumer.go
index ab21a19..4cd6cf0 100644
--- a/Zond2mongoDB/synchroniser/producer_consumer.go
+++ b/Zond2mongoDB/synchroniser/producer_consumer.go
@@ -18,10 +18,10 @@ import (
// Batch size constants for consistent use across sync methods
const (
// DefaultBatchSize is the standard batch size for normal sync operations
- DefaultBatchSize = 64
+ DefaultBatchSize = 128
// LargeBatchSize is used when syncing a large number of blocks (>1000)
- LargeBatchSize = 128
+ LargeBatchSize = 256
// BatchSyncThreshold is the number of blocks behind after which we switch to batch sync
BatchSyncThreshold = 64
@@ -30,7 +30,7 @@ const (
LargeSyncThreshold = 1000 // 0x3e8 in hex
// MaxProducerConcurrency limits concurrent block fetching goroutines
- MaxProducerConcurrency = 8
+ MaxProducerConcurrency = 16
)
// producerSem is a semaphore to limit concurrent producer goroutines
diff --git a/Zond2mongoDB/synchroniser/token_sync.go b/Zond2mongoDB/synchroniser/token_sync.go
index 3fb98af..5c7b6ea 100644
--- a/Zond2mongoDB/synchroniser/token_sync.go
+++ b/Zond2mongoDB/synchroniser/token_sync.go
@@ -62,18 +62,23 @@ func processTokensWithConfig(initialSyncStart string, maxHex string, config Toke
processTokenTransferBatches(blocksWithTxs, config)
}
-// getBlocksWithTransactions queries the database for blocks that have at least one transaction.
-// It fetches all blocks with transactions and filters by numeric comparison in Go,
-// because hex strings are not zero-padded and MongoDB's lexicographic $gte/$lte
-// comparison produces incorrect results across different hex string lengths.
+// getBlocksWithTransactions queries the database for blocks that have at least one
+// transaction and fall within [fromBlock, toBlock] (inclusive).
+// It uses the blockNumberInt field for numeric range comparison so that MongoDB's
+// $gte/$lte operators work correctly regardless of hex string length.
func getBlocksWithTransactions(fromBlock, toBlock string, timeoutSec int) ([]string, error) {
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
defer cancel()
- // Only filter on having transactions - range filtering is done in Go
- // to avoid lexicographic hex string comparison issues in MongoDB
+ fromInt := db.HexToInt64(fromBlock)
+ toInt := db.HexToInt64(toBlock)
+
filter := bson.M{
"result.transactions.0": bson.M{"$exists": true},
+ "blockNumberInt": bson.M{
+ "$gte": fromInt,
+ "$lte": toInt,
+ },
}
projection := bson.M{"result.number": 1, "_id": 0}
@@ -97,11 +102,7 @@ func getBlocksWithTransactions(fromBlock, toBlock string, timeoutSec int) ([]str
continue
}
- // Numeric range check using proper hex comparison
- if utils.CompareHexNumbers(block.Result.Number, fromBlock) >= 0 &&
- utils.CompareHexNumbers(block.Result.Number, toBlock) <= 0 {
- blocksWithTxs = append(blocksWithTxs, block.Result.Number)
- }
+ blocksWithTxs = append(blocksWithTxs, block.Result.Number)
}
return blocksWithTxs, nil
diff --git a/Zond2mongoDB/validation/hex.go b/Zond2mongoDB/validation/hex.go
index 51aeee3..f6bfe22 100644
--- a/Zond2mongoDB/validation/hex.go
+++ b/Zond2mongoDB/validation/hex.go
@@ -104,18 +104,16 @@ func StripAddressPrefix(address string) string {
return address
}
-// ConvertToZAddress converts a 0x address to Z format if needed
+// ConvertToZAddress converts any address format to canonical Z-prefix form.
+// The canonical storage format is "Z" + lowercase hex.
func ConvertToZAddress(address string) string {
- // If already in Z format (uppercase or lowercase), return as is
if strings.HasPrefix(address, "Z") || strings.HasPrefix(address, "z") {
- return address
+ return "Z" + strings.ToLower(address[1:])
}
- // If in 0x format, convert to Z format
- if strings.HasPrefix(address, "0x") {
- return "Z" + address[2:]
+ if strings.HasPrefix(address, "0x") || strings.HasPrefix(address, "0X") {
+ return "Z" + strings.ToLower(address[2:])
}
- // If no prefix, add Z prefix
- return "Z" + address
+ return "Z" + strings.ToLower(address)
}
diff --git a/backendAPI/configs/setup.go b/backendAPI/configs/setup.go
index 5eca1b3..ebdb294 100644
--- a/backendAPI/configs/setup.go
+++ b/backendAPI/configs/setup.go
@@ -59,7 +59,7 @@ func ConnectDB() *mongo.Client {
func createIndexes(db *mongo.Database) {
ctx := context.Background()
- // Define required indexes
+ // blocks collection indexes
blocksIndexes := []mongo.IndexModel{
{
Keys: bson.D{
@@ -76,26 +76,97 @@ func createIndexes(db *mongo.Database) {
},
}
- // Transactions collection indexes
+ // transactionByAddress collection indexes. NOTE(review): the unique txHash index below assumes one document per transaction — verify this collection does not store one document per (address, tx) pair, or index creation will fail on existing duplicates.
transactionsIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "timeStamp", Value: -1}},
+ Options: options.Index().SetName("timestamp_desc"),
+ },
+ {
+ Keys: bson.D{{Key: "txHash", Value: 1}},
+ Options: options.Index().SetName("tx_hash").SetUnique(true),
+ },
{
Keys: bson.D{
+ {Key: "from", Value: 1},
{Key: "timeStamp", Value: -1},
},
- Options: options.Index().SetName("timestamp_desc"),
+ Options: options.Index().SetName("from_timestamp_desc"),
},
{
Keys: bson.D{
- {Key: "txHash", Value: 1},
+ {Key: "to", Value: 1},
+ {Key: "timeStamp", Value: -1},
},
- Options: options.Index().SetName("tx_hash"),
+ Options: options.Index().SetName("to_timestamp_desc"),
+ },
+ }
+
+ // addresses collection indexes
+ addressesIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "id", Value: 1}},
+ Options: options.Index().SetName("id_unique").SetUnique(true),
+ },
+ {
+ Keys: bson.D{{Key: "balance", Value: -1}},
+ Options: options.Index().SetName("balance_desc"),
+ },
+ }
+
+ // internalTransactionByAddress collection indexes
+ internalTransactionsIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "from", Value: 1}},
+ Options: options.Index().SetName("internal_from"),
+ },
+ {
+ Keys: bson.D{{Key: "to", Value: 1}},
+ Options: options.Index().SetName("internal_to"),
+ },
+ }
+
+ // contractCode collection indexes
+ contractCodeIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "address", Value: 1}},
+ Options: options.Index().SetName("contract_address_unique").SetUnique(true),
+ },
+ }
+
+ // transfer collection indexes
+ transferIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "txHash", Value: 1}},
+ Options: options.Index().SetName("transfer_txhash_unique").SetUnique(true),
+ },
+ }
+
+ // validators collection indexes (per-document model). NOTE(review): the "status" index below assumes validator documents persist a status field; elsewhere in this patch status is computed at read time via GetValidatorStatus — confirm the field exists before indexing it.
+ validatorsIndexes := []mongo.IndexModel{
+ {
+ Keys: bson.D{{Key: "publicKeyHex", Value: 1}},
+ Options: options.Index().SetName("validators_pubkey_idx"),
+ },
+ {
+ Keys: bson.D{{Key: "status", Value: 1}},
+ Options: options.Index().SetName("validators_status_idx"),
+ },
+ {
+ Keys: bson.D{{Key: "effectiveBalance", Value: -1}},
+ Options: options.Index().SetName("validators_balance_desc_idx"),
},
}
- // Check and create indexes if needed
+ // Map of collection name -> indexes to create
collections := map[string][]mongo.IndexModel{
- "blocks": blocksIndexes,
- "transactionByAddress": transactionsIndexes,
+ "blocks": blocksIndexes,
+ "transactionByAddress": transactionsIndexes,
+ "addresses": addressesIndexes,
+ "internalTransactionByAddress": internalTransactionsIndexes,
+ "contractCode": contractCodeIndexes,
+ "transfer": transferIndexes,
+ "validators": validatorsIndexes,
}
for collName, indexes := range collections {
diff --git a/backendAPI/db/address.go b/backendAPI/db/address.go
index 923e972..f034784 100644
--- a/backendAPI/db/address.go
+++ b/backendAPI/db/address.go
@@ -5,14 +5,12 @@ import (
"backendAPI/models"
"bytes"
"context"
- "encoding/hex"
"encoding/json"
"fmt"
"io"
"math/big"
"net/http"
"os"
- "reflect"
"strings"
"time"
@@ -27,9 +25,8 @@ func ReturnSingleAddress(query string) (models.Address, error) {
var result models.Address
defer cancel()
- // Normalize address by converting to lowercase
- // This ensures case-insensitive lookup and storage
- addressHex := strings.ToLower(query)
+ // Normalize address to canonical Z-prefix form
+ addressHex := normalizeAddress(query)
// Try to find existing address
filter := bson.D{{Key: "id", Value: addressHex}}
@@ -95,59 +92,40 @@ func ReturnRichlist() []models.Address {
func ReturnRankAddress(address string) (int64, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
- var addresses []models.Address
defer cancel()
- // Normalize address by converting to lowercase
- addressHex := strings.ToLower(address)
-
- query, err := hex.DecodeString(strings.TrimPrefix(addressHex, "z"))
- if err != nil {
- fmt.Println(err)
- }
-
- projection := bson.D{
- {Key: "id", Value: 1},
- {Key: "balance", Value: 1},
- }
-
- opts := options.Find().
- SetProjection(projection).
- SetSort(bson.D{{Key: "balance", Value: -1}})
+ // Normalize address to canonical Z-prefix form (matches storage format)
+ addressHex := normalizeAddress(address)
- results, err := configs.AddressesCollections.Find(ctx, bson.D{}, opts)
+ // Look up the target address to get its balance
+ var target models.Address
+ err := configs.AddressesCollections.FindOne(ctx, bson.D{{Key: "id", Value: addressHex}}).Decode(&target)
if err != nil {
- fmt.Println(err)
- }
-
- defer results.Close(ctx)
- for results.Next(ctx) {
- var singleAddress models.Address
- if err = results.Decode(&singleAddress); err != nil {
- fmt.Println(err)
+ if err == mongo.ErrNoDocuments {
+ // Address not found — return 0 to signal unknown rank
+ return 0, nil
}
- addresses = append(addresses, singleAddress)
+ return 0, fmt.Errorf("error looking up address for rank: %v", err)
}
- var i int64
- i = 0
- for i = 0; i < GetWalletCount(); i++ {
- if reflect.DeepEqual(addresses[i].ID, query) {
- fmt.Println(query)
- break
- }
+ // Count how many addresses have a strictly higher balance; rank = that count + 1
+ count, err := configs.AddressesCollections.CountDocuments(ctx, bson.M{"balance": bson.M{"$gt": target.Balance}})
+ if err != nil {
+ return 0, fmt.Errorf("error counting addresses for rank: %v", err)
}
- return i + 1, nil
+ return count + 1, nil
}
func GetBalance(address string) (float64, string) {
var result models.Balance
- // Ensure address has uppercase Z prefix for RPC calls
+ // Ensure address has Z prefix for RPC calls
rpcAddress := address
- if strings.HasPrefix(rpcAddress, "z") {
- rpcAddress = "Z" + rpcAddress[1:]
+ if strings.HasPrefix(rpcAddress, "0x") {
+ rpcAddress = "Z" + rpcAddress[2:]
+ } else if !strings.HasPrefix(rpcAddress, "Z") {
+ rpcAddress = "Z" + rpcAddress
}
group := models.JsonRPC{
diff --git a/backendAPI/db/block.go b/backendAPI/db/block.go
index 000821a..e4cc7a5 100644
--- a/backendAPI/db/block.go
+++ b/backendAPI/db/block.go
@@ -19,18 +19,16 @@ func ReturnSingleBlock(block uint64) (models.ZondUint64Version, error) {
var result models.ZondUint64Version
- // Convert decimal block number to hex format with 0x prefix
- hexBlock := fmt.Sprintf("0x%x", block)
- filter := primitive.D{{Key: "result.number", Value: hexBlock}}
-
+ // Primary lookup: use blockNumberInt (int64) for an exact numeric match.
+ // This is reliable regardless of whether the hex string was stored with or
+ // without zero-padding, and uses the blockNumberInt_desc_idx index.
+ filter := primitive.D{{Key: "blockNumberInt", Value: int64(block)}}
err := configs.BlocksCollection.FindOne(ctx, filter).Decode(&result)
if err != nil {
- // Try with zero-padded hex if the first attempt failed
- hexBlockPadded := fmt.Sprintf("0x%02x", block)
- if hexBlockPadded != hexBlock {
- filter = primitive.D{{Key: "result.number", Value: hexBlockPadded}}
- err = configs.BlocksCollection.FindOne(ctx, filter).Decode(&result)
- }
+ // Fallback for documents written before blockNumberInt was added.
+ hexBlock := fmt.Sprintf("0x%x", block)
+ filter = primitive.D{{Key: "result.number", Value: hexBlock}}
+ err = configs.BlocksCollection.FindOne(ctx, filter).Decode(&result)
if err != nil {
return result, fmt.Errorf("block %d not found", block)
}
@@ -144,7 +142,7 @@ func ReturnBlockSizes() ([]primitive.M, error) {
cursor, err := configs.BlockSizesCollection.Find(ctx, primitive.D{}, opts)
if err != nil {
- panic(err)
+ return nil, fmt.Errorf("failed to query block sizes: %w", err)
}
var episodes []primitive.M
diff --git a/backendAPI/db/contract.go b/backendAPI/db/contract.go
index 729debc..3378f77 100644
--- a/backendAPI/db/contract.go
+++ b/backendAPI/db/contract.go
@@ -30,21 +30,15 @@ func ReturnContracts(page int64, limit int64, search string, isTokenFilter *bool
// Add search if provided, using correct field names
if search != "" {
- // Normalize the search address to lowercase for case-insensitive lookup
- normalizedSearch := strings.ToLower(search)
+ // Normalize the search address to canonical Z-prefix form
+ normalizedSearch := normalizeAddress(search)
- // Also try with Z prefix in case user pastes address without it
- searchWithZ := normalizedSearch
- if !strings.HasPrefix(normalizedSearch, "z") {
- searchWithZ = "z" + normalizedSearch
- }
-
- // Zond addresses start with 'Z'. Search for both with and without Z prefix.
+ // Zond addresses start with 'Z'. Search by normalized address or token name.
searchFilter := bson.D{
{Key: "$or", Value: bson.A{
- bson.D{{Key: "address", Value: bson.M{"$in": bson.A{normalizedSearch, searchWithZ}}}}, // Match contract address
- bson.D{{Key: "creatorAddress", Value: bson.M{"$in": bson.A{normalizedSearch, searchWithZ}}}}, // Match creator address
- bson.D{{Key: "name", Value: bson.D{{Key: "$regex", Value: normalizedSearch}, {Key: "$options", Value: "i"}}}}, // Match token name
+ bson.D{{Key: "address", Value: normalizedSearch}}, // Match contract address
+ bson.D{{Key: "creatorAddress", Value: normalizedSearch}}, // Match creator address
+ bson.D{{Key: "name", Value: bson.D{{Key: "$regex", Value: search}, {Key: "$options", Value: "i"}}}}, // Match token name
}},
}
// Combine with existing filter
@@ -93,20 +87,17 @@ func ReturnContractCode(address string) (models.ContractInfo, error) {
var result models.ContractInfo
- // Normalize address - try both Z and z prefixes since the syncer
- // stores with lowercase z while URLs/API use uppercase Z
- hexPart := strings.ToLower(strings.TrimPrefix(strings.TrimPrefix(address, "Z"), "z"))
- upperAddr := "Z" + hexPart
- lowerAddr := "z" + hexPart
+ // Normalize address to canonical Z-prefix form
+ normalizedAddr := normalizeAddress(address)
- // Query for contract code trying both prefix cases
- filter := bson.M{"address": bson.M{"$in": []string{upperAddr, lowerAddr}}}
+ // Query for contract code
+ filter := bson.M{"address": normalizedAddr}
err := configs.ContractInfoCollection.FindOne(ctx, filter).Decode(&result)
if err != nil {
if err == mongo.ErrNoDocuments {
// Log that we couldn't find the contract
- log.Printf("No contract found for address: %s (variants: %s, %s)", address, upperAddr, lowerAddr)
+ log.Printf("No contract found for address: %s (normalized: %s)", address, normalizedAddr)
// Return empty contract code with expected structure
return models.ContractInfo{
ContractAddress: "",
diff --git a/backendAPI/db/token.go b/backendAPI/db/token.go
index e4896f8..ddcb5af 100644
--- a/backendAPI/db/token.go
+++ b/backendAPI/db/token.go
@@ -18,7 +18,7 @@ func GetTokenBalancesByAddress(address string) ([]models.TokenBalance, error) {
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
defer cancel()
- // Search both Z-prefix and z-prefix variants (syncer stores lowercase z)
+ // Normalize address to canonical Z-prefix form
searchAddresses := normalizeAddressBoth(address)
collection := configs.GetCollection(configs.DB, "tokenBalances")
@@ -108,20 +108,22 @@ func GetTokenBalancesByAddress(address string) ([]models.TokenBalance, error) {
return results, nil
}
-// normalizeAddress converts an address to lowercase z-prefix format (matching DB storage)
+// normalizeAddress converts any address format to the canonical Z-prefix
+// form that the syncer stores in MongoDB (uppercase Z + lowercase hex).
func normalizeAddress(address string) string {
- if strings.HasPrefix(strings.ToLower(address), "0x") {
- return "z" + strings.ToLower(address[2:])
- } else if strings.HasPrefix(strings.ToLower(address), "z") {
- return "z" + strings.ToLower(address[1:])
+ if strings.HasPrefix(address, "0x") || strings.HasPrefix(address, "0X") {
+ return "Z" + strings.ToLower(address[2:])
}
- return "z" + strings.ToLower(address)
+ if strings.HasPrefix(address, "Z") || strings.HasPrefix(address, "z") {
+ return "Z" + strings.ToLower(address[1:])
+ }
+ return "Z" + strings.ToLower(address)
}
-// normalizeAddressBoth returns both Z and z prefix variants for querying
+// normalizeAddressBoth returns the canonical Z-prefix address as a slice.
+// The slice form is kept so callers using "$in" can remain unchanged.
func normalizeAddressBoth(address string) []string {
- hex := normalizeAddress(address)
- return []string{hex, "Z" + hex[1:]}
+ return []string{normalizeAddress(address)}
}
// GetTokenHolders returns all holders of a specific token contract with pagination
@@ -264,22 +266,22 @@ func GetTokenInfo(contractAddress string) (*models.TokenInfo, error) {
}, nil
}
-// GetTokenTransferByTxHash returns token transfer info for a given transaction hash
-// Returns nil if no token transfer is associated with this transaction
+// GetTokenTransferByTxHash returns token transfer info for a given transaction hash.
+// Returns nil, nil if no token transfer is associated with this transaction.
+// The syncer stores txHash with a lowercase 0x prefix; we normalize to that form.
func GetTokenTransferByTxHash(txHash string) (*models.TokenTransfer, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
collection := configs.GetCollection(configs.DB, "tokenTransfers")
- // Normalize the transaction hash - syncer stores with 0x prefix in lowercase
+ // Canonical storage format: lowercase with 0x prefix.
normalizedHash := strings.ToLower(txHash)
if !strings.HasPrefix(normalizedHash, "0x") {
normalizedHash = "0x" + normalizedHash
}
var transfer models.TokenTransfer
- // First try with 0x prefix (standard storage format)
err := collection.FindOne(ctx, bson.M{"txHash": normalizedHash}).Decode(&transfer)
if err == nil {
return &transfer, nil
@@ -288,24 +290,5 @@ func GetTokenTransferByTxHash(txHash string) (*models.TokenTransfer, error) {
return nil, err
}
- // Try without 0x prefix in case storage format varies
- hashWithoutPrefix := strings.TrimPrefix(normalizedHash, "0x")
- err = collection.FindOne(ctx, bson.M{"txHash": hashWithoutPrefix}).Decode(&transfer)
- if err == nil {
- return &transfer, nil
- }
- if err != mongo.ErrNoDocuments {
- return nil, err
- }
-
- // Try with original hash as-is
- err = collection.FindOne(ctx, bson.M{"txHash": txHash}).Decode(&transfer)
- if err == nil {
- return &transfer, nil
- }
- if err != mongo.ErrNoDocuments {
- return nil, err
- }
-
return nil, nil
}
diff --git a/backendAPI/db/transaction.go b/backendAPI/db/transaction.go
index 76c647a..0a3d1e2 100644
--- a/backendAPI/db/transaction.go
+++ b/backendAPI/db/transaction.go
@@ -3,12 +3,10 @@ package db
import (
"backendAPI/configs"
"backendAPI/models"
- "bytes"
"context"
"encoding/hex"
"fmt"
"math"
- "regexp"
"strconv"
"strings"
"time"
@@ -19,6 +17,7 @@ import (
"go.mongodb.org/mongo-driver/mongo/options"
)
+
func ReturnLatestTransactions() ([]models.TransactionByAddress, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
var transactions []models.TransactionByAddress
@@ -39,7 +38,8 @@ func ReturnLatestTransactions() ([]models.TransactionByAddress, error) {
opts := options.Find().
SetProjection(projection).
- SetSort(primitive.D{{Key: "timeStamp", Value: -1}})
+ SetSort(primitive.D{{Key: "timeStamp", Value: -1}}).
+ SetLimit(100)
results, err := configs.TransactionByAddressCollection.Find(ctx, primitive.D{}, opts)
if err != nil {
@@ -66,96 +66,56 @@ func ReturnAllInternalTransactionsByAddress(address string) ([]models.TraceResul
var transactions []models.TraceResult
- // Format the address for query
- formattedAddress := address
- if !strings.HasPrefix(formattedAddress, "Z") {
- formattedAddress = "Z" + formattedAddress
- }
+ // Normalize to canonical Z-prefix format used by the syncer.
+ normalizedAddress := normalizeAddress(address)
- // For internal transactions, we need to strip the Z prefix
- addressWithoutPrefix := strings.TrimPrefix(formattedAddress, "Z")
+ filter := primitive.D{{Key: "$or", Value: []primitive.D{
+ {{Key: "from", Value: normalizedAddress}},
+ {{Key: "to", Value: normalizedAddress}},
+ }}}
- // Try different case variants of the address
- addressVariants := []string{
- addressWithoutPrefix,
- strings.ToLower(addressWithoutPrefix),
- strings.ToUpper(addressWithoutPrefix),
+ projection := primitive.D{
+ {Key: "type", Value: 1},
+ {Key: "callType", Value: 1},
+ {Key: "hash", Value: 1},
+ {Key: "from", Value: 1},
+ {Key: "to", Value: 1},
+ {Key: "input", Value: 1},
+ {Key: "output", Value: 1},
+ {Key: "traceAddress", Value: 1},
+ {Key: "value", Value: 1},
+ {Key: "gas", Value: 1},
+ {Key: "gasUsed", Value: 1},
+ {Key: "addressFunctionIdentifier", Value: 1},
+ {Key: "amountFunctionIdentifier", Value: 1},
+ {Key: "blockTimestamp", Value: 1},
}
- for _, addrVariant := range addressVariants {
- decoded, err := hex.DecodeString(addrVariant)
- if err != nil {
- continue // Skip invalid variants
- }
-
- filter := primitive.D{{Key: "$or", Value: []primitive.D{
- {{Key: "from", Value: decoded}},
- {{Key: "to", Value: decoded}},
- }}}
-
- projection := primitive.D{
- {Key: "type", Value: 1},
- {Key: "callType", Value: 1},
- {Key: "hash", Value: 1},
- {Key: "from", Value: 1},
- {Key: "to", Value: 1},
- {Key: "input", Value: 1},
- {Key: "output", Value: 1},
- {Key: "traceAddress", Value: 1},
- {Key: "value", Value: 1},
- {Key: "gas", Value: 1},
- {Key: "gasUsed", Value: 1},
- {Key: "addressFunctionIdentifier", Value: 1},
- {Key: "amountFunctionIdentifier", Value: 1},
- {Key: "blockTimestamp", Value: 1},
- }
+ opts := options.Find().
+ SetProjection(projection).
+ SetSort(primitive.D{{Key: "blockTimestamp", Value: -1}}).
+ SetLimit(200)
- opts := options.Find().
- SetProjection(projection).
- SetSort(primitive.D{{Key: "blockTimestamp", Value: -1}})
+ results, err := configs.InternalTransactionByAddressCollection.Find(ctx, filter, opts)
+ if err != nil {
+ return nil, err
+ }
+ defer results.Close(ctx)
- results, err := configs.InternalTransactionByAddressCollection.Find(ctx, filter, opts)
- if err != nil {
- continue // Try next variant
+ for results.Next(ctx) {
+ var singleTransaction models.TraceResult
+ if err := results.Decode(&singleTransaction); err != nil {
+ continue
}
- for results.Next(ctx) {
- var singleTransaction models.TraceResult
- if err := results.Decode(&singleTransaction); err != nil {
- continue
- }
-
- from := hex.EncodeToString([]byte(singleTransaction.From))
-
- // Determine transaction direction based on matching from/to
- if strings.EqualFold(from, addressWithoutPrefix) {
- singleTransaction.InOut = 0 // Outgoing
- singleTransaction.Address = []byte(singleTransaction.To)
- } else {
- singleTransaction.InOut = 1 // Incoming
- singleTransaction.Address = []byte(singleTransaction.From)
- }
-
- // Check if this transaction is already in our list (to avoid duplicates)
- isDuplicate := false
- for _, tx := range transactions {
- // Use bytes.Equal to compare byte slices properly
- if bytes.Equal(tx.Hash, singleTransaction.Hash) {
- isDuplicate = true
- break
- }
- }
-
- if !isDuplicate {
- transactions = append(transactions, singleTransaction)
- }
+ // Determine transaction direction based on matching from/to
+ if strings.EqualFold(string(singleTransaction.From), normalizedAddress) {
+ singleTransaction.InOut = 0 // Outgoing
+ } else {
+ singleTransaction.InOut = 1 // Incoming
}
- results.Close(ctx)
- // If we found transactions, no need to try other case variants
- if len(transactions) > 0 {
- break
- }
+ transactions = append(transactions, singleTransaction)
}
return transactions, nil
@@ -167,21 +127,11 @@ func ReturnAllTransactionsByAddress(address string) ([]models.TransactionByAddre
var transactions []models.TransactionByAddress
- // Ensure address has Z prefix
- formattedAddress := address
- if !strings.HasPrefix(formattedAddress, "Z") {
- formattedAddress = "Z" + formattedAddress
- }
-
- // Use regex with case insensitivity for address matching
- // This will find addresses regardless of case
- fromRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
- toRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
-
- // Query for transactions where the address is either the sender or receiver
+ // Normalize to the canonical Z-prefix form stored by the syncer.
+ normalizedAddress := normalizeAddress(address)
filter := primitive.D{{Key: "$or", Value: []primitive.D{
- {{Key: "from", Value: fromRegex}},
- {{Key: "to", Value: toRegex}},
+ {{Key: "from", Value: normalizedAddress}},
+ {{Key: "to", Value: normalizedAddress}},
}}}
projection := primitive.D{
@@ -198,7 +148,8 @@ func ReturnAllTransactionsByAddress(address string) ([]models.TransactionByAddre
opts := options.Find().
SetProjection(projection).
- SetSort(primitive.D{{Key: "timeStamp", Value: -1}})
+ SetSort(primitive.D{{Key: "timeStamp", Value: -1}}).
+ SetLimit(200)
results, err := configs.TransactionByAddressCollection.Find(ctx, filter, opts)
if err != nil {
@@ -214,8 +165,7 @@ func ReturnAllTransactionsByAddress(address string) ([]models.TransactionByAddre
continue
}
- // Use case-insensitive comparison for determining transaction direction
- if strings.EqualFold(singleTransaction.From, formattedAddress) {
+ if strings.EqualFold(singleTransaction.From, normalizedAddress) {
singleTransaction.InOut = 0 // Outgoing
singleTransaction.Address = singleTransaction.To
} else {
@@ -227,9 +177,9 @@ func ReturnAllTransactionsByAddress(address string) ([]models.TransactionByAddre
}
if len(transactions) == 0 {
- fmt.Printf("No transactions found for address: %s\n", formattedAddress)
+ fmt.Printf("No transactions found for address: %s\n", normalizedAddress)
} else {
- fmt.Printf("Found %d transactions for address: %s\n", len(transactions), formattedAddress)
+ fmt.Printf("Found %d transactions for address: %s\n", len(transactions), normalizedAddress)
}
return transactions, nil
@@ -350,19 +300,12 @@ func CountTransactions(address string) (int, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- // Ensure address has Z prefix
- formattedAddress := address
- if !strings.HasPrefix(formattedAddress, "Z") {
- formattedAddress = "Z" + formattedAddress
- }
-
- // Use regex with case insensitivity for address matching
- fromRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
- toRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
+ // Normalize to canonical Z-prefix — matches syncer write format.
+ normalizedAddress := normalizeAddress(address)
filter := primitive.D{{Key: "$or", Value: []primitive.D{
- {{Key: "from", Value: fromRegex}},
- {{Key: "to", Value: toRegex}},
+ {{Key: "from", Value: normalizedAddress}},
+ {{Key: "to", Value: normalizedAddress}},
}}}
count, err := configs.TransactionByAddressCollection.CountDocuments(ctx, filter)
@@ -559,23 +502,14 @@ func ReturnNonZeroTransactions(address string, page, limit int) ([]models.Transa
SetProjection(projection).
SetSort(primitive.D{{Key: "timeStamp", Value: -1}})
- // Format the address for query
- formattedAddress := address
- if !strings.HasPrefix(formattedAddress, "Z") {
- formattedAddress = "Z" + formattedAddress
- }
-
- // Use regex with case insensitivity for address matching
- fromRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
- toRegex := primitive.Regex{Pattern: "^" + regexp.QuoteMeta(formattedAddress) + "$", Options: "i"}
-
- // Create a filter for both from and to with this address and non-zero amount
+ // Normalize to canonical Z-prefix form stored by the syncer.
+ normalizedAddress := normalizeAddress(address)
filter := bson.M{
"$and": []bson.M{
{
"$or": []bson.M{
- {"from": fromRegex},
- {"to": toRegex},
+ {"from": normalizedAddress},
+ {"to": normalizedAddress},
},
},
{"amount": bson.M{"$gt": 0}}, // Only return transactions with amount > 0
@@ -606,7 +540,7 @@ func ReturnNonZeroTransactions(address string, page, limit int) ([]models.Transa
}
// Set the inOut flag based on the address's relation to the transaction
- if strings.EqualFold(singleTransaction.From, formattedAddress) {
+ if strings.EqualFold(singleTransaction.From, normalizedAddress) {
singleTransaction.InOut = 0 // Outgoing
singleTransaction.Address = singleTransaction.To
} else {
diff --git a/backendAPI/db/validator.go b/backendAPI/db/validator.go
index 1d4ffe2..cda2c62 100644
--- a/backendAPI/db/validator.go
+++ b/backendAPI/db/validator.go
@@ -19,62 +19,62 @@ const (
SecondsPerSlot = 60
)
+// ReturnValidators returns all validators with computed status and totals.
+// It queries the per-document validators collection directly instead of loading
+// a single mega-document and iterating in Go.
func ReturnValidators(pageToken string) (*models.ValidatorResponse, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- // Get the validator document
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
+ // Get current epoch from latest block once; reuse for all validators.
+ latestBlock, err := GetLatestBlockFromSyncState()
+ if err != nil {
+ return nil, fmt.Errorf("failed to get latest block: %v", err)
+ }
+ currentEpoch := HexToInt(latestBlock) / 128
+
+ findOpts := options.Find().SetSort(bson.D{{Key: "_id", Value: 1}})
+
+ cursor, err := configs.ValidatorsCollections.Find(ctx, bson.M{}, findOpts)
if err != nil {
if err == mongo.ErrNoDocuments {
- // Return empty response if no validators found
return &models.ValidatorResponse{
Validators: make([]models.Validator, 0),
TotalStaked: "0",
}, nil
}
- return nil, fmt.Errorf("failed to get validator document: %v", err)
+ return nil, fmt.Errorf("failed to query validators: %v", err)
}
+ defer cursor.Close(ctx)
- // Get current epoch from latest block
- latestBlock, err := GetLatestBlockFromSyncState()
- if err != nil {
- return nil, fmt.Errorf("failed to get latest block: %v", err)
+ var docs []models.ValidatorDocument
+ if err := cursor.All(ctx, &docs); err != nil {
+ return nil, fmt.Errorf("failed to decode validators: %v", err)
}
- // Convert hex block number to int for epoch calculation
- currentEpoch := HexToInt(latestBlock) / 128 // Each epoch is 128 blocks
-
- // Process validators
- validators := make([]models.Validator, 0)
+ validators := make([]models.Validator, 0, len(docs))
totalStaked := int64(0)
- for _, v := range storage.Validators {
- // Calculate status
- status := getValidatorStatus(v.ActivationEpoch, v.ExitEpoch, v.Slashed, currentEpoch)
+ for _, d := range docs {
+ status := getValidatorStatus(d.ActivationEpoch, d.ExitEpoch, d.Slashed, currentEpoch)
isActive := status == "active"
- // Calculate age in epochs
- activationEpoch := parseEpoch(v.ActivationEpoch)
+ activationEpoch := parseEpoch(d.ActivationEpoch)
age := int64(0)
if activationEpoch <= currentEpoch {
age = currentEpoch - activationEpoch
}
- // Add validator to response
- validator := models.Validator{
- Index: v.Index,
- Address: v.PublicKeyHex,
+ validators = append(validators, models.Validator{
+ Index: d.ID,
+ Address: d.PublicKeyHex,
Status: status,
Age: age,
- StakedAmount: v.EffectiveBalance,
+ StakedAmount: d.EffectiveBalance,
IsActive: isActive,
- }
- validators = append(validators, validator)
+ })
- // Add to total staked (parse as decimal since syncer stores decimal)
- if balance, err := strconv.ParseInt(v.EffectiveBalance, 10, 64); err == nil {
+ if balance, err := strconv.ParseInt(d.EffectiveBalance, 10, 64); err == nil {
totalStaked += balance
}
}
@@ -85,54 +85,43 @@ func ReturnValidators(pageToken string) (*models.ValidatorResponse, error) {
}, nil
}
-// CountValidators returns the total number of validators
+// CountValidators returns the total number of validator documents in the collection.
func CountValidators() (int64, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
+ count, err := configs.ValidatorsCollections.CountDocuments(ctx, bson.M{})
if err != nil {
- if err == mongo.ErrNoDocuments {
- return 0, nil
- }
- return 0, fmt.Errorf("failed to get validator document: %v", err)
+ return 0, fmt.Errorf("failed to count validators: %v", err)
}
-
- return int64(len(storage.Validators)), nil
+ return count, nil
}
-// Helper function to convert hex string to int64
+// Helper function to convert hex string to int64.
func HexToInt(hex string) int64 {
- // Remove "0x" prefix if present
if len(hex) > 2 && hex[0:2] == "0x" {
hex = hex[2:]
}
-
- // Parse hex string
var result int64
fmt.Sscanf(hex, "%x", &result)
return result
}
-// parseEpoch parses epoch string (handles both hex and decimal formats)
-// FAR_FUTURE_EPOCH represents a validator that hasn't exited
+// FAR_FUTURE_EPOCH represents a validator that hasn't exited.
const FAR_FUTURE_EPOCH = "18446744073709551615"
+// parseEpoch parses an epoch string (handles decimal format and FAR_FUTURE_EPOCH).
func parseEpoch(epochStr string) int64 {
- // FAR_FUTURE_EPOCH is special - return max int64 to indicate "never"
if epochStr == FAR_FUTURE_EPOCH {
return math.MaxInt64
}
- // Try decimal first
if epoch, err := strconv.ParseInt(epochStr, 10, 64); err == nil {
return epoch
}
- // Try hex
return HexToInt(epochStr)
}
-// getValidatorStatus computes the validator status based on current epoch
+// getValidatorStatus computes the validator status based on current epoch.
func getValidatorStatus(activationEpoch, exitEpoch string, slashed bool, currentEpoch int64) string {
activation := parseEpoch(activationEpoch)
exit := parseEpoch(exitEpoch)
@@ -149,7 +138,7 @@ func getValidatorStatus(activationEpoch, exitEpoch string, slashed bool, current
return "active"
}
-// GetEpochInfo retrieves the current epoch information
+// GetEpochInfo retrieves the current epoch information.
func GetEpochInfo() (*models.EpochInfoResponse, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -163,7 +152,6 @@ func GetEpochInfo() (*models.EpochInfoResponse, error) {
return nil, fmt.Errorf("failed to get epoch info: %v", err)
}
- // Calculate slot within epoch and time to next epoch
headSlot := parseEpoch(epochInfo.HeadSlot)
slotInEpoch := headSlot % SlotsPerEpoch
slotsRemaining := SlotsPerEpoch - slotInEpoch
@@ -182,7 +170,7 @@ func GetEpochInfo() (*models.EpochInfoResponse, error) {
}, nil
}
-// GetValidatorHistory retrieves historical validator data
+// GetValidatorHistory retrieves historical validator data.
func GetValidatorHistory(limit int) (*models.ValidatorHistoryResponse, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
@@ -208,101 +196,179 @@ func GetValidatorHistory(limit int) (*models.ValidatorHistoryResponse, error) {
}, nil
}
-// GetValidatorByID retrieves a validator by index or public key
+// GetValidatorByID retrieves a validator by index (decimal string) or public key hex.
+// Uses a direct document lookup instead of loading all validators into memory.
func GetValidatorByID(id string) (*models.ValidatorDetailResponse, error) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
- if err != nil {
- if err == mongo.ErrNoDocuments {
- return nil, fmt.Errorf("validators not found")
- }
- return nil, fmt.Errorf("failed to get validators: %v", err)
- }
-
- // Get current epoch
latestBlock, err := GetLatestBlockFromSyncState()
if err != nil {
return nil, fmt.Errorf("failed to get latest block: %v", err)
}
currentEpoch := HexToInt(latestBlock) / 128
- // Find validator by index or public key
- for _, v := range storage.Validators {
- if v.Index == id || v.PublicKeyHex == id {
- status := getValidatorStatus(v.ActivationEpoch, v.ExitEpoch, v.Slashed, currentEpoch)
- activationEpoch := parseEpoch(v.ActivationEpoch)
- age := int64(0)
- if activationEpoch <= currentEpoch {
- age = currentEpoch - activationEpoch
- }
-
- return &models.ValidatorDetailResponse{
- Index: v.Index,
- PublicKeyHex: v.PublicKeyHex,
- WithdrawalCredentialsHex: v.WithdrawalCredentialsHex,
- EffectiveBalance: v.EffectiveBalance,
- Slashed: v.Slashed,
- ActivationEligibilityEpoch: v.ActivationEligibilityEpoch,
- ActivationEpoch: v.ActivationEpoch,
- ExitEpoch: v.ExitEpoch,
- WithdrawableEpoch: v.WithdrawableEpoch,
- Status: status,
- Age: age,
- CurrentEpoch: fmt.Sprintf("%d", currentEpoch),
- }, nil
+ // Match by _id (validator index) or publicKeyHex in a single $or query.
+ filter := bson.M{"$or": []bson.M{
+ {"_id": id},
+ {"publicKeyHex": id},
+ }}
+
+ var doc models.ValidatorDocument
+ err = configs.ValidatorsCollections.FindOne(ctx, filter).Decode(&doc)
+ if err != nil {
+ if err == mongo.ErrNoDocuments {
+ return nil, fmt.Errorf("validator not found")
}
+ return nil, fmt.Errorf("failed to get validator: %v", err)
}
- return nil, fmt.Errorf("validator not found")
+ status := getValidatorStatus(doc.ActivationEpoch, doc.ExitEpoch, doc.Slashed, currentEpoch)
+ activationEpoch := parseEpoch(doc.ActivationEpoch)
+ age := int64(0)
+ if activationEpoch <= currentEpoch {
+ age = currentEpoch - activationEpoch
+ }
+
+ return &models.ValidatorDetailResponse{
+ Index: doc.ID,
+ PublicKeyHex: doc.PublicKeyHex,
+ WithdrawalCredentialsHex: doc.WithdrawalCredentialsHex,
+ EffectiveBalance: doc.EffectiveBalance,
+ Slashed: doc.Slashed,
+ ActivationEligibilityEpoch: doc.ActivationEligibilityEpoch,
+ ActivationEpoch: doc.ActivationEpoch,
+ ExitEpoch: doc.ExitEpoch,
+ WithdrawableEpoch: doc.WithdrawableEpoch,
+ Status: status,
+ Age: age,
+ CurrentEpoch: fmt.Sprintf("%d", currentEpoch),
+ }, nil
}
-// GetValidatorStats returns aggregated validator statistics
+// GetValidatorStats returns aggregated validator statistics using a MongoDB aggregation
+// pipeline instead of loading all validators into Go memory.
func GetValidatorStats() (*models.ValidatorStatsResponse, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
- var storage models.ValidatorStorage
- err := configs.ValidatorsCollections.FindOne(ctx, bson.M{"_id": "validators"}).Decode(&storage)
- if err != nil {
- if err == mongo.ErrNoDocuments {
- return &models.ValidatorStatsResponse{}, nil
- }
- return nil, fmt.Errorf("failed to get validators: %v", err)
- }
-
- // Get current epoch
latestBlock, err := GetLatestBlockFromSyncState()
if err != nil {
return nil, fmt.Errorf("failed to get latest block: %v", err)
}
currentEpoch := HexToInt(latestBlock) / 128
- var activeCount, pendingCount, exitedCount, slashedCount int
- totalStaked := int64(0)
+ // Check whether the collection has any documents at all.
+ totalCount, err := configs.ValidatorsCollections.CountDocuments(ctx, bson.M{})
+ if err != nil {
+ return nil, fmt.Errorf("failed to count validators: %v", err)
+ }
+ if totalCount == 0 {
+ return &models.ValidatorStatsResponse{
+ CurrentEpoch: fmt.Sprintf("%d", currentEpoch),
+ }, nil
+ }
- for _, v := range storage.Validators {
- status := getValidatorStatus(v.ActivationEpoch, v.ExitEpoch, v.Slashed, currentEpoch)
- switch status {
+ // Use aggregation to compute per-status counts in one pass (total staked is
+ // summed separately below). Status depends on currentEpoch, unknown to MongoDB,
+ // so an $addFields/$switch stage mirroring getValidatorStatus tags each doc.
+ // NOTE(review): epoch strings compare lexicographically in $gt/$lte — confirm safe.
+ currentEpochStr := fmt.Sprintf("%d", currentEpoch)
+
+ pipeline := mongo.Pipeline{
+ // Add a computed "status" field using the same rules as getValidatorStatus.
+ bson.D{{Key: "$addFields", Value: bson.M{
+ "_computedStatus": bson.M{
+ "$switch": bson.M{
+ "branches": []bson.M{
+ {
+ // slashed
+ "case": bson.M{"$eq": []interface{}{"$slashed", true}},
+ "then": "slashed",
+ },
+ {
+ // pending: activationEpoch > currentEpoch
+ "case": bson.M{"$gt": []interface{}{"$activationEpoch", currentEpochStr}},
+ "then": "pending",
+ },
+ {
+ // exited: exitEpoch <= currentEpoch AND exitEpoch != FAR_FUTURE_EPOCH
+ "case": bson.M{"$and": []bson.M{
+ {"$lte": []interface{}{"$exitEpoch", currentEpochStr}},
+ {"$ne": []interface{}{"$exitEpoch", FAR_FUTURE_EPOCH}},
+ }},
+ "then": "exited",
+ },
+ },
+ "default": "active",
+ },
+ },
+ }}},
+ bson.D{{Key: "$group", Value: bson.M{
+ "_id": "$_computedStatus",
+ "count": bson.M{"$sum": 1},
+ // effectiveBalance is stored as a decimal string, so totalStaked is computed
+ // by a separate aggregation below that converts with $toLong before summing.
+ }}},
+ }
+
+ cursor, err := configs.ValidatorsCollections.Aggregate(ctx, pipeline)
+ if err != nil {
+ return nil, fmt.Errorf("failed to aggregate validator stats: %v", err)
+ }
+ defer cursor.Close(ctx)
+
+ var activeCount, pendingCount, exitedCount, slashedCount int
+ for cursor.Next(ctx) {
+ var row struct {
+ ID string `bson:"_id"`
+ Count int `bson:"count"`
+ }
+ if err := cursor.Decode(&row); err != nil {
+ continue
+ }
+ switch row.ID {
case "active":
- activeCount++
+ activeCount = row.Count
case "pending":
- pendingCount++
+ pendingCount = row.Count
case "exited":
- exitedCount++
+ exitedCount = row.Count
case "slashed":
- slashedCount++
+ slashedCount = row.Count
}
+ }
+ if err := cursor.Err(); err != nil {
+ return nil, fmt.Errorf("cursor error while reading validator stats: %v", err)
+ }
- if balance, err := strconv.ParseInt(v.EffectiveBalance, 10, 64); err == nil {
- totalStaked += balance
+ // Compute total staked via a second aggregation (sum of effectiveBalance).
+ // MongoDB $sum works on numeric types; balances are stored as decimal strings,
+ // so we convert with $toLong inside the pipeline.
+ sumPipeline := mongo.Pipeline{
+ bson.D{{Key: "$group", Value: bson.M{
+ "_id": nil,
+ "totalStaked": bson.M{"$sum": bson.M{"$toLong": "$effectiveBalance"}},
+ }}},
+ }
+ sumCursor, err := configs.ValidatorsCollections.Aggregate(ctx, sumPipeline)
+ if err != nil {
+ return nil, fmt.Errorf("failed to aggregate total staked: %v", err)
+ }
+ defer sumCursor.Close(ctx)
+
+ totalStaked := int64(0)
+ if sumCursor.Next(ctx) {
+ var sumRow struct {
+ TotalStaked int64 `bson:"totalStaked"`
+ }
+ if err := sumCursor.Decode(&sumRow); err == nil {
+ totalStaked = sumRow.TotalStaked
}
}
return &models.ValidatorStatsResponse{
- TotalValidators: len(storage.Validators),
+ TotalValidators: int(totalCount),
ActiveCount: activeCount,
PendingCount: pendingCount,
ExitedCount: exitedCount,
diff --git a/backendAPI/handler/handler.go b/backendAPI/handler/handler.go
index ae0d9d2..8621450 100644
--- a/backendAPI/handler/handler.go
+++ b/backendAPI/handler/handler.go
@@ -61,7 +61,7 @@ func RequestHandler() {
AllowMethods: []string{"GET", "POST"},
AllowHeaders: []string{"Origin", "Content-Length", "Content-Type", "Authorization"},
ExposeHeaders: []string{"Content-Length"},
- AllowCredentials: true,
+ AllowCredentials: false, // Must be false when AllowOrigins is wildcard (CORS spec)
MaxAge: 12 * time.Hour,
}))
log.Println("CORS configuration initialized successfully")
diff --git a/backendAPI/models/validator.go b/backendAPI/models/validator.go
index 618da1d..ff387f8 100644
--- a/backendAPI/models/validator.go
+++ b/backendAPI/models/validator.go
@@ -1,6 +1,8 @@
package models
-// ValidatorStorage represents the validator document in MongoDB
+// ValidatorStorage represents the legacy single mega-document in MongoDB.
+// Kept for backward-compatibility with any remaining code paths; new writes
+// use ValidatorDocument (one document per validator) instead.
type ValidatorStorage struct {
ID string `bson:"_id" json:"_id"`
Epoch string `bson:"epoch" json:"epoch"` // Stored as hex
@@ -8,6 +10,24 @@ type ValidatorStorage struct {
UpdatedAt string `bson:"updatedAt" json:"updatedAt"` // Timestamp in hex
}
+// ValidatorDocument is the per-validator MongoDB document written by the syncer.
+// _id is the validator index (decimal string).
+type ValidatorDocument struct {
+ ID string `bson:"_id" json:"_id"`
+ PublicKeyHex string `bson:"publicKeyHex" json:"publicKeyHex"`
+ WithdrawalCredentialsHex string `bson:"withdrawalCredentialsHex" json:"withdrawalCredentialsHex"`
+ EffectiveBalance string `bson:"effectiveBalance" json:"effectiveBalance"`
+ Slashed bool `bson:"slashed" json:"slashed"`
+ ActivationEligibilityEpoch string `bson:"activationEligibilityEpoch" json:"activationEligibilityEpoch"`
+ ActivationEpoch string `bson:"activationEpoch" json:"activationEpoch"`
+ ExitEpoch string `bson:"exitEpoch" json:"exitEpoch"`
+ WithdrawableEpoch string `bson:"withdrawableEpoch" json:"withdrawableEpoch"`
+ SlotNumber string `bson:"slotNumber" json:"slotNumber"`
+ IsLeader bool `bson:"isLeader" json:"isLeader"`
+ Epoch string `bson:"epoch" json:"epoch"`
+ UpdatedAt string `bson:"updatedAt" json:"updatedAt"`
+}
+
// ValidatorRecord represents a single validator's data
type ValidatorRecord struct {
Index string `bson:"index" json:"index"` // Stored as hex
diff --git a/backendAPI/routes/routes.go b/backendAPI/routes/routes.go
index 0ed3619..238f1bb 100644
--- a/backendAPI/routes/routes.go
+++ b/backendAPI/routes/routes.go
@@ -25,6 +25,9 @@ func UserRoute(router *gin.Engine) {
// Parse pagination parameters
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "10"))
+ if limit > 100 {
+ limit = 100
+ }
result, err := db.GetPendingTransactions(page, limit)
if err != nil {
@@ -283,11 +286,7 @@ func UserRoute(router *gin.Engine) {
router.GET("/address/aggregate/:query", func(c *gin.Context) {
param := c.Param("query")
-
- // Normalize address: convert lowercase z prefix to uppercase Z
- if strings.HasPrefix(param, "z") && !strings.HasPrefix(param, "z0") {
- param = "Z" + param[1:]
- }
+ // db functions normalize the address to canonical lowercase z-prefix internally.
// Single Address data
addressData, err := db.ReturnSingleAddress(param)
@@ -546,6 +545,9 @@ func UserRoute(router *gin.Engine) {
if l, err := strconv.Atoi(limitStr); err == nil && l > 0 {
limit = l
}
+ if limit > 100 {
+ limit = 100
+ }
history, err := db.GetValidatorHistory(limit)
if err != nil {
@@ -594,6 +596,9 @@ func UserRoute(router *gin.Engine) {
// Parse pagination parameters
page, _ := strconv.ParseInt(c.DefaultQuery("page", "0"), 10, 64)
limit, _ := strconv.ParseInt(c.DefaultQuery("limit", "10"), 10, 64)
+ if limit > 100 {
+ limit = 100
+ }
search := c.Query("search")
// Parse isToken filter (optional)
@@ -611,15 +616,8 @@ func UserRoute(router *gin.Engine) {
return
}
- // Normalize addresses: ensure Z prefix is uppercase for display
- for i := range query {
- if strings.HasPrefix(query[i].ContractAddress, "z") {
- query[i].ContractAddress = "Z" + query[i].ContractAddress[1:]
- }
- if strings.HasPrefix(query[i].ContractCreatorAddress, "z") {
- query[i].ContractCreatorAddress = "Z" + query[i].ContractCreatorAddress[1:]
- }
- }
+ // Addresses are stored and returned as lowercase z-prefix (canonical form).
+ // No presentation-layer conversion needed.
c.JSON(http.StatusOK, gin.H{
"response": query,
@@ -627,6 +625,9 @@ func UserRoute(router *gin.Engine) {
})
})
+ // NOTE: /debug/blocks exposes internal sync state. In production this endpoint
+ // MUST be placed behind authentication middleware or removed entirely to prevent
+ // information disclosure to unauthenticated callers.
router.GET("/debug/blocks", func(c *gin.Context) {
count, err := db.CountBlocksNetwork()
if err != nil {
@@ -702,6 +703,9 @@ func UserRoute(router *gin.Engine) {
address := c.Param("address")
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "5")) // Default to 5
+ if limit > 100 {
+ limit = 100
+ }
transactions, err := db.ReturnNonZeroTransactions(address, page, limit)
if err != nil {