diff --git a/.devcontainer/Dockerfile.dev b/.devcontainer/Dockerfile.dev
index 8f7c104f..f45f2ff8 100644
--- a/.devcontainer/Dockerfile.dev
+++ b/.devcontainer/Dockerfile.dev
@@ -8,7 +8,7 @@ RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y build-essential cmake tmux clang-tidy autoconf libtool pkg-config libabsl-dev libboost-all-dev libc-ares-dev libcrypto++-dev libgrpc-dev libgrpc++-dev librocksdb-dev libscrypt-dev libsnappy-dev libssl-dev zlib1g-dev openssl protobuf-compiler protobuf-compiler-grpc nano vim unison git gdb ninja-build
# Set the working directory in the Docker container
-WORKDIR /orbitersdk-cpp
+WORKDIR /bdk-cpp
# Copy Unison configuration file
COPY sync.prf /root/.unison/sync.prf
\ No newline at end of file
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 541a19a2..e28f3de5 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "name": "OrbiterSDK C++ Dev Container",
+ "name": "BDK C++ Dev Container",
"build": {
"dockerfile": "Dockerfile.dev"
},
@@ -8,11 +8,11 @@
"terminal.integrated.shell.linux": "/bin/bash"
},
"mounts": [
- "source=${localWorkspaceFolder},target=/orbitersdk-cpp,type=bind,consistency=cached"
+ "source=${localWorkspaceFolder},target=/bdk-cpp,type=bind,consistency=cached"
],
"runArgs": ["-it", "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"],
"extensions": ["ms-vscode.cpptools", "ms-vscode.cmake-tools"],
- "postCreateCommand": "mkdir /orbitersdk-data && nohup unison -repeat 1 /orbitersdk-cpp /orbitersdk-data -auto -batch \
+ "postCreateCommand": "mkdir /bdk-data && nohup unison -repeat 1 /bdk-cpp /bdk-data -auto -batch \
-ignore 'Name {build}' \
-ignore 'Name {build_local_testnet}' \
-ignore 'Name {.vscode}' \
@@ -38,5 +38,5 @@
-ignore 'Name {kateproject}' \
-ignore 'Name {*.o}' \
-ignore 'Name {*.gch}' \
- > /dev/null 2>&1 && cp -r /orbitersdk-cpp/* /orbitersdk-data"
+ > /dev/null 2>&1 && cp -r /bdk-cpp/* /bdk-data"
}
diff --git a/.devcontainer/sync.prf b/.devcontainer/sync.prf
index c02b36b1..f1da84ef 100644
--- a/.devcontainer/sync.prf
+++ b/.devcontainer/sync.prf
@@ -1,6 +1,6 @@
# Unison synchronization profile
-root = /orbitersdk-cpp
-root = /orbitersdk-data
+root = /bdk-cpp
+root = /bdk-data
# Specify synchronization options
auto = true
diff --git a/.dockerignore b/.dockerignore
index b6a7208b..5da5368f 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,4 +1,3 @@
-build/
*.o
*.gch
proto/metrics.pb.cc
@@ -18,6 +17,7 @@ depends/x86_64-pc-linux-gnu/
scripts/AIO-setup.log
compile_commands.json
.cache/
+.backup/
#IDEA's IDE
.idea/
@@ -27,6 +27,9 @@ cmake-build-release/
# Files generated by CMake
src/utils/options.h
+# build directory
+build/
+
# Files generated by automated scripts
local_testnet/
-build_local_testnet/
\ No newline at end of file
+build_local_testnet/
diff --git a/.github/workflows/PULL_REQUEST_TEMPLATE.md b/.github/workflows/PULL_REQUEST_TEMPLATE.md
index 99f3c736..a1819943 100644
--- a/.github/workflows/PULL_REQUEST_TEMPLATE.md
+++ b/.github/workflows/PULL_REQUEST_TEMPLATE.md
@@ -40,7 +40,7 @@ https://docs.github.com/en/free-pro-team@latest/github/managing-your-work-on-git
## Added to documentation?
- [ ] 📜 README.md
-- [ ] 📓 [Sparq Docs](https://github.com/SparqNet/sparq-docs)
+- [ ] 📓 [Sparq Docs](https://github.com/AppLayer/sparq-docs)
- [ ] 🙅 no documentation needed
## [optional] Are there any post-deployment tasks we need to perform?
diff --git a/.github/workflows/c-cpp.yml b/.github/workflows/c-cpp.yml
index 11b72672..3fb8fd31 100644
--- a/.github/workflows/c-cpp.yml
+++ b/.github/workflows/c-cpp.yml
@@ -11,96 +11,69 @@ on:
- development
jobs:
- setup:
- runs-on: ubuntu-latest
-
- container:
- image: debian:bookworm
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Update apt-get
- run: apt-get update
-
- - name: Install project dependencies
- run: DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake tmux clang-tidy autoconf libtool pkg-config libabsl-dev libboost-all-dev libc-ares-dev libcrypto++-dev libgrpc-dev libgrpc++-dev librocksdb-dev libscrypt-dev libsnappy-dev libssl-dev zlib1g-dev openssl protobuf-compiler protobuf-compiler-grpc libprotobuf-dev git doxygen curl unzip
-
- - name: Print GCC version
- run: gcc --version
-
- - name: Install CA certificates
- run: apt-get install -y ca-certificates
-
build_test_and_analyse:
- needs: setup
- runs-on: ubuntu-latest
-
- container:
- image: debian:bookworm
+ runs-on: [self-hosted, linux, x64]
env:
# https://docs.sonarqube.org/latest/analysis/scan/sonarscanner/
- BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed
+ # Directory where build-wrapper output will be placed
+ BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+ SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
+ TARGET_DIR: "/home/actions/actions-runner/_work/bdk-cpp/bdk-cpp"
+ BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Update apt-get
- run: apt-get update
-
- - name: Install project dependencies
- run: DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake tmux clang-tidy autoconf libtool pkg-config libabsl-dev libboost-all-dev libc-ares-dev libcrypto++-dev libgrpc-dev libgrpc++-dev librocksdb-dev libscrypt-dev libsnappy-dev libssl-dev zlib1g-dev openssl protobuf-compiler protobuf-compiler-grpc libprotobuf-dev git curl unzip gcovr
-
- - name: Install sonar-scanner and build-wrapper
- uses: SonarSource/sonarcloud-github-c-cpp@v2
-
- - name: Configure CMake
- run: cmake -S . -B build -DSONARQUBE_ANALYSIS=ON -DDEBUG=OFF
-
+ - name: Clone the repository
+ run: git clone https://github.com/${{github.repository}} ${{env.TARGET_DIR}} || true
+ - name: Sync with remote repository
+ run: git -C ${{env.TARGET_DIR}} fetch
+ - name: Checkout to current branch
+ run: git -C ${{env.TARGET_DIR}} checkout ${{env.BRANCH_NAME}}
+ - name: Set user.email
+ run: git -C ${{env.TARGET_DIR}} config --global user.email "github-actions[bot]@users.noreply.github.com"
+ - name: Set user.name
+ run: git -C ${{env.TARGET_DIR}} config --global user.name "github-actions"
+ - name: Update local repository
+ run: git -C ${{env.TARGET_DIR}} pull
+ - name: Build the container
+ run: ./scripts/auto.sh -s bdk build
+ - name: Stop the container
+ run: ./scripts/auto.sh -s bdk stop
+ - name: Restart the container
+ run: ./scripts/auto.sh -s bdk up
+ - name: Clean previous build (if there is one)
+ run: ./scripts/auto.sh -s bdk exec 'make -C build clean' || true
+ - name: Configure MOLD linker
+ run: ./scripts/auto.sh -s bdk exec 'cmake -S . -B build
+ -DSONARQUBE_ANALYSIS=ON -DDEBUG=OFF
+ -DCMAKE_EXE_LINKER_FLAGS=\"-fuse-ld=mold\"
+ -DCMAKE_SHARED_LINKER_FLAGS=\"-fuse-ld=mold\"'
- name: Build with SonarQube BuildWrapper + CMake
- run: build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/ --config Release -- -j $(nproc)
-
- - name: Give execute permissions
- run: chmod +x ./build/orbitersdkd-tests
-
- - name: Run Catch2 Tests
- run: ./build/orbitersdkd-tests -d yes
-
+ run: ./scripts/auto.sh -s bdk exec
+ 'build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }}
+ cmake --build build --config Release -- -j $(nproc)'
+ - name: Run Tests
+ run: ./scripts/auto.sh -s bdk exec
+ './build/src/bins/bdkd-tests/bdkd-tests -d yes'
+ - name: Delete coverage XML report (if any)
+ run: ./scripts/auto.sh -s bdk exec 'rm coverage.xml || true'
- name: Collect coverage into one XML report
- run: |
- gcovr --gcov-ignore-parse-errors --sonarqube > coverage.xml
-
+ run: ./scripts/auto.sh -s bdk exec
+ 'gcovr -d --gcov-ignore-parse-errors --exclude-throw-branches --sonarqube -o coverage.xml'
- name: Run SonarQube Scanner
- env:
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
- run: |
- sonar-scanner --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" --define sonar.coverageReportPaths=coverage.xml
+ run: ./scripts/auto.sh -s bdk exec
+ 'env SONAR_TOKEN=${{ env.SONAR_TOKEN }}
+ SONAR_HOST_URL=${{ env.SONAR_HOST_URL }}
+ sonar-scanner
+ --define sonar.cfamily.build-wrapper-output=${{ env.BUILD_WRAPPER_OUT_DIR }}
+ --define sonar.coverageReportPaths=coverage.xml'
documentation:
+ runs-on: [self-hosted, linux, x64]
needs: build_test_and_analyse
- runs-on: ubuntu-latest
-
- container:
- image: debian:bookworm
-
steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name : Update apt-get
- run: apt-get update
-
- - name : Install Doxygen
- run: apt-get install -y doxygen
-
- name: Generate Doxygen Documentation
- run: |
- mkdir docs
- doxygen Doxyfile
-
+ run: ./scripts/auto.sh -s bdk exec 'doxygen Doxyfile'
- name: Publish Documentation
uses: actions/upload-artifact@v4
with:
name: Documentation
- path: docs
\ No newline at end of file
+ path: docs
diff --git a/.gitignore b/.gitignore
index c3c34df9..f8858807 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,6 +33,11 @@ compile_commands.json
cmake-build-debug/
cmake-build-release/
+# emacs
+TAGS
+.dir-locals.el
+.backup
+
# VILARINHO's ignored files
liner.sh
prepare-lib.sh
@@ -48,4 +53,5 @@ build_local_testnet/
# Files generated by Doxygen
docs/html/
+# Documentation
sparq-docs
diff --git a/.kateproject b/.kateproject
index 79fd0844..02cbdb86 100644
--- a/.kateproject
+++ b/.kateproject
@@ -1,4 +1,4 @@
{
- "name": "orbitersdk",
+ "name": "bdk",
"files": [ { "git": 1 } ]
}
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 175f68b4..dc0adf93 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -16,24 +16,39 @@ if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.24.0")
cmake_policy(SET CMP0135 NEW)
endif()
+# TODO: avoid FindBoost deprecation message in CMake 3.30+ (cmake --help-policy CMP0167)
+
# Project data
-project(orbitersdk VERSION 0.2.0 DESCRIPTION "Sparq subnet")
+project(bdk VERSION 0.2.0 DESCRIPTION "AppLayer Blockchain Development Kit")
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED TRUE)
set(CMAKE_CXX_EXTENSIONS OFF)
SET(DEBUG ON CACHE BOOL "Debug mode")
+set(CMAKE_POSITION_INDEPENDENT_CODE ON)
+set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") # Always look for static libraries - "ZLIB_USE_STATIC_LIBS" was added in 3.24
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON) # For clang-tidy
+
+# Set compiler flags
+# TODO: -Wno-c++26-extensions is included because zpp_libs uses name-independent declarations (vars named "_").
+# This should be resolved at a later date, but was turned off for now because it doesn't affect us
if(DEBUG)
- set(CMAKE_CXX_FLAGS "-O0 -g -fsanitize=address -fno-inline -fno-eliminate-unused-debug-types -fstack-protector -Werror=unused-variable") # Provides faster compile time.
+ set(CMAKE_CXX_FLAGS "-O0 -g -Wno-c++26-extensions -fsanitize=address -fno-inline -fno-eliminate-unused-debug-types -fstack-protector") # Provides faster compile time.
elseif(SONARQUBE_ANALYSIS)
- set(CMAKE_CXX_FLAGS "-O0 -g --coverage")
+ set(CMAKE_CXX_FLAGS "-O0 -g -Wno-c++26-extensions --coverage")
else()
- set(CMAKE_CXX_FLAGS "-O2 -Werror=unused-variable")
+ set(CMAKE_CXX_FLAGS "-O2 -Wno-c++26-extensions -Werror=unused-variable")
+endif()
+find_program(MOLD "mold") # Use mold by default if it is installed
+if(MOLD)
+ message(STATUS "Using mold as linker")
+ set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=mold")
+ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=mold")
endif()
-set(CMAKE_POSITION_INDEPENDENT_CODE ON)
-set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") # Always look for static libraries - "ZLIB_USE_STATIC_LIBS" was added in 3.24
-set(CMAKE_EXPORT_COMPILE_COMMANDS ON) # For clang-tidy
-# Set project version inside the code
+# Set project version inside the code (forcefully so changes in the .in file are always reflected correctly to the compiler)
+# if (EXISTS ${CMAKE_SOURCE_DIR}/src/utils/options.h)
+# file(REMOVE ${CMAKE_SOURCE_DIR}/src/utils/options.h)
+# endif()
configure_file(
${CMAKE_SOURCE_DIR}/src/utils/options.h.in
${CMAKE_SOURCE_DIR}/src/utils/options.h
@@ -43,13 +58,20 @@ configure_file(
# External project data
set(BUILD_TESTS ON CACHE BOOL "Build helper unit testing program")
set(BUILD_DISCOVERY ON CACHE BOOL "Build helper discovery node program")
-set(BUILD_AVALANCHEGO OFF CACHE BOOL "Build with AvalancheGo wrapping")
set(BUILD_TOOLS OFF CACHE BOOL "Build tools related to subnet")
+set(BUILD_TESTNET OFF CACHE BOOL "Build the project for testnet")
+set(BUILD_BENCHMARK OFF CACHE BOOL "Build with the benchmark tests")
+set(BUILD_BTVSERVER OFF CACHE BOOL "Build the BTV (Build the Void) websocket server")
+set(BUILD_VARIABLES_TESTS ON CACHE BOOL "Build tests for SafeVar (Contract variables)")
set(USE_LINT OFF CACHE BOOL "Run linter on compile (clang-tidy)")
if(USE_LINT)
set(CMAKE_CXX_CLANG_TIDY "clang-tidy;-header-filter=.;-checks=-*,abseil-*,boost-*,bugprone-*,cert-*,clang-analyzer-*,concurrency-*,cppcoreguidelines-*,hicpp-*,misc-*,modernize-*,performance-*,portability-*,readability-*")
endif()
+if(BUILD_TESTNET)
+ add_definitions(-DBUILD_TESTNET)
+endif()
+
# Echo CMake vars during config
message(STATUS "C++ standard: ${CMAKE_CXX_STANDARD}")
message(STATUS "C++ standard is required: ${CMAKE_CXX_STANDARD_REQUIRED}")
@@ -59,11 +81,13 @@ message(STATUS "Using PIC: ${CMAKE_POSITION_INDEPENDENT_CODE}")
message(STATUS "Find libs with suffix: ${CMAKE_FIND_LIBRARY_SUFFIXES}")
message("Building tests: ${BUILD_TESTS}")
message("Building Discovery Node: ${BUILD_DISCOVERY}")
-message("Building AvalancheGo support: ${BUILD_AVALANCHEGO}")
message("Building tools: ${BUILD_TOOLS}")
+message("Building testnet: ${BUILD_TESTNET}")
+message("Building benchmark tests: ${BUILD_BENCHMARK}")
+message("Building SafeVar tests: ${BUILD_VARIABLES_TESTS}")
message("Using lint: ${USE_LINT}")
-cable_add_buildinfo_library(PROJECT_NAME orbitersdk)
+cable_add_buildinfo_library(PROJECT_NAME bdk)
# System package configs (built-in)
set(Boost_USE_STATIC_LIBS ON)
@@ -71,20 +95,23 @@ set(OPENSSL_USE_STATIC_LIBS ON)
set(Protobuf_USE_STATIC_LIBS ON)
# Find system packages (built-in)
-find_package(Threads)
-find_package(Boost 1.74.0 REQUIRED COMPONENTS chrono filesystem program_options system thread nowide)
+find_package(Boost 1.83.0 REQUIRED COMPONENTS chrono filesystem program_options system thread nowide)
find_package(OpenSSL 1.1.1 REQUIRED)
-find_package(ZLIB REQUIRED)
+find_package(Protobuf REQUIRED) # TODO: not used yet but will be, keep it for now
+find_package(Threads)
# Find system packages (custom)
+find_package(Cares REQUIRED) # TODO: not used yet but will be, keep it for now
find_package(CryptoPP 8.2.0 REQUIRED)
+find_package(Ethash REQUIRED)
+find_package(Evmc REQUIRED)
+find_package(Evmone REQUIRED)
+find_package(GRPC REQUIRED) # TODO: not used yet but will be, keep it for now
+find_package(Keccak REQUIRED)
find_package(Scrypt REQUIRED)
-
-# Add external modules
-include(cmake/ProjectBoostCertify.cmake) # Boost Certify
-include(cmake/ProjectEthash.cmake) # Ethash
-include(cmake/ProjectSecp256k1.cmake) # Bitcoin core fast implementation
-include(cmake/ProjectSpeedb.cmake) # Speedb (Level/RocksDB drop-in replacement)
+find_package(Secp256k1 REQUIRED)
+find_package(Speedb REQUIRED)
+find_package(SQLiteCpp REQUIRED)
# Add catch2 as a library
add_library(catch2
@@ -92,18 +119,11 @@ add_library(catch2
${CMAKE_SOURCE_DIR}/src/libs/catch2/catch_amalgamated.cpp
)
target_include_directories(catch2 PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/catch2)
+target_compile_definitions(catch2 PRIVATE CATCH_AMALGAMATED_CUSTOM_MAIN)
# Check compiler variable sizes
include(cmake/CheckSizes.cmake)
-# Add AvalancheGo wrapper dependencies if compiling it
-if(BUILD_AVALANCHEGO)
- find_package(Absl REQUIRED) # Built-in is hardcoded to SHARED, this one to STATIC
- find_package(Cares REQUIRED)
- find_package(Protobuf 3.12 REQUIRED)
- find_package(GRPC REQUIRED)
-endif()
-
# Include directories for headers and libs
include_directories(
"${CMAKE_SOURCE_DIR}"
@@ -121,364 +141,29 @@ link_directories(
"${CMAKE_SOURCE_DIR}/build/deps/lib"
)
-# Organize, compile and link orbitersdk libs
+# Organize, compile and link bdk libs
add_subdirectory(src/contract)
add_subdirectory(src/core)
add_subdirectory(src/net)
add_subdirectory(src/utils)
add_subdirectory(tests)
-# Generate gRPC files if building with support for AvalancheGo.
-# Headers/sources are always cleaned at configure so they can be regenerated at build
-if(BUILD_AVALANCHEGO)
- file(REMOVE
- "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/vm.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/appsender.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/keystore.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/messenger.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.h"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/appsender.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/keystore.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/messenger.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/metrics.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/metrics.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/metrics.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.h"
- COMMAND "protoc"
- ARGS --grpc_out="${CMAKE_SOURCE_DIR}/proto"
- --plugin=protoc-gen-grpc="${GRPC_CPP_PLUGIN}"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- --experimental_allow_proto3_optional
- "${CMAKE_SOURCE_DIR}/proto/vm.proto"
- )
-
- # Protobuf PROTOBUF_GENERATE_CPP does NOT work with --experimental_allow_proto3_optional
- # requiring us to go over protobuf files with add_custom_command
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/appsender.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/appsender.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/keystore.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/keystore.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/messenger.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/messenger.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/metrics.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/metrics.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/metrics.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/metrics.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/metrics.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- "${CMAKE_SOURCE_DIR}/proto/metrics.proto"
- )
-
- add_custom_command(
- OUTPUT "${CMAKE_SOURCE_DIR}/proto/vm.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.pb.h"
- COMMAND "protoc"
- ARGS --cpp_out="${CMAKE_SOURCE_DIR}/proto"
- --proto_path="${CMAKE_SOURCE_DIR}/proto"
- --experimental_allow_proto3_optional
- "${CMAKE_SOURCE_DIR}/proto/vm.proto"
- )
-
- add_library(ProtoFiles
- "${CMAKE_SOURCE_DIR}/proto/vm.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/metrics.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/appsender.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/keystore.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/messenger.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/metrics.pb.h"
- )
-
- # You HAVE to set the file names
- add_library (gen-grpc
- "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.cc"
- "${CMAKE_SOURCE_DIR}/proto/vm.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/aliasreader.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/appsender.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/keystore.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/messenger.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/sharedmemory.grpc.pb.h"
- "${CMAKE_SOURCE_DIR}/proto/rpcdb.grpc.pb.h"
- ${ProtoFiles}
- )
-
- target_link_libraries(gen-grpc PUBLIC ${Protobuf_LIBRARIES} ${GRPC_LIBRARIES} ${CARES_LIBRARY} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} absl::flags)
-
- add_library(orbitersdk_lib STATIC
- ${UTILS_HEADERS}
- ${UTILS_SOURCES}
- ${CONTRACT_HEADERS}
- ${CONTRACT_SOURCES}
- ${CORE_HEADERS}
- ${CORE_SOURCES}
- ${NET_HEADERS}
- ${NET_SOURCES}
- )
-
- add_dependencies(orbitersdk_lib gen-grpc ProtoFiles)
-
- target_include_directories(orbitersdk_lib PUBLIC ${CMAKE_SOURCE_DIR}/include ${OPENSSL_INCLUDE_DIR})
-
- target_link_libraries(orbitersdk_lib PRIVATE
- ${CRYPTOPP_LIBRARIES} ${SCRYPT_LIBRARY} Secp256k1 Ethash ${ETHASH_BYPRODUCTS} ${Protobuf_LIBRARIES}
- ${GRPC_LIBRARIES} ${CARES_LIBRARY} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} absl::flags
- Speedb ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES}
- )
-
- set_target_properties(orbitersdk_lib PROPERTIES COMPILE_FLAGS "-DAVALANCHEGO_COMPATIBLE=1")
-
- # Compile and link the executable
- add_executable(orbitersdkd "${CMAKE_SOURCE_DIR}/src/main.cpp")
-
- add_dependencies(orbitersdkd orbitersdk_lib gen-grpc ProtoFiles)
- target_include_directories(orbitersdkd PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(orbitersdkd
- orbitersdk_lib
- ${Protobuf_LIBRARIES} ${GRPC_LIBRARIES} ${CARES_LIBRARY} Speedb
- ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES}
- absl::flags Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-
- # Compile and link the ABI generator executable
- add_executable(contractabigenerator "${CMAKE_SOURCE_DIR}/src/main-contract-abi.cpp")
-
- add_dependencies(contractabigenerator orbitersdk_lib)
- target_include_directories(contractabigenerator PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(contractabigenerator
- orbitersdk_lib Speedb ${SNAPPY_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-
- # TODO: Implement tests for AvalancheGo compilation.
-else()
- add_library(orbitersdk_lib STATIC
- ${UTILS_HEADERS}
- ${UTILS_SOURCES}
- ${CONTRACT_HEADERS}
- ${CONTRACT_SOURCES}
- ${CORE_HEADERS}
- ${CORE_SOURCES}
- ${NET_HEADERS}
- ${NET_SOURCES}
- )
-
- target_include_directories(orbitersdk_lib PRIVATE ${CMAKE_SOURCE_DIR}/include ${OPENSSL_INCLUDE_DIR})
-
- target_link_libraries(orbitersdk_lib PRIVATE
- ${CRYPTOPP_LIBRARIES} ${SCRYPT_LIBRARY} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- Speedb ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES}
- )
-
- set_target_properties(orbitersdk_lib PROPERTIES COMPILE_FLAGS "-DAVALANCHEGO_COMPATIBLE=0")
-
- # Compile and link the executable
- add_executable(orbitersdkd "${CMAKE_SOURCE_DIR}/src/main.cpp")
-
- add_dependencies(orbitersdkd orbitersdk_lib)
- target_include_directories(orbitersdkd PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(orbitersdkd
- orbitersdk_lib Speedb ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-
- # Compile and link the ABI generator executable
- add_executable(contractabigenerator "${CMAKE_SOURCE_DIR}/src/main-contract-abi.cpp")
-
- add_dependencies(contractabigenerator orbitersdk_lib)
- target_include_directories(contractabigenerator PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(contractabigenerator
- orbitersdk_lib Speedb ${SNAPPY_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-
- # Compile and link the ABI generator executable
- add_executable(networkdeployer "${CMAKE_SOURCE_DIR}/src/networkdeployer.cpp")
+add_library(bdk_lib STATIC
+ ${UTILS_HEADERS} ${UTILS_SOURCES} ${CONTRACT_HEADERS} ${CONTRACT_SOURCES}
+ ${CORE_HEADERS} ${CORE_SOURCES} ${NET_HEADERS} ${NET_SOURCES}
+)
- add_dependencies(networkdeployer orbitersdk_lib)
- target_include_directories(networkdeployer PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(networkdeployer
- orbitersdk_lib Speedb ${SNAPPY_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-endif()
+target_include_directories(bdk_lib PRIVATE
+ ${CMAKE_SOURCE_DIR}/include ${OPENSSL_INCLUDE_DIR} ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${EVMC_INCLUDE_DIR} ${EVMONE_INCLUDE_DIR} ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+)
-# Compile and link the test executable if set to build it
-if (BUILD_TESTS)
- add_executable(orbitersdkd-tests ${TESTS_HEADERS} ${TESTS_SOURCES})
- add_dependencies(orbitersdkd-tests orbitersdk_lib)
- target_include_directories(orbitersdkd-tests PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(orbitersdkd-tests
- orbitersdk_lib Speedb ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 catch2 Ethash ${ETHASH_BYPRODUCTS}
- )
-endif()
+target_link_libraries(bdk_lib PRIVATE
+ ${EVMC_INSTRUCTIONS_LIBRARY} ${EVMC_LOADER_LIBRARY} ${EVMONE_LIBRARY}
+ ${CRYPTOPP_LIBRARIES} ${SCRYPT_LIBRARY} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY} ${SPEEDB_LIBRARY}
+ ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} -l:liblz4.a SQLiteCpp
+)
-# Compile and link the Discovery Node test executable if set to build it
-if (BUILD_DISCOVERY)
- add_executable(orbitersdkd-discovery "${CMAKE_SOURCE_DIR}/src/main-discovery.cpp")
- add_dependencies(orbitersdkd-discovery orbitersdk_lib)
- target_include_directories(orbitersdkd-discovery PRIVATE orbitersdk_lib ${OPENSSL_INCLUDE_DIR})
- target_link_libraries(orbitersdkd-discovery
- orbitersdk_lib Speedb ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} Secp256k1 Ethash ${ETHASH_BYPRODUCTS}
- )
-endif()
+add_subdirectory(src/bins)
diff --git a/CMakePresets.json b/CMakePresets.json
index 2ffffa4f..4d8c159e 100644
--- a/CMakePresets.json
+++ b/CMakePresets.json
@@ -5,7 +5,7 @@
"name": "linux-release",
"displayName": "Linux Release",
"generator": "Ninja",
- "binaryDir": "/orbitersdk-data/build_local_testnet",
+ "binaryDir": "/bdk-data/build_local_testnet",
"installDir": "${sourceDir}/out/install/${presetName}",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release"
@@ -25,7 +25,7 @@
"name": "linux-debug",
"displayName": "Linux Debug",
"generator": "Ninja",
- "binaryDir": "/orbitersdk-data/build_local_testnet",
+ "binaryDir": "/bdk-data/build_local_testnet",
"installDir": "${sourceDir}/out/install/${presetName}",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Debug"
diff --git a/Dockerfile b/Dockerfile
index b47ebff6..781642cf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,30 +1,36 @@
-# Copyright (c) [2023-2024] [Sparq Network]
+# Copyright (c) [2023-2024] [AppLayer Developers]
# This software is distributed under the MIT License.
# See the LICENSE.txt file in the project root for more information.
# Start from a base Debian image
-FROM debian:bookworm
+FROM debian:trixie
+
+# Use Bash as the shell for subsequent RUN instructions (the default is /bin/sh)
+SHELL ["/bin/bash", "-c"]
# Update the system
RUN apt-get update && apt-get upgrade -y
-# Install dependencies
-RUN apt-get install -y build-essential cmake tmux clang-tidy autoconf libtool pkg-config libabsl-dev libboost-all-dev libc-ares-dev libcrypto++-dev libgrpc-dev libgrpc++-dev librocksdb-dev libscrypt-dev libsnappy-dev libssl-dev zlib1g-dev openssl protobuf-compiler protobuf-compiler-grpc nano vim unison git
-
# Set the working directory in the Docker container
-WORKDIR /orbitersdk-cpp
+WORKDIR /bdk-cpp
# Copy the local folder to the container
-COPY . /orbitersdk-cpp
+COPY . /bdk-cpp
+
+# Install Docker-specific dependencies
+RUN apt-get -y install nano vim unison curl jq unzip
+
+# Install dependencies
+RUN bash /bdk-cpp/scripts/deps.sh --install
# Create the synchronized directory
-RUN mkdir /orbitersdk-volume
+RUN mkdir /bdk-volume
# Copy Unison configuration file
COPY sync.prf /root/.unison/sync.prf
# Start Unison in the background, ignoring files that should not be synced
-CMD nohup unison -repeat 1 /orbitersdk-volume /orbitersdk-cpp -auto -batch \
+CMD nohup unison -repeat 1 /bdk-volume /bdk-cpp -auto -batch \
-ignore 'Name {build}' \
-ignore 'Name {build_local_testnet}' \
-ignore 'Name {.vscode}' \
diff --git a/Doxyfile b/Doxyfile
index dc3d7d8c..3f4ceba1 100644
--- a/Doxyfile
+++ b/Doxyfile
@@ -41,7 +41,7 @@ DOXYFILE_ENCODING = UTF-8
# title of most generated pages and in a few other places.
# The default value is: My Project.
-PROJECT_NAME = "OrbiterSDK"
+PROJECT_NAME = "BDK"
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
# could be handy for archiving the generated documentation or if some version
@@ -53,7 +53,7 @@ PROJECT_NUMBER = "1"
# for a project that appears at the top of each page and should give viewer a
# quick idea about the purpose of the project. Keep the description short.
-PROJECT_BRIEF = "Subnet from Sparq Labs"
+PROJECT_BRIEF = "Blockchain Development Kit"
# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
# in the documentation. The maximum height of the logo should not exceed 55
@@ -410,7 +410,7 @@ IDL_PROPERTY_SUPPORT = YES
# all members of a group must be documented explicitly.
# The default value is: NO.
-DISTRIBUTE_GROUP_DOC = NO
+DISTRIBUTE_GROUP_DOC = YES
# If one adds a struct or class to a group and this option is enabled, then also
# any nested class or struct is added to the same group. By default this option
diff --git a/README.md b/README.md
index 2cb8e0b0..253f6060 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,13 @@
-# orbitersdk
+# Blockchain Development Kit (BDK)
-
-
-
-
-
-
+
+
+
+
+
+
@@ -20,7 +20,7 @@
alt="chat on Telegram">
-Sparq subnet source code. [See the docs](https://github.com/SparqNet/sparq-docs) for a more thorough look at the project.
+AppLayer's BDK source code. [See the docs](https://docs.applayer.com) for a more thorough look at the project.
If you are a developer, fill this form out for free support and additional incentives: https://forms.gle/m83ceG3XoJY3fpwU9
@@ -32,41 +32,70 @@ The project has a Dockerfile at the root of the repository that will build the p
* [Docker for Windows](https://docs.docker.com/docker-for-windows/install/)
* [Docker for Mac](https://docs.docker.com/docker-for-mac/install/)
* [Docker for Linux](https://docs.docker.com/desktop/install/linux-install/)
-* Build the image locally with `docker build -t bdk-cpp-dev:latest ` (if using Linux or Mac, run as `sudo`)
+* Build the image locally with `docker build -t bdk-cpp-dev:latest .`
* This will build the image and tag it as `bdk-cpp-dev:latest` - you can change the tag to whatever you want, but remember to change it on the next step
* Run the container (you will be logged in as root):
- * **For Linux/Mac**: `sudo docker run -it -v $(pwd):/orbitersdk-volume -p 8080-8099:8080-8099 -p 8110-8111:8110-8111 orbitersdk-cpp-dev:latest`
- * **For Windows**: `docker run -it -v %cd%:/orbitersdk-volume -p 8080-8099:8080-8099 -p 8110-8111:8110-8111 orbitersdk-cpp-dev:latest`
+ * **For Linux/Mac**: `docker run -it --name bdk-cpp -v $(pwd):/bdk-volume -p 8080-8099:8080-8099 -p 8110-8111:8110-8111 bdk-cpp-dev:latest`
+ * **For Windows**: `docker run -it --name bdk-cpp -v %cd%:/bdk-volume -p 8080-8099:8080-8099 -p 8110-8111:8110-8111 bdk-cpp-dev:latest`
-Remember that we are using our local SDK repo as a volume, so every change in the local folder will be reflected to the container in real time, and vice-versa.
+Remember that we are using our local repo as a volume, so every change in the local folder will be reflected to the container in real time, and vice-versa.
Also, you can integrate the container with your favorite IDE or editor, e.g. [VSCode + Docker extension](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-docker).
## Developing manually
-Install the following dependencies on your system:
-
-* **GCC** with support for **C++23** or higher
-* **CMake 3.19.0** or higher
-* **Boost 1.74** or higher (components: *chrono, filesystem, program-options, system, thread, nowide*)
-* **OpenSSL 1.1.1**
-* **CryptoPP 8.2.0** or higher
-* **libscrypt**
-* **zlib**
-* **libsnappy** for database compression
-* (optional) **clang-tidy** for linting
-
-If building with AvalancheGo support, you'll also need:
-
-* **Abseil (absl)**
-* **libc-ares**
-* **Protobuf 3.12** or higher
-* **gRPC**
-
-### One-liners
-
-For **Debian 12 Bookworm or newer**:
-* `sudo apt install build-essential cmake tmux clang-tidy autoconf libtool pkg-config libabsl-dev libboost-all-dev libc-ares-dev libcrypto++-dev libgrpc-dev libgrpc++-dev libscrypt-dev libssl-dev zlib1g-dev openssl protobuf-compiler protobuf-compiler-grpc`
+You will need the following dependencies installed locally on your system:
+
+* *Toolchain binaries*:
+ * **git**
+ * **GCC** with support for **C++23** or higher
+ * **Make**
+ * **CMake 3.19.0** or higher
+ * **Protobuf** (protoc + grpc_cpp_plugin)
+ * **tmux** (for deploying)
+ * (optional) **ninja** if you prefer it over make
+ * (optional) **mold** if you prefer it over ld
+ * (optional) **doxygen** for generating docs
+ * (optional) **clang-tidy** for linting
+* *Libraries*:
+ * **Boost 1.83** or higher (components: *chrono, filesystem, program-options, system, thread, nowide*)
+ * **OpenSSL 1.1.1** / **libssl 1.1.1** or higher
+ * **libzstd**
+ * **CryptoPP 8.2.0** or higher
+ * **libscrypt**
+ * **libc-ares**
+ * **gRPC** (libgrpc and libgrpc++)
+ * **secp256k1**
+ * **ethash** + **keccak**
+ * **EVMOne** + **EVMC**
+ * **Speedb**
+
+The versions of those dependencies should suffice out-of-the-box for at least the following distros (or greater, including their derivatives):
+
+* **Debian 13 (Trixie)**
+* **Ubuntu 24.04 LTS (Noble Numbat)**
+* **Linux Mint 22 (Wilma)**
+* **Fedora 40**
+* Any rolling release distro from around **May 2024** onwards (check their repos to be sure)
+
+### Tips for dependencies
+
+There is a script called `scripts/deps.sh` which you can use to check if you have those dependencies installed (`deps.sh --check`), install them in case you don't (`deps.sh --install`), and clean up the external ones for reinstalling (`deps.sh --cleanext`). The script expects dependencies to be installed either on `/usr` or `/usr/local`, giving preference to the latter if it finds anything there (so you can use a higher version of a dependency while still keeping your distro's default one).
+
+**Please note that installing most dependencies through the script only works on APT-based distros** (Debian, Ubuntu and derivatives) - you can still check the dependencies on any distro and install the few ones labeled as "external" (those are fetched through `git`), but if you're on a distro with another package manager and/or a distro older than one of the minimum ones listed above, you're on your own.
+
+For Debian specifically, you can (and should) use `update-alternatives` to register and set your GCC version to a more up-to-date build if required.
+
+If you're using a self-compiled GCC build out of the system path (e.g. `--prefix=/usr/local/gcc-X.Y.Z` instead of `--prefix=/usr/local`), don't forget to export its installation paths in your `PATH` and `LD_LIBRARY_PATH` env vars (to prevent e.g. "version `GLIBCXX_...'/`CXXABI_...` not found" errors). Put something like this in your `~/.bashrc` file for example, changing the version accordingly to whichever one you have installed:
+
+```bash
+# For GCC in /usr/local
+export LD_LIBRARY_PATH=/usr/local/lib64:$LD_LIBRARY_PATH
+
+# For self-contained GCC outside /usr/local
+export PATH=/usr/local/gcc-14.2.0/bin:$PATH
+export LD_LIBRARY_PATH=/usr/local/gcc-14.2.0/lib64:$LD_LIBRARY_PATH
+```
## Documentation
@@ -74,18 +103,18 @@ We use [Doxygen](https://www.doxygen.nl/index.html) to generate documentation ov
You should do this after running `cmake ..` in the build directory, as some header files need to be generated first.
-For a more detailed explanation of the project's structure, check the [docs](https://github.com/SparqNet/sparq-docs/tree/main/Sparq_en-US) repository.
+For a more detailed explanation of the project's structure, check the [docs](https://docs.applayer.com).
## Compiling
-* Clone the project: `git clone https://github.com/SparqNet/orbitersdk-cpp
+* Clone the project: `git clone https://github.com/AppLayer/bdk-cpp`
* Go to the project's root folder, create a "build" folder and change to it:
- * `cd orbitersdk-cpp && mkdir build && cd build`
+ * `cd bdk-cpp && mkdir build && cd build`
* Run `cmake` inside the build folder: `cmake ..`
* Use `-DCMAKE_BUILD_TYPE={Debug,RelWithDebInfo,Release}` to set the respective debug/release builds (Debug by default)
* Use `-DDEBUG=OFF` to build without debug flags (ON by default)
* Use `-DUSE_LINT=ON` to run clang-tidy along the build (OFF by default, WILL TAKE SIGNIFICANTLY LONGER TO COMPILE)
-* Build the executable: `cmake --build . -- -j$(nproc)`
+* Build the executable: `cmake --build . -- -j$(nproc)` (adjust `-j$(nproc)` accordingly if needed)
* If using the linter, pipe stderr to a file (e.g. `cmake --build . -- -j$(nproc) 2> log.txt`)
## Deploying
@@ -124,17 +153,13 @@ The deployed chain will have the following information by default:
Nodes are all deployed on the same machine, under the following ports and tmux sessions:
-| Session Name | Node Type | P2P Port | HTTP Port | Validator Key |
-|--------------------------|-----------|----------|-----------|--------------------------------------------------------------------|
-| local_testnet_discovery | Discovery | 8080 | 8090 | XXXX |
-| local_testnet_validator1 | Validator | 8081 | 8090 | 0xba5e6e9dd9cbd263969b94ee385d885c2d303dfc181db2a09f6bf19a7ba26759 |
-| local_testnet_validator2 | Validator | 8082 | 8091 | 0xfd84d99aa18b474bf383e10925d82194f1b0ca268e7a339032679d6e3a201ad4 |
-| local_testnet_validator3 | Validator | 8083 | 8092 | 0x66ce71abe0b8acd92cfd3965d6f9d80122aed9b0e9bdd3dbe018230bafde5751 |
-| local_testnet_validator4 | Validator | 8084 | 8093 | 0x856aeb3b9c20a80d1520a2406875f405d336e09475f43c478eb4f0dafb765fe7 |
-| local_testnet_validator5 | Validator | 8085 | 8094 | 0x81f288dd776f4edfe256d34af1f7d719f511559f19115af3e3d692e741faadc6 |
-| local_testnet_normal1 | Normal | 8086 | 8095 | XXXX |
-| local_testnet_normal2 | Normal | 8087 | 8096 | XXXX |
-| local_testnet_normal3 | Normal | 8088 | 8097 | XXXX |
-| local_testnet_normal4 | Normal | 8089 | 8098 | XXXX |
-| local_testnet_normal5 | Normal | 8110 | 8099 | XXXX |
-| local_testnet_normal6 | Normal | 8111 | 8100 | XXXX |
+| Session Name | Node Type | P2P Port | HTTP Port | Validator Key |
+|--------------------------|-----------|----------|-----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| local_testnet_discovery | Discovery | 8080 | 8090 | XXXX |
+| local_testnet_validator1 | Validator | 8081 | 8090 | 0xba5e6e9dd9cbd263969b94ee385d885c2d303dfc181db2a09f6bf19a7ba26759, 0xfd84d99aa18b474bf383e10925d82194f1b0ca268e7a339032679d6e3a201ad4, 0x66ce71abe0b8acd92cfd3965d6f9d80122aed9b0e9bdd3dbe018230bafde5751, 0x856aeb3b9c20a80d1520a2406875f405d336e09475f43c478eb4f0dafb765fe7, 0x81f288dd776f4edfe256d34af1f7d719f511559f19115af3e3d692e741faadc6 |
+| local_testnet_normal1 | Normal | 8086 | 8095 | XXXX |
+| local_testnet_normal2 | Normal | 8087 | 8096 | XXXX |
+| local_testnet_normal3 | Normal | 8088 | 8097 | XXXX |
+| local_testnet_normal4 | Normal | 8089 | 8098 | XXXX |
+| local_testnet_normal5 | Normal | 8110 | 8099 | XXXX |
+| local_testnet_normal6 | Normal | 8111 | 8100 | XXXX |
diff --git a/cmake/FindAbsl.cmake b/cmake/FindAbsl.cmake
deleted file mode 100644
index bf039dfc..00000000
--- a/cmake/FindAbsl.cmake
+++ /dev/null
@@ -1,1378 +0,0 @@
-# Finds the Abseil (absl) libraries in the system.
-# Custom-built/"copied-then-modded" from the original CMake config files
-# (abslConfig.cmake, abslTargets.cmake, abslTargets-none.cmake).
-# Those are hardcoded to link SHARED libraries, with no way to toggle to STATIC.
-# This one links STATIC by default, and can be toggled with `set(ABSL_FIND_SHARED ON)`.
-# WORKS ON LINUX PATHS ONLY. I won't bother porting to other systems.
-
-# Set up prefixes/suffixes and lib type
-set(ABSL_INCLUDE_DIR "/usr/include")
-set(ABSL_PATH_PREFIX "/usr/lib/x86_64-linux-gnu/")
-if(ABSL_FIND_SHARED)
- set(ABSL_LIB_TYPE SHARED)
- set(ABSL_LIB_SUFFIX ".so")
-else()
- set(ABSL_LIB_TYPE STATIC)
- set(ABSL_LIB_SUFFIX ".a")
-endif()
-
-# Protect against multiple inclusion, which would fail when already imported targets are added once more.
-set(_targetsDefined)
-set(_targetsNotDefined)
-set(_expectedTargets)
-foreach(_expectedTarget absl::atomic_hook absl::errno_saver absl::log_severity absl::raw_logging_internal absl::spinlock_wait absl::config absl::dynamic_annotations absl::core_headers absl::malloc_internal absl::base_internal absl::base absl::throw_delegate absl::pretty_function absl::endian absl::bits absl::exponential_biased absl::periodic_sampler absl::scoped_set_env absl::strerror absl::fast_type_id absl::algorithm absl::algorithm_container absl::container absl::btree absl::compressed_tuple absl::fixed_array absl::inlined_vector_internal absl::inlined_vector absl::counting_allocator absl::flat_hash_map absl::flat_hash_set absl::node_hash_map absl::node_hash_set absl::container_memory absl::hash_function_defaults absl::hash_policy_traits absl::hashtablez_sampler absl::hashtable_debug absl::hashtable_debug_hooks absl::have_sse absl::node_hash_policy absl::raw_hash_map absl::container_common absl::raw_hash_set absl::layout absl::stacktrace absl::symbolize absl::examine_stack absl::failure_signal_handler absl::debugging_internal absl::demangle_internal absl::leak_check absl::leak_check_disable absl::debugging absl::flags_path_util absl::flags_program_name absl::flags_config absl::flags_marshalling absl::flags_commandlineflag_internal absl::flags_commandlineflag absl::flags_private_handle_accessor absl::flags_reflection absl::flags_internal absl::flags absl::flags_usage_internal absl::flags_usage absl::flags_parse absl::bind_front absl::function_ref absl::hash absl::city absl::memory absl::type_traits absl::meta absl::int128 absl::numeric absl::random_random absl::random_bit_gen_ref absl::random_internal_mock_helpers absl::random_distributions absl::random_seed_gen_exception absl::random_seed_sequences absl::random_internal_traits absl::random_internal_distribution_caller absl::random_internal_fast_uniform_bits absl::random_internal_seed_material absl::random_internal_pool_urbg absl::random_internal_salted_seed_seq absl::random_internal_iostream_state_saver 
absl::random_internal_generate_real absl::random_internal_wide_multiply absl::random_internal_fastmath absl::random_internal_nonsecure_base absl::random_internal_pcg_engine absl::random_internal_randen_engine absl::random_internal_platform absl::random_internal_randen absl::random_internal_randen_slow absl::random_internal_randen_hwaes absl::random_internal_randen_hwaes_impl absl::random_internal_distribution_test_util absl::random_internal_uniform_helper absl::status absl::statusor absl::strings absl::strings_internal absl::str_format absl::str_format_internal absl::cord absl::graphcycles_internal absl::kernel_timeout_internal absl::synchronization absl::time absl::civil_time absl::time_zone absl::any absl::bad_any_cast absl::bad_any_cast_impl absl::span absl::optional absl::bad_optional_access absl::bad_variant_access absl::variant absl::compare absl::utility)
- list(APPEND _expectedTargets ${_expectedTarget})
- if(NOT TARGET ${_expectedTarget})
- list(APPEND _targetsNotDefined ${_expectedTarget})
- endif()
- if(TARGET ${_expectedTarget})
- list(APPEND _targetsDefined ${_expectedTarget})
- endif()
-endforeach()
-if("${_targetsDefined}" STREQUAL "${_expectedTargets}")
- unset(_targetsDefined)
- unset(_targetsNotDefined)
- unset(_expectedTargets)
- set(CMAKE_IMPORT_FILE_VERSION)
- cmake_policy(POP)
- return()
-endif()
-if(NOT "${_targetsDefined}" STREQUAL "")
- message(FATAL_ERROR "Some (but not all) targets in this export set were already defined.\nTargets Defined: ${_targetsDefined}\nTargets not yet defined: ${_targetsNotDefined}\n")
-endif()
-unset(_targetsDefined)
-unset(_targetsNotDefined)
-unset(_expectedTargets)
-
-# Create imported target absl::atomic_hook
-add_library(absl::atomic_hook INTERFACE IMPORTED)
-
-set_target_properties(absl::atomic_hook PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::errno_saver
-add_library(absl::errno_saver INTERFACE IMPORTED)
-
-set_target_properties(absl::errno_saver PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::log_severity
-add_library(absl::log_severity ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::log_severity PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_log_severity${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_log_severity${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::raw_logging_internal
-add_library(absl::raw_logging_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::raw_logging_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::atomic_hook;absl::config;absl::core_headers;absl::log_severity"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_raw_logging_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_raw_logging_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::spinlock_wait
-add_library(absl::spinlock_wait ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::spinlock_wait PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base_internal;absl::core_headers;absl::errno_saver"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_spinlock_wait${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_spinlock_wait${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::config
-add_library(absl::config INTERFACE IMPORTED)
-
-set_target_properties(absl::config PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::dynamic_annotations
-add_library(absl::dynamic_annotations INTERFACE IMPORTED)
-
-set_target_properties(absl::dynamic_annotations PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::core_headers
-add_library(absl::core_headers INTERFACE IMPORTED)
-
-set_target_properties(absl::core_headers PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::malloc_internal
-add_library(absl::malloc_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::malloc_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::base_internal;absl::config;absl::core_headers;absl::dynamic_annotations;absl::raw_logging_internal;Threads::Threads"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_malloc_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_malloc_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::base_internal
-add_library(absl::base_internal INTERFACE IMPORTED)
-
-set_target_properties(absl::base_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::base
-add_library(absl::base ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::base PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::atomic_hook;absl::base_internal;absl::config;absl::core_headers;absl::dynamic_annotations;absl::log_severity;absl::raw_logging_internal;absl::spinlock_wait;absl::type_traits;Threads::Threads"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_base${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_base${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::throw_delegate
-add_library(absl::throw_delegate ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::throw_delegate PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::raw_logging_internal"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_throw_delegate${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_throw_delegate${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::pretty_function
-add_library(absl::pretty_function INTERFACE IMPORTED)
-
-set_target_properties(absl::pretty_function PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::endian
-add_library(absl::endian INTERFACE IMPORTED)
-
-set_target_properties(absl::endian PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::config;absl::core_headers;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::bits
-add_library(absl::bits INTERFACE IMPORTED)
-
-set_target_properties(absl::bits PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::exponential_biased
-add_library(absl::exponential_biased ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::exponential_biased PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_exponential_biased${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_exponential_biased${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::periodic_sampler
-add_library(absl::periodic_sampler ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::periodic_sampler PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::exponential_biased"
-)
-
-# Create imported target absl::scoped_set_env
-add_library(absl::scoped_set_env ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::scoped_set_env PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::raw_logging_internal"
-)
-
-# Create imported target absl::strerror
-add_library(absl::strerror ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::strerror PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::errno_saver"
-)
-
-# Create imported target absl::fast_type_id
-add_library(absl::fast_type_id INTERFACE IMPORTED)
-
-set_target_properties(absl::fast_type_id PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::algorithm
-add_library(absl::algorithm INTERFACE IMPORTED)
-
-set_target_properties(absl::algorithm PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::algorithm_container
-add_library(absl::algorithm_container INTERFACE IMPORTED)
-
-set_target_properties(absl::algorithm_container PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::algorithm;absl::core_headers;absl::meta;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::container
-add_library(absl::container INTERFACE IMPORTED)
-
-set_target_properties(absl::container PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::btree
-add_library(absl::btree INTERFACE IMPORTED)
-
-set_target_properties(absl::btree PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::container_common;absl::compare;absl::compressed_tuple;absl::container_memory;absl::cord;absl::core_headers;absl::layout;absl::memory;absl::strings;absl::throw_delegate;absl::type_traits;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::compressed_tuple
-add_library(absl::compressed_tuple INTERFACE IMPORTED)
-
-set_target_properties(absl::compressed_tuple PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::fixed_array
-add_library(absl::fixed_array INTERFACE IMPORTED)
-
-set_target_properties(absl::fixed_array PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::compressed_tuple;absl::algorithm;absl::config;absl::core_headers;absl::dynamic_annotations;absl::throw_delegate;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::inlined_vector_internal
-add_library(absl::inlined_vector_internal INTERFACE IMPORTED)
-
-set_target_properties(absl::inlined_vector_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::compressed_tuple;absl::core_headers;absl::memory;absl::span;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::inlined_vector
-add_library(absl::inlined_vector INTERFACE IMPORTED)
-
-set_target_properties(absl::inlined_vector PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::algorithm;absl::core_headers;absl::inlined_vector_internal;absl::throw_delegate;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::counting_allocator
-add_library(absl::counting_allocator INTERFACE IMPORTED)
-
-set_target_properties(absl::counting_allocator PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::flat_hash_map
-add_library(absl::flat_hash_map INTERFACE IMPORTED)
-
-set_target_properties(absl::flat_hash_map PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::container_memory;absl::hash_function_defaults;absl::raw_hash_map;absl::algorithm_container;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::flat_hash_set
-add_library(absl::flat_hash_set INTERFACE IMPORTED)
-
-set_target_properties(absl::flat_hash_set PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::container_memory;absl::hash_function_defaults;absl::raw_hash_set;absl::algorithm_container;absl::core_headers;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::node_hash_map
-add_library(absl::node_hash_map INTERFACE IMPORTED)
-
-set_target_properties(absl::node_hash_map PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::container_memory;absl::hash_function_defaults;absl::node_hash_policy;absl::raw_hash_map;absl::algorithm_container;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::node_hash_set
-add_library(absl::node_hash_set INTERFACE IMPORTED)
-
-set_target_properties(absl::node_hash_set PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::hash_function_defaults;absl::node_hash_policy;absl::raw_hash_set;absl::algorithm_container;absl::memory;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::container_memory
-add_library(absl::container_memory INTERFACE IMPORTED)
-
-set_target_properties(absl::container_memory PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::memory;absl::type_traits;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::hash_function_defaults
-add_library(absl::hash_function_defaults INTERFACE IMPORTED)
-
-set_target_properties(absl::hash_function_defaults PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::cord;absl::hash;absl::strings;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::hash_policy_traits
-add_library(absl::hash_policy_traits INTERFACE IMPORTED)
-
-set_target_properties(absl::hash_policy_traits PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::meta;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::hashtablez_sampler
-add_library(absl::hashtablez_sampler ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::hashtablez_sampler PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::exponential_biased;absl::have_sse;absl::synchronization"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_hashtablez_sampler${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_hashtablez_sampler${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::hashtable_debug
-add_library(absl::hashtable_debug INTERFACE IMPORTED)
-
-set_target_properties(absl::hashtable_debug PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::hashtable_debug_hooks;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::hashtable_debug_hooks
-add_library(absl::hashtable_debug_hooks INTERFACE IMPORTED)
-
-set_target_properties(absl::hashtable_debug_hooks PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::have_sse
-add_library(absl::have_sse INTERFACE IMPORTED)
-
-set_target_properties(absl::have_sse PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::node_hash_policy
-add_library(absl::node_hash_policy INTERFACE IMPORTED)
-
-set_target_properties(absl::node_hash_policy PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::raw_hash_map
-add_library(absl::raw_hash_map INTERFACE IMPORTED)
-
-set_target_properties(absl::raw_hash_map PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::container_memory;absl::raw_hash_set;absl::throw_delegate;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::container_common
-add_library(absl::container_common INTERFACE IMPORTED)
-
-set_target_properties(absl::container_common PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::raw_hash_set
-add_library(absl::raw_hash_set ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::raw_hash_set PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;absl::compressed_tuple;absl::config;absl::container_common;absl::container_memory;absl::core_headers;absl::endian;absl::hash_policy_traits;absl::hashtable_debug_hooks;absl::have_sse;absl::layout;absl::memory;absl::meta;absl::optional;absl::utility;absl::hashtablez_sampler"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_raw_hash_set${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_raw_hash_set${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::layout
-add_library(absl::layout INTERFACE IMPORTED)
-
-set_target_properties(absl::layout PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::meta;absl::strings;absl::span;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::stacktrace
-add_library(absl::stacktrace ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::stacktrace PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::debugging_internal;absl::config;absl::core_headers"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_stacktrace${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_stacktrace${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::symbolize
-add_library(absl::symbolize ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::symbolize PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::debugging_internal;absl::demangle_internal;absl::base;absl::config;absl::core_headers;absl::dynamic_annotations;absl::malloc_internal;absl::raw_logging_internal;absl::strings"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_symbolize${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_symbolize${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::examine_stack
-add_library(absl::examine_stack ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::examine_stack PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::stacktrace;absl::symbolize;absl::config;absl::core_headers;absl::raw_logging_internal"
-)
-
-# Create imported target absl::failure_signal_handler
-add_library(absl::failure_signal_handler ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::failure_signal_handler PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::examine_stack;absl::stacktrace;absl::base;absl::config;absl::core_headers;absl::errno_saver;absl::raw_logging_internal"
-)
-
-# Create imported target absl::debugging_internal
-add_library(absl::debugging_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::debugging_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::config;absl::dynamic_annotations;absl::errno_saver;absl::raw_logging_internal"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_debugging_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_debugging_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::demangle_internal
-add_library(absl::demangle_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::demangle_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::core_headers"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_demangle_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_demangle_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::leak_check
-add_library(absl::leak_check ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::leak_check PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers"
-)
-
-# Create imported target absl::leak_check_disable
-add_library(absl::leak_check_disable ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::leak_check_disable PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
-)
-
-# Create imported target absl::debugging
-add_library(absl::debugging INTERFACE IMPORTED)
-
-set_target_properties(absl::debugging PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::stacktrace;absl::leak_check;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::flags_path_util
-add_library(absl::flags_path_util INTERFACE IMPORTED)
-
-set_target_properties(absl::flags_path_util PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::strings;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::flags_program_name
-add_library(absl::flags_program_name ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_program_name PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::flags_path_util;absl::strings;absl::synchronization"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_program_name${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_program_name${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_config
-add_library(absl::flags_config ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_config PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::flags_path_util;absl::flags_program_name;absl::core_headers;absl::strings;absl::synchronization"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_config${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_config${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_marshalling
-add_library(absl::flags_marshalling ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_marshalling PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::log_severity;absl::strings;absl::str_format"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_marshalling${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_marshalling${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_commandlineflag_internal
-add_library(absl::flags_commandlineflag_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_commandlineflag_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::fast_type_id"
- IMPORTED_LOCATION
- "${ABSL_PATH_PREFIX}libabsl_flags_commandlineflag_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_commandlineflag_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_commandlineflag
-add_library(absl::flags_commandlineflag ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_commandlineflag PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::fast_type_id;absl::flags_commandlineflag_internal;absl::optional;absl::strings"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_commandlineflag${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_commandlineflag${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_private_handle_accessor
-add_library(absl::flags_private_handle_accessor ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_private_handle_accessor PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::flags_commandlineflag;absl::flags_commandlineflag_internal;absl::strings"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_private_handle_accessor${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_private_handle_accessor${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_reflection
-add_library(absl::flags_reflection ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_reflection PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::flags_commandlineflag;absl::flags_private_handle_accessor;absl::flags_config;absl::strings;absl::synchronization;absl::flat_hash_map"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_reflection${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_reflection${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_internal
-add_library(absl::flags_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::config;absl::flags_commandlineflag;absl::flags_commandlineflag_internal;absl::flags_config;absl::flags_marshalling;absl::synchronization;absl::meta;absl::utility"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags
-add_library(absl::flags ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::flags_commandlineflag;absl::flags_config;absl::flags_internal;absl::flags_reflection;absl::base;absl::core_headers;absl::strings"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_flags${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_flags${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::flags_usage_internal
-add_library(absl::flags_usage_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_usage_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::flags_config;absl::flags;absl::flags_commandlineflag;absl::flags_internal;absl::flags_path_util;absl::flags_private_handle_accessor;absl::flags_program_name;absl::flags_reflection;absl::strings;absl::synchronization"
-)
-
-# Create imported target absl::flags_usage
-add_library(absl::flags_usage ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_usage PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::flags_usage_internal;absl::strings;absl::synchronization"
-)
-
-# Create imported target absl::flags_parse
-add_library(absl::flags_parse ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::flags_parse PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::flags_config;absl::flags;absl::flags_commandlineflag;absl::flags_commandlineflag_internal;absl::flags_internal;absl::flags_private_handle_accessor;absl::flags_program_name;absl::flags_reflection;absl::flags_usage;absl::strings;absl::synchronization"
-)
-
-# Create imported target absl::bind_front
-add_library(absl::bind_front INTERFACE IMPORTED)
-
-set_target_properties(absl::bind_front PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base_internal;absl::compressed_tuple;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::function_ref
-add_library(absl::function_ref INTERFACE IMPORTED)
-
-set_target_properties(absl::function_ref PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base_internal;absl::meta;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::hash
-add_library(absl::hash ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::hash PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::endian;absl::fixed_array;absl::meta;absl::int128;absl::strings;absl::optional;absl::variant;absl::utility;absl::city"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_hash${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_hash${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::city
-add_library(absl::city ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::city PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::endian"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_city${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_city${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::memory
-add_library(absl::memory INTERFACE IMPORTED)
-
-set_target_properties(absl::memory PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::meta;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::type_traits
-add_library(absl::type_traits INTERFACE IMPORTED)
-
-set_target_properties(absl::type_traits PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::meta
-add_library(absl::meta INTERFACE IMPORTED)
-
-set_target_properties(absl::meta PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::int128
-add_library(absl::int128 ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::int128 PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;absl::config;absl::core_headers"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_int128${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_int128${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::numeric
-add_library(absl::numeric INTERFACE IMPORTED)
-
-set_target_properties(absl::numeric PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::int128;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_random
-add_library(absl::random_random INTERFACE IMPORTED)
-
-set_target_properties(absl::random_random PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::random_distributions;absl::random_internal_nonsecure_base;absl::random_internal_pcg_engine;absl::random_internal_pool_urbg;absl::random_internal_randen_engine;absl::random_seed_sequences;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_bit_gen_ref
-add_library(absl::random_bit_gen_ref INTERFACE IMPORTED)
-
-set_target_properties(absl::random_bit_gen_ref PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::random_internal_distribution_caller;absl::random_internal_fast_uniform_bits;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_mock_helpers
-add_library(absl::random_internal_mock_helpers INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_mock_helpers PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::fast_type_id;absl::optional;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_distributions
-add_library(absl::random_distributions ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_distributions PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base_internal;absl::config;absl::core_headers;absl::random_internal_generate_real;absl::random_internal_distribution_caller;absl::random_internal_fast_uniform_bits;absl::random_internal_fastmath;absl::random_internal_iostream_state_saver;absl::random_internal_traits;absl::random_internal_uniform_helper;absl::random_internal_wide_multiply;absl::strings;absl::type_traits"
-)
-
-# Create imported target absl::random_seed_gen_exception
-add_library(absl::random_seed_gen_exception ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_seed_gen_exception PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config"
-)
-
-# Create imported target absl::random_seed_sequences
-add_library(absl::random_seed_sequences ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_seed_sequences PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::inlined_vector;absl::random_internal_nonsecure_base;absl::random_internal_pool_urbg;absl::random_internal_salted_seed_seq;absl::random_internal_seed_material;absl::random_seed_gen_exception;absl::span"
-)
-
-# Create imported target absl::random_internal_traits
-add_library(absl::random_internal_traits INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_traits PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_distribution_caller
-add_library(absl::random_internal_distribution_caller INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_distribution_caller PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::utility;absl::fast_type_id;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_fast_uniform_bits
-add_library(absl::random_internal_fast_uniform_bits INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_fast_uniform_bits PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_seed_material
-add_library(absl::random_internal_seed_material ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_seed_material PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::optional;absl::random_internal_fast_uniform_bits;absl::raw_logging_internal;absl::span;absl::strings"
-)
-
-# Create imported target absl::random_internal_pool_urbg
-add_library(absl::random_internal_pool_urbg ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_pool_urbg PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::config;absl::core_headers;absl::endian;absl::random_internal_randen;absl::random_internal_seed_material;absl::random_internal_traits;absl::random_seed_gen_exception;absl::raw_logging_internal;absl::span"
-)
-
-# Create imported target absl::random_internal_salted_seed_seq
-add_library(absl::random_internal_salted_seed_seq INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_salted_seed_seq PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::inlined_vector;absl::optional;absl::span;absl::random_internal_seed_material;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_iostream_state_saver
-add_library(absl::random_internal_iostream_state_saver INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_iostream_state_saver PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::int128;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_generate_real
-add_library(absl::random_internal_generate_real INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_generate_real PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;absl::random_internal_fastmath;absl::random_internal_traits;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_wide_multiply
-add_library(absl::random_internal_wide_multiply INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_wide_multiply PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;absl::config;absl::int128;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_fastmath
-add_library(absl::random_internal_fastmath INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_fastmath PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_nonsecure_base
-add_library(absl::random_internal_nonsecure_base INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_nonsecure_base PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::optional;absl::random_internal_pool_urbg;absl::random_internal_salted_seed_seq;absl::random_internal_seed_material;absl::span;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_pcg_engine
-add_library(absl::random_internal_pcg_engine INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_pcg_engine PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::int128;absl::random_internal_fastmath;absl::random_internal_iostream_state_saver;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_randen_engine
-add_library(absl::random_internal_randen_engine INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_randen_engine PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::endian;absl::random_internal_iostream_state_saver;absl::random_internal_randen;absl::raw_logging_internal;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::random_internal_platform
-add_library(absl::random_internal_platform ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_platform PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config"
-)
-
-# Create imported target absl::random_internal_randen
-add_library(absl::random_internal_randen ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_randen PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::random_internal_platform;absl::random_internal_randen_hwaes;absl::random_internal_randen_slow"
-)
-
-# Create imported target absl::random_internal_randen_slow
-add_library(absl::random_internal_randen_slow ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_randen_slow PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::random_internal_platform;absl::config"
-)
-
-# Create imported target absl::random_internal_randen_hwaes
-add_library(absl::random_internal_randen_hwaes ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_randen_hwaes PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::random_internal_platform;absl::random_internal_randen_hwaes_impl;absl::config"
-)
-
-# Create imported target absl::random_internal_randen_hwaes_impl
-add_library(absl::random_internal_randen_hwaes_impl ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_randen_hwaes_impl PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::random_internal_platform;absl::config"
-)
-
-# Create imported target absl::random_internal_distribution_test_util
-add_library(absl::random_internal_distribution_test_util ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::random_internal_distribution_test_util PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::raw_logging_internal;absl::strings;absl::str_format;absl::span"
-)
-
-# Create imported target absl::random_internal_uniform_helper
-add_library(absl::random_internal_uniform_helper INTERFACE IMPORTED)
-
-set_target_properties(absl::random_internal_uniform_helper PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::random_internal_traits;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::status
-add_library(absl::status ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::status PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::atomic_hook;absl::config;absl::core_headers;absl::raw_logging_internal;absl::inlined_vector;absl::stacktrace;absl::symbolize;absl::strings;absl::cord;absl::str_format;absl::optional"
-)
-
-# Create imported target absl::statusor
-add_library(absl::statusor ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::statusor PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::status;absl::core_headers;absl::raw_logging_internal;absl::type_traits;absl::strings;absl::utility;absl::variant"
-)
-
-# Create imported target absl::strings
-add_library(absl::strings ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::strings PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::strings_internal;absl::base;absl::bits;absl::config;absl::core_headers;absl::endian;absl::int128;absl::memory;absl::raw_logging_internal;absl::throw_delegate;absl::type_traits"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_strings${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_strings${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::strings_internal
-add_library(absl::strings_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::strings_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::core_headers;absl::endian;absl::raw_logging_internal;absl::type_traits"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_strings_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_strings_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::str_format
-add_library(absl::str_format INTERFACE IMPORTED)
-
-set_target_properties(absl::str_format PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::str_format_internal;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::str_format_internal
-add_library(absl::str_format_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::str_format_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bits;absl::strings;absl::config;absl::core_headers;absl::type_traits;absl::int128;absl::span"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_str_format_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_str_format_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::cord
-add_library(absl::cord ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::cord PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::base_internal;absl::compressed_tuple;absl::core_headers;absl::endian;absl::fixed_array;absl::function_ref;absl::inlined_vector;absl::optional;absl::raw_logging_internal;absl::strings;absl::strings_internal;absl::type_traits"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_cord${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_cord${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::graphcycles_internal
-add_library(absl::graphcycles_internal ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::graphcycles_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::base_internal;absl::config;absl::core_headers;absl::malloc_internal;absl::raw_logging_internal"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_graphcycles_internal${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_graphcycles_internal${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::kernel_timeout_internal
-add_library(absl::kernel_timeout_internal INTERFACE IMPORTED)
-
-set_target_properties(absl::kernel_timeout_internal PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::raw_logging_internal;absl::time;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::synchronization
-add_library(absl::synchronization ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::synchronization PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::graphcycles_internal;absl::kernel_timeout_internal;absl::atomic_hook;absl::base;absl::base_internal;absl::config;absl::core_headers;absl::dynamic_annotations;absl::malloc_internal;absl::raw_logging_internal;absl::stacktrace;absl::symbolize;absl::time;Threads::Threads"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_synchronization${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_synchronization${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::time
-add_library(absl::time ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::time PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base;absl::civil_time;absl::core_headers;absl::int128;absl::raw_logging_internal;absl::strings;absl::time_zone"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_time${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_time${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::civil_time
-add_library(absl::civil_time ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::civil_time PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_civil_time${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_civil_time${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::time_zone
-add_library(absl::time_zone ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::time_zone PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "\$<\$:>"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_time_zone${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_time_zone${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::any
-add_library(absl::any INTERFACE IMPORTED)
-
-set_target_properties(absl::any PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bad_any_cast;absl::config;absl::core_headers;absl::fast_type_id;absl::type_traits;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::bad_any_cast
-add_library(absl::bad_any_cast INTERFACE IMPORTED)
-
-set_target_properties(absl::bad_any_cast PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bad_any_cast_impl;absl::config;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::bad_any_cast_impl
-add_library(absl::bad_any_cast_impl ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::bad_any_cast_impl PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::raw_logging_internal"
-)
-
-# Create imported target absl::span
-add_library(absl::span INTERFACE IMPORTED)
-
-set_target_properties(absl::span PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::algorithm;absl::core_headers;absl::throw_delegate;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::optional
-add_library(absl::optional INTERFACE IMPORTED)
-
-set_target_properties(absl::optional PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bad_optional_access;absl::base_internal;absl::config;absl::core_headers;absl::memory;absl::type_traits;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::bad_optional_access
-add_library(absl::bad_optional_access ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::bad_optional_access PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::raw_logging_internal"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_bad_optional_access${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_bad_optional_access${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::bad_variant_access
-add_library(absl::bad_variant_access ${ABSL_LIB_TYPE} IMPORTED)
-
-set_target_properties(absl::bad_variant_access PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::config;absl::raw_logging_internal"
- IMPORTED_LOCATION "${ABSL_PATH_PREFIX}libabsl_bad_variant_access${ABSL_LIB_SUFFIX}"
- IMPORTED_SONAME "libabsl_bad_variant_access${ABSL_LIB_SUFFIX}"
-)
-
-# Create imported target absl::variant
-add_library(absl::variant INTERFACE IMPORTED)
-
-set_target_properties(absl::variant PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::bad_variant_access;absl::base_internal;absl::config;absl::core_headers;absl::type_traits;absl::utility;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::compare
-add_library(absl::compare INTERFACE IMPORTED)
-
-set_target_properties(absl::compare PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::core_headers;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Create imported target absl::utility
-add_library(absl::utility INTERFACE IMPORTED)
-
-set_target_properties(absl::utility PROPERTIES
- INTERFACE_INCLUDE_DIRECTORIES ${ABSL_INCLUDE_DIR}
- INTERFACE_LINK_LIBRARIES "absl::base_internal;absl::config;absl::type_traits;-Wl,--as-needed;-latomic;-Wl,--no-as-needed"
-)
-
-# Get all library paths for checking if they exist
-list(APPEND _IMPORT_CHECK_TARGETS absl::log_severity)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::log_severity
- "${ABSL_PATH_PREFIX}libabsl_log_severity${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::raw_logging_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::raw_logging_internal
- "${ABSL_PATH_PREFIX}libabsl_raw_logging_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::spinlock_wait)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::spinlock_wait
- "${ABSL_PATH_PREFIX}libabsl_spinlock_wait${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::malloc_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::malloc_internal
- "${ABSL_PATH_PREFIX}libabsl_malloc_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::base)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::base
- "${ABSL_PATH_PREFIX}libabsl_base${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::throw_delegate)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::throw_delegate
- "${ABSL_PATH_PREFIX}libabsl_throw_delegate${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::exponential_biased)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::exponential_biased
- "${ABSL_PATH_PREFIX}libabsl_exponential_biased${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::periodic_sampler)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::periodic_sampler
- "${ABSL_PATH_PREFIX}libabsl_periodic_sampler${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::scoped_set_env)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::scoped_set_env
- "${ABSL_PATH_PREFIX}libabsl_scoped_set_env${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::strerror)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::strerror
- "${ABSL_PATH_PREFIX}libabsl_strerror${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::hashtablez_sampler)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::hashtablez_sampler
- "${ABSL_PATH_PREFIX}libabsl_hashtablez_sampler${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::raw_hash_set)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::raw_hash_set
- "${ABSL_PATH_PREFIX}libabsl_raw_hash_set${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::stacktrace)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::stacktrace
- "${ABSL_PATH_PREFIX}libabsl_stacktrace${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::symbolize)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::symbolize
- "${ABSL_PATH_PREFIX}libabsl_symbolize${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::examine_stack)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::examine_stack
- "${ABSL_PATH_PREFIX}libabsl_examine_stack${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::failure_signal_handler)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::failure_signal_handler
- "${ABSL_PATH_PREFIX}libabsl_failure_signal_handler${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::debugging_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::debugging_internal
- "${ABSL_PATH_PREFIX}libabsl_debugging_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::demangle_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::demangle_internal
- "${ABSL_PATH_PREFIX}libabsl_demangle_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::leak_check)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::leak_check
- "${ABSL_PATH_PREFIX}libabsl_leak_check${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::leak_check_disable)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::leak_check_disable
- "${ABSL_PATH_PREFIX}libabsl_leak_check_disable${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_program_name)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_program_name
- "${ABSL_PATH_PREFIX}libabsl_flags_program_name${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_config)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_config
- "${ABSL_PATH_PREFIX}libabsl_flags_config${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_marshalling)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_marshalling
- "${ABSL_PATH_PREFIX}libabsl_flags_marshalling${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_commandlineflag_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_commandlineflag_internal
- "${ABSL_PATH_PREFIX}libabsl_flags_commandlineflag_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_commandlineflag)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_commandlineflag
- "${ABSL_PATH_PREFIX}libabsl_flags_commandlineflag${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_private_handle_accessor)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_private_handle_accessor
- "${ABSL_PATH_PREFIX}libabsl_flags_private_handle_accessor${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_reflection)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_reflection
- "${ABSL_PATH_PREFIX}libabsl_flags_reflection${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_internal
- "${ABSL_PATH_PREFIX}libabsl_flags_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags
- "${ABSL_PATH_PREFIX}libabsl_flags${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_usage_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_usage_internal
- "${ABSL_PATH_PREFIX}libabsl_flags_usage_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_usage)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_usage
- "${ABSL_PATH_PREFIX}libabsl_flags_usage${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::flags_parse)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::flags_parse
- "${ABSL_PATH_PREFIX}libabsl_flags_parse${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::hash)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::hash
- "${ABSL_PATH_PREFIX}libabsl_hash${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::city)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::city
- "${ABSL_PATH_PREFIX}libabsl_city${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::int128)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::int128
- "${ABSL_PATH_PREFIX}libabsl_int128${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_distributions)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_distributions
- "${ABSL_PATH_PREFIX}libabsl_random_distributions${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_seed_gen_exception)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_seed_gen_exception
- "${ABSL_PATH_PREFIX}libabsl_random_seed_gen_exception${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_seed_sequences)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_seed_sequences
- "${ABSL_PATH_PREFIX}libabsl_random_seed_sequences${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_seed_material)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_seed_material
- "${ABSL_PATH_PREFIX}libabsl_random_internal_seed_material${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_pool_urbg)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_pool_urbg
- "${ABSL_PATH_PREFIX}libabsl_random_internal_pool_urbg${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_platform)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_platform
- "${ABSL_PATH_PREFIX}libabsl_random_internal_platform${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_randen)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_randen
- "${ABSL_PATH_PREFIX}libabsl_random_internal_randen${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_randen_slow)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_randen_slow
- "${ABSL_PATH_PREFIX}libabsl_random_internal_randen_slow${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_randen_hwaes)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_randen_hwaes
- "${ABSL_PATH_PREFIX}libabsl_random_internal_randen_hwaes${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_randen_hwaes_impl)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_randen_hwaes_impl
- "${ABSL_PATH_PREFIX}libabsl_random_internal_randen_hwaes_impl${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::random_internal_distribution_test_util)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::random_internal_distribution_test_util
- "${ABSL_PATH_PREFIX}libabsl_random_internal_distribution_test_util${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::status)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::status
- "${ABSL_PATH_PREFIX}libabsl_status${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::statusor)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::statusor
- "${ABSL_PATH_PREFIX}libabsl_statusor${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::strings)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::strings
- "${ABSL_PATH_PREFIX}libabsl_strings${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::strings_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::strings_internal
- "${ABSL_PATH_PREFIX}libabsl_strings_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::str_format_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::str_format_internal
- "${ABSL_PATH_PREFIX}libabsl_str_format_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::cord)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::cord
- "${ABSL_PATH_PREFIX}libabsl_cord${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::graphcycles_internal)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::graphcycles_internal
- "${ABSL_PATH_PREFIX}libabsl_graphcycles_internal${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::synchronization)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::synchronization
- "${ABSL_PATH_PREFIX}libabsl_synchronization${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::time)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::time
- "${ABSL_PATH_PREFIX}libabsl_time${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::civil_time)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::civil_time
- "${ABSL_PATH_PREFIX}libabsl_civil_time${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::time_zone)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::time_zone
- "${ABSL_PATH_PREFIX}libabsl_time_zone${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::bad_any_cast_impl)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::bad_any_cast_impl
- "${ABSL_PATH_PREFIX}libabsl_bad_any_cast_impl${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::bad_optional_access)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::bad_optional_access
- "${ABSL_PATH_PREFIX}libabsl_bad_optional_access${ABSL_LIB_SUFFIX}"
-)
-list(APPEND _IMPORT_CHECK_TARGETS absl::bad_variant_access)
-list(APPEND _IMPORT_CHECK_FILES_FOR_absl::bad_variant_access
- "${ABSL_PATH_PREFIX}libabsl_bad_variant_access${ABSL_LIB_SUFFIX}"
-)
-
-# Loop over all imported files and verify that they actually exist
-foreach(target ${_IMPORT_CHECK_TARGETS} )
- foreach(file ${_IMPORT_CHECK_FILES_FOR_${target}} )
- if(NOT EXISTS "${file}" )
- message(FATAL_ERROR "The imported target \"${target}\" references the file
- \"${file}\"
-but this file does not exist. Possible reasons include:
-* The file was deleted, renamed, or moved to another location.
-* An install or uninstall procedure did not complete successfully.
-* The installation package was faulty and contained
- \"${CMAKE_CURRENT_LIST_FILE}\"
-but not all the files it references.
-")
- endif()
- endforeach()
- unset(_IMPORT_CHECK_FILES_FOR_${target})
-endforeach()
-unset(_IMPORT_CHECK_TARGETS)
-
diff --git a/cmake/FindCares.cmake b/cmake/FindCares.cmake
index 97beb316..5122c420 100644
--- a/cmake/FindCares.cmake
+++ b/cmake/FindCares.cmake
@@ -7,7 +7,7 @@ include(SelectLibraryConfigurations)
include(FindPackageHandleStandardArgs)
find_path(CARES_INCLUDE_DIR NAMES ares.h)
-find_library(CARES_LIBRARY NAMES libcares.a)
+find_library(CARES_LIBRARY NAMES libcares_static.a libcares.a)
SELECT_LIBRARY_CONFIGURATIONS(Cares)
diff --git a/cmake/FindEthash.cmake b/cmake/FindEthash.cmake
new file mode 100644
index 00000000..d04eed65
--- /dev/null
+++ b/cmake/FindEthash.cmake
@@ -0,0 +1,20 @@
+# Find the Ethash libraries and define the following variables:
+# ETHASH_FOUND
+# ETHASH_INCLUDE_DIR
+# ETHASH_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(ETHASH_INCLUDE_DIR NAMES ethash.h PATH_SUFFIXES ethash)
+find_library(ETHASH_LIBRARY NAMES libethash.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Ethash)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Ethash DEFAULT_MSG
+ ETHASH_LIBRARY ETHASH_INCLUDE_DIR
+)
+
+mark_as_advanced(ETHASH_INCLUDE_DIR ETHASH_LIBRARY)
+
diff --git a/cmake/FindEvmc.cmake b/cmake/FindEvmc.cmake
new file mode 100644
index 00000000..56e606ce
--- /dev/null
+++ b/cmake/FindEvmc.cmake
@@ -0,0 +1,22 @@
+# Find the EVMC libraries and define the following variables:
+# EVMC_FOUND
+# EVMC_INCLUDE_DIR
+# EVMC_INSTRUCTIONS_LIBRARY
+# EVMC_LOADER_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(EVMC_INCLUDE_DIR NAMES evmc.h PATH_SUFFIXES evmc)
+find_library(EVMC_INSTRUCTIONS_LIBRARY NAMES libevmc-instructions.a)
+find_library(EVMC_LOADER_LIBRARY NAMES libevmc-loader.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Evmc)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Evmc DEFAULT_MSG
+ EVMC_INSTRUCTIONS_LIBRARY EVMC_LOADER_LIBRARY EVMC_INCLUDE_DIR
+)
+
+mark_as_advanced(EVMC_INCLUDE_DIR EVMC_INSTRUCTIONS_LIBRARY EVMC_LOADER_LIBRARY)
+
diff --git a/cmake/FindEvmone.cmake b/cmake/FindEvmone.cmake
new file mode 100644
index 00000000..50c71f85
--- /dev/null
+++ b/cmake/FindEvmone.cmake
@@ -0,0 +1,20 @@
+# Find the EVMOne library and define the following variables:
+# EVMONE_FOUND
+# EVMONE_INCLUDE_DIR
+# EVMONE_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(EVMONE_INCLUDE_DIR NAMES evmone.h PATH_SUFFIXES evmone)
+find_library(EVMONE_LIBRARY NAMES libevmone.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Evmone)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Evmone DEFAULT_MSG
+ EVMONE_LIBRARY EVMONE_INCLUDE_DIR
+)
+
+mark_as_advanced(EVMONE_INCLUDE_DIR EVMONE_LIBRARY)
+
diff --git a/cmake/FindKeccak.cmake b/cmake/FindKeccak.cmake
new file mode 100644
index 00000000..4a81c826
--- /dev/null
+++ b/cmake/FindKeccak.cmake
@@ -0,0 +1,20 @@
+# Find the Keccak libraries and define the following variables:
+# KECCAK_FOUND
+# KECCAK_INCLUDE_DIR
+# KECCAK_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(KECCAK_INCLUDE_DIR NAMES keccak.h PATH_SUFFIXES ethash)
+find_library(KECCAK_LIBRARY NAMES libkeccak.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Keccak)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Keccak DEFAULT_MSG
+ KECCAK_LIBRARY KECCAK_INCLUDE_DIR
+)
+
+mark_as_advanced(KECCAK_INCLUDE_DIR KECCAK_LIBRARY)
+
diff --git a/cmake/FindSecp256k1.cmake b/cmake/FindSecp256k1.cmake
new file mode 100644
index 00000000..835ee5dd
--- /dev/null
+++ b/cmake/FindSecp256k1.cmake
@@ -0,0 +1,20 @@
+# Find the secp256k1 library and define the following variables:
+# SECP256K1_FOUND
+# SECP256K1_INCLUDE_DIR
+# SECP256K1_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(SECP256K1_INCLUDE_DIR NAMES secp256k1.h)
+find_library(SECP256K1_LIBRARY NAMES libsecp256k1.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Secp256k1)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Secp256k1 DEFAULT_MSG
+ SECP256K1_LIBRARY SECP256K1_INCLUDE_DIR
+)
+
+mark_as_advanced(SECP256K1_INCLUDE_DIR SECP256K1_LIBRARY)
+
diff --git a/cmake/FindSpeedb.cmake b/cmake/FindSpeedb.cmake
new file mode 100644
index 00000000..cb2959af
--- /dev/null
+++ b/cmake/FindSpeedb.cmake
@@ -0,0 +1,20 @@
+# Find the Speedb library and define the following variables:
+# SPEEDB_FOUND
+# SPEEDB_INCLUDE_DIR
+# SPEEDB_LIBRARY
+
+include(SelectLibraryConfigurations)
+include(FindPackageHandleStandardArgs)
+
+find_path(SPEEDB_INCLUDE_DIR NAMES db.h PATH_SUFFIXES rocksdb)
+find_library(SPEEDB_LIBRARY NAMES libspeedb.a)
+
+SELECT_LIBRARY_CONFIGURATIONS(Speedb)
+
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ Speedb DEFAULT_MSG
+ SPEEDB_LIBRARY SPEEDB_INCLUDE_DIR
+)
+
+mark_as_advanced(SPEEDB_INCLUDE_DIR SPEEDB_LIBRARY)
+
diff --git a/cmake/ProjectBoostCertify.cmake b/cmake/ProjectBoostCertify.cmake
deleted file mode 100644
index 89f610a0..00000000
--- a/cmake/ProjectBoostCertify.cmake
+++ /dev/null
@@ -1,39 +0,0 @@
-include(ExternalProject)
-
-if (MSVC)
- set(_only_release_configuration -DCMAKE_CONFIGURATION_TYPES=Release)
- set(_overwrite_install_command INSTALL_COMMAND cmake --build --config Release --target install)
-endif()
-
-set(prefix "${CMAKE_BINARY_DIR}/deps")
-set(CERTIFY_LIBRARY "${prefix}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}certify${CMAKE_STATIC_LIBRARY_SUFFIX}")
-set(CERTIFY_INCLUDE_DIR "${prefix}/include")
-
-ExternalProject_Add(
- certify
- PREFIX "${prefix}"
- URL https://github.com/djarek/certify/archive/97f5eebfd99a5d6e99d07e4820240994e4e59787.tar.gz
- URL_HASH SHA256=1c964b0aba47cd90081eaacc4946ea8e58d0c14fb267856f26515219e8ca1d68
- PATCH_COMMAND patch -p1 < ${CMAKE_CURRENT_SOURCE_DIR}/cmake/certifyPatch.patch
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=
- -DCMAKE_POSITION_INDEPENDENT_CODE=${BUILD_SHARED_LIBS}
- -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
- -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
- -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
- -DOPENSSL_ROOT_DIR=${OPENSSL_ROOT_DIR}
- ${_only_release_configuration}
- ${_windows_configuration}
- -DCMAKE_INSTALL_LIBDIR=lib
- LOG_CONFIGURE 1
- ${_overwrite_install_command}
- LOG_INSTALL 1
- BUILD_BYPRODUCTS "${CERTIFY_BYPRODUCTS}"
-)
-
-# Create imported library
-add_library(Certify STATIC IMPORTED)
-file(MAKE_DIRECTORY "${CERTIFY_INCLUDE_DIR}") # Must exist.
-set_property(TARGET Certify PROPERTY IMPORTED_CONFIGURATIONS Release)
-set_property(TARGET Certify PROPERTY IMPORTED_LOCATION_RELEASE "${CERTIFY_LIBRARY}")
-set_property(TARGET Certify PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${CERTIFY_INCLUDE_DIR}")
-add_dependencies(Certify certify ${CERTIFY_BYPRODUCTS})
diff --git a/cmake/ProjectEthash.cmake b/cmake/ProjectEthash.cmake
deleted file mode 100644
index 55f99dc6..00000000
--- a/cmake/ProjectEthash.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-include(ExternalProject)
-
-if (MSVC)
- set(_only_release_configuration -DCMAKE_CONFIGURATION_TYPES=Release)
- set(_overwrite_install_command INSTALL_COMMAND cmake --build --config Release --target install)
-endif()
-
-set(prefix "${CMAKE_BINARY_DIR}/deps")
-set(ETHASH_LIBRARY "${prefix}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}ethash${CMAKE_STATIC_LIBRARY_SUFFIX}")
-set(ETHASH_INCLUDE_DIR "${prefix}/include")
-set(ETHASH_BYPRODUCTS "${prefix}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}keccak${CMAKE_STATIC_LIBRARY_SUFFIX}")
-
-ExternalProject_Add(
- ethash
- PREFIX "${prefix}"
- DOWNLOAD_NAME ethash-v1.0.0.tar.gz
- DOWNLOAD_NO_PROGRESS 1
- URL https://github.com/chfast/ethash/archive/refs/tags/v1.0.0.tar.gz
- URL_HASH SHA256=36071d9c4aaf3fd9e43155d7c2604404d6ab70613e6978cff964c5814f461a1a
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${prefix}
- -DCMAKE_POSITION_INDEPENDENT_CODE=${BUILD_SHARED_LIBS}
- -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
- -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
- -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
- -DETHASH_BUILD_TESTS=OFF
- -DETHASH_BUILD_ETHASH=ON
- -DCMAKE_INSTALL_LIBDIR=lib
- ${_only_release_configuration}
- LOG_CONFIGURE 1
- ${_overwrite_install_command}
- LOG_INSTALL 1
- BUILD_BYPRODUCTS "${ETHASH_LIBRARY}"
- BUILD_BYPRODUCTS "${ETHASH_BYPRODUCTS}"
-)
-
-# Create imported library
-add_library(Ethash STATIC IMPORTED)
-file(MAKE_DIRECTORY "${ETHASH_INCLUDE_DIR}") # Must exist.
-set_property(TARGET Ethash PROPERTY IMPORTED_CONFIGURATIONS Release)
-set_property(TARGET Ethash PROPERTY IMPORTED_LOCATION_RELEASE "${ETHASH_LIBRARY}")
-set_property(TARGET Ethash PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${ETHASH_INCLUDE_DIR}")
-add_dependencies(Ethash ethash ${ETHASH_LIBRARY} ${ETHASH_BYPRODUCTS})
diff --git a/cmake/ProjectSecp256k1.cmake b/cmake/ProjectSecp256k1.cmake
deleted file mode 100644
index 98e4771b..00000000
--- a/cmake/ProjectSecp256k1.cmake
+++ /dev/null
@@ -1,40 +0,0 @@
-include(ExternalProject)
-
-if (MSVC)
- set(_only_release_configuration -DCMAKE_CONFIGURATION_TYPES=Release)
- set(_overwrite_install_command INSTALL_COMMAND cmake --build --config Release --target install)
-endif()
-
-set(prefix "${CMAKE_BINARY_DIR}/deps")
-set(SECP256K1_LIBRARY "${prefix}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}secp256k1${CMAKE_STATIC_LIBRARY_SUFFIX}")
-set(SECP256K1_INCLUDE_DIR "${prefix}/include")
-
-ExternalProject_Add(
- secp256k1
- PREFIX "${prefix}"
- DOWNLOAD_NAME secp256k1-ac8ccf29.tar.gz
- DOWNLOAD_NO_PROGRESS 1
- GIT_REPOSITORY https://github.com/bitcoin-core/secp256k1
- GIT_TAG "bdf39000b9c6a0818e7149ccb500873d079e6e85"
- PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different
- ${CMAKE_CURRENT_LIST_DIR}/secp256k1/CMakeLists.txt
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${prefix}
- -DCMAKE_POSITION_INDEPENDENT_CODE=${BUILD_SHARED_LIBS}
- -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
- -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
- -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
- ${_only_release_configuration}
- -DCMAKE_INSTALL_LIBDIR=lib
- LOG_CONFIGURE 1
- ${_overwrite_install_command}
- LOG_INSTALL 1
- BUILD_BYPRODUCTS "${SECP256K1_LIBRARY}"
-)
-
-# Create imported library
-add_library(Secp256k1 STATIC IMPORTED)
-file(MAKE_DIRECTORY "${SECP256K1_INCLUDE_DIR}") # Must exist.
-set_property(TARGET Secp256k1 PROPERTY IMPORTED_CONFIGURATIONS Release)
-set_property(TARGET Secp256k1 PROPERTY IMPORTED_LOCATION_RELEASE "${SECP256K1_LIBRARY}")
-set_property(TARGET Secp256k1 PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${SECP256K1_INCLUDE_DIR}")
-add_dependencies(Secp256k1 secp256k1)
diff --git a/cmake/ProjectSpeedb.cmake b/cmake/ProjectSpeedb.cmake
deleted file mode 100644
index 16ec6a6d..00000000
--- a/cmake/ProjectSpeedb.cmake
+++ /dev/null
@@ -1,48 +0,0 @@
-include(ExternalProject)
-
-if (MSVC)
- set(_only_release_configuration -DCMAKE_CONFIGURATION_TYPES=Release)
- set(_overwrite_install_command INSTALL_COMMAND cmake --build --config Release --target install)
-endif()
-
-set(prefix "${CMAKE_BINARY_DIR}/deps")
-set(SPEEDB_LIBRARY "${prefix}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}speedb${CMAKE_STATIC_LIBRARY_SUFFIX}")
-set(SPEEDB_INCLUDE_DIR "${prefix}/include")
-
-ExternalProject_Add(
- speedb
- PREFIX "${prefix}"
- DOWNLOAD_NAME speedb-2.4.1.tar.gz
- DOWNLOAD_NO_PROGRESS 1
- GIT_REPOSITORY https://github.com/speedb-io/speedb
- GIT_TAG "speedb/v2.4.1"
- #URL https://github.com/speedb-io/speedb/releases/download/speedb/v2.4.1/speedb-2.4.1.tar.gz
- #URL_HASH SHA256=4e984515bbed0942d4ba22d8a219c752b0679d261a4baf7ac72c206f5ab1cd04
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${prefix}
- -DCMAKE_POSITION_INDEPENDENT_CODE=${BUILD_SHARED_LIBS}
- -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
- -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
- -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
- -DCMAKE_BUILD_TYPE=Release
- ${_only_release_configuration}
- -DCMAKE_INSTALL_LIBDIR=lib
- -DROCKSDB_BUILD_SHARED=OFF
- -DFAIL_ON_WARNINGS=OFF
- -DWITH_GFLAGS=OFF
- -DWITH_RUNTIME_DEBUG=OFF
- -DWITH_TESTS=OFF
- -DWITH_BENCHMARK_TOOLS=OFF
- -DWITH_CORE_TOOLS=OFF
- -DWITH_TOOLS=OFF
- -DWITH_TRACE_TOOLS=OFF
- ${_overwrite_install_command}
- BUILD_BYPRODUCTS "${SPEEDB_LIBRARY}"
- UPDATE_COMMAND ""
-)
-
-# Create imported library
-add_library(Speedb STATIC IMPORTED)
-set_property(TARGET Speedb PROPERTY IMPORTED_CONFIGURATIONS Release)
-set_property(TARGET Speedb PROPERTY IMPORTED_LOCATION_RELEASE "${SPEEDB_LIBRARY}")
-set_property(TARGET Speedb PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${SPEEDB_INCLUDE_DIR}")
-add_dependencies(Speedb speedb SPEEDB_LIBRARY)
diff --git a/cmake/certifyPatch.patch b/cmake/certifyPatch.patch
deleted file mode 100644
index 270d2c30..00000000
--- a/cmake/certifyPatch.patch
+++ /dev/null
@@ -1,121 +0,0 @@
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 7cceb2c..630f262 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -59,13 +59,13 @@ write_basic_package_version_file(
- COMPATIBILITY AnyNewerVersion)
-
- install(FILES
-- "netutilsConfig.cmake"
-+ "certifyConfig.cmake"
- "${CMAKE_BINARY_DIR}/certifyConfigVersion.cmake"
- DESTINATION lib/cmake/certify)
-
- install(DIRECTORY ${CMAKE_SOURCE_DIR}/include/
- DESTINATION include
-- FILES_MATCHING PATTERN "*.hpp")
-+ FILES_MATCHING PATTERN "*.hpp" PATTERN "*.ipp")
-
- install(TARGETS core
- EXPORT certifyTargets
-diff --git a/certifyConfig.cmake b/certifyConfig.cmake
-index 272dd90..87313e8 100644
---- a/certifyConfig.cmake
-+++ b/certifyConfig.cmake
-@@ -1,6 +1,7 @@
- include(CMakeFindDependencyMacro)
-
--find_dependency(Boost COMPONENTS system)
-+find_dependency(Boost COMPONENTS system filesystem date_time)
-+find_dependency(OpenSSL)
- find_dependency(Threads)
-
--include("${CMAKE_CURRENT_LIST_DIR}/certify-Targets.cmake")
-+include("${CMAKE_CURRENT_LIST_DIR}/certifyTargets.cmake")
-diff --git a/include/boost/certify/crlset_parser.hpp b/include/boost/certify/crlset_parser.hpp
-index 7174944..29ab461 100644
---- a/include/boost/certify/crlset_parser.hpp
-+++ b/include/boost/certify/crlset_parser.hpp
-@@ -4,6 +4,7 @@
- #include
- #include
- #include
-+#include
-
- namespace boost
- {
-diff --git a/include/boost/certify/detail/keystore_windows.ipp b/include/boost/certify/detail/keystore_windows.ipp
-index efcc697..625ef00 100644
---- a/include/boost/certify/detail/keystore_windows.ipp
-+++ b/include/boost/certify/detail/keystore_windows.ipp
-@@ -6,6 +6,7 @@
-
- #include
- #include
-+#include
-
- namespace boost
- {
-diff --git a/include/boost/certify/detail/spki_blacklist.hpp b/include/boost/certify/detail/spki_blacklist.hpp
-index 7833d80..2f456e9 100644
---- a/include/boost/certify/detail/spki_blacklist.hpp
-+++ b/include/boost/certify/detail/spki_blacklist.hpp
-@@ -2,6 +2,7 @@
- #define BOOST_CERTIFY_DETAIL_SPKI_BLACKLIST_HPP
-
- #include
-+#include
-
- namespace boost
- {
-diff --git a/include/boost/certify/detail/spki_digest.hpp b/include/boost/certify/detail/spki_digest.hpp
-index d9e4ba9..5f937c2 100644
---- a/include/boost/certify/detail/spki_digest.hpp
-+++ b/include/boost/certify/detail/spki_digest.hpp
-@@ -5,6 +5,7 @@
- #include
- #include
- #include
-+#include
-
- namespace boost
- {
-diff --git a/include/boost/certify/impl/crlset_parser.ipp b/include/boost/certify/impl/crlset_parser.ipp
-index d41fb7f..853894e 100644
---- a/include/boost/certify/impl/crlset_parser.ipp
-+++ b/include/boost/certify/impl/crlset_parser.ipp
-@@ -2,6 +2,7 @@
- #define BOOST_CERTIFY_IMPL_CRLSET_PARSER_IPP
-
- #include
-+#include
-
- namespace boost
- {
-diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
-index c1cda26..03be79c 100644
---- a/tests/CMakeLists.txt
-+++ b/tests/CMakeLists.txt
-@@ -11,5 +11,9 @@ function (certify_verify_add_test test_file)
- COMMAND ${target_name})
- endfunction(certify_verify_add_test)
-
-+certify_verify_add_test(extensions.cpp)
- certify_verify_add_test(https_verification_success.cpp)
- certify_verify_add_test(https_verification_fail.cpp)
-+certify_verify_add_test(crl_set_parser.cpp)
-+certify_verify_add_test(detail_spki_digest.cpp)
-+certify_verify_add_test(status_cache.cpp)
-\ No newline at end of file
-diff --git a/tests/crl_set_parser.cpp b/tests/crl_set_parser.cpp
-index 4e5b221..8f29a27 100644
---- a/tests/crl_set_parser.cpp
-+++ b/tests/crl_set_parser.cpp
-@@ -5,6 +5,7 @@
- #include
- #include
- #include
-+#include
-
- const std::uint8_t array[46] = {
- 0x02, 0x00, 0x7b, 0x7d, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
diff --git a/cmake/secp256k1/CMakeLists.txt b/cmake/secp256k1/CMakeLists.txt
deleted file mode 100644
index 14267f67..00000000
--- a/cmake/secp256k1/CMakeLists.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-# This CMake config file for secp256k1 project from https://github.com/bitcoin-core/secp256k1
-#
-# The secp256k1 project has been configured following official docs with following options:
-#
-# ./configure --disable-shared --disable-tests --disable-coverage --disable-openssl-tests --disable-exhaustive-tests --disable-jni --with-bignum=no --with-field=64bit --with-scalar=64bit --with-asm=no
-#
-# Build static context:
-# make src/ecmult_static_context.h
-#
-# Copy src/ecmult_static_context.h and src/libsecp256k1-config.h
-#
-# Copy CFLAGS from Makefile to COMPILE_OPTIONS.
-
-list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/modules)
-
-cmake_minimum_required(VERSION 3.4)
-project(secp256k1 LANGUAGES C)
-
-set(COMMON_COMPILE_FLAGS ENABLE_MODULE_RECOVERY ENABLE_MODULE_ECDH USE_ECMULT_STATIC_PRECOMPUTATION USE_FIELD_INV_BUILTIN USE_NUM_NONE USE_SCALAR_INV_BUILTIN)
-if (MSVC)
- set(COMPILE_FLAGS USE_FIELD_10X26 USE_SCALAR_8X32)
- set(COMPILE_OPTIONS "")
-else()
- set(COMPILE_FLAGS USE_FIELD_5X52 USE_SCALAR_4X64 HAVE_BUILTIN_EXPECT HAVE___INT128)
- set(COMPILE_OPTIONS -O2 -W -std=c89 -pedantic -Wall -Wextra -Wcast-align -Wnested-externs -Wshadow -Wstrict-prototypes -Wno-unused-function -Wno-long-long -Wno-overlength-strings -fvisibility=hidden)
-endif()
-
-add_library(secp256k1 STATIC src/secp256k1.c src/precomputed_ecmult.h src/precomputed_ecmult_gen.h src/precomputed_ecmult.c src/precomputed_ecmult_gen.c)
-target_compile_definitions(secp256k1 PRIVATE ${COMMON_COMPILE_FLAGS} ${COMPILE_FLAGS})
-target_include_directories(secp256k1 PRIVATE ${CMAKE_SOURCE_DIR} ${CMAKE_SOURCE_DIR}/src)
-target_compile_options(secp256k1 PRIVATE ${COMPILE_OPTIONS})
-
-install(TARGETS secp256k1 ARCHIVE DESTINATION lib)
-install(DIRECTORY include/ DESTINATION include)
-
diff --git a/deployedcontracts b/deployedcontracts
new file mode 100644
index 00000000..a115e239
--- /dev/null
+++ b/deployedcontracts
@@ -0,0 +1,5 @@
+
+BTVEnergy: 0x33F2f10CbcC56D3d615594FF271200eF11cb2610
+BTVPlayer: 0x9bD17306692d837743b6fb0597c8dA3279a8e501
+BTVProposals: 0x2668009cCeCAc872A37CeD6a5c3DF4682C6dfe89
+BuildTheVoid: 0x30C37F6B1d6321C4398238525046c604C7b26150
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index ecf7fddf..34660f4a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,17 +1,15 @@
-# Copyright (c) [2023-2024] [Sparq Network]
+# Copyright (c) [2023-2024] [AppLayer Developers]
# This software is distributed under the MIT License.
# See the LICENSE.txt file in the project root for more information.
-version: '3'
-
services:
- orbitersdk-cpp-dev:
- build:
+ bdk:
+ build:
context: .
- ports:
- - "8080-8099:8080-8099"
- - "8110-8111:8110-8111"
+ dockerfile: docker/bdk_cpp.dockerfile
volumes:
- - :/orbitersdk-volume
+ - .:/bdk-cpp
+ entrypoint: ["/entrypoint.sh"]
+ working_dir: /bdk-cpp
tty: true
stdin_open: true
diff --git a/docker/bdk_cpp.dockerfile b/docker/bdk_cpp.dockerfile
new file mode 100644
index 00000000..aaf2a1db
--- /dev/null
+++ b/docker/bdk_cpp.dockerfile
@@ -0,0 +1,55 @@
+# Copyright (c) [2023-2024] [AppLayer Developers]
+# This software is distributed under the MIT License.
+# See the LICENSE.txt file in the project root for more information.
+
+# Start from a base Debian image
+FROM debian:trixie
+
+# Set shell to Bash because Docker standards are stupid
+SHELL ["/bin/bash", "-c"]
+
+# Update the system
+RUN apt-get update && apt-get upgrade -y
+
+# Install Docker-specific dependencies
+RUN apt-get -y install nano vim unison curl jq unzip wget
+RUN apt-get install -y python3 python3-pip python3-venv
+
+# Create venv and install gcovr (for SonarQube)
+# Locked at 5.0 due to https://github.com/gcovr/gcovr/issues/583#issuecomment-1079974142
+RUN pip install gcovr==5.0 --break-system-packages
+
+# Copy the deps script to the container
+COPY scripts/deps.sh /
+
+# Install dependencies
+RUN bash ./deps.sh --install
+
+# Create a directory for sonarcloud
+RUN mkdir /root/.sonar
+
+# Copy sonarcloud scripts to sonarcloud
+COPY scripts/sonarcloud.sh /sonarcloud
+
+# Copy Unison configuration file
+COPY sync.prf /root/.unison/sync.prf
+
+# Copy the entrypoint script
+COPY docker/entrypoint.sh /entrypoint.sh
+
+# Copy the entrypoint script
+COPY scripts/sonarcloud.sh /sonarcloud.sh
+
+# Execute sonarcloud install script
+RUN /sonarcloud.sh
+
+# Update running paths
+ENV PATH=/root/.sonar/build-wrapper-linux-x86:$PATH
+ENV PATH=/root/.sonar/sonar-scanner-6.2.1.4610-linux-x64/bin:$PATH
+ENV PATH=/root/.sonar/sonar-scanner-6.2.0.4584-linux-x64/bin:$PATH
+ENV PATH=/root/.sonar/sonar-scanner-7.0.1.4817-linux-x64/bin:$PATH
+ENV PATH=/usr/local/bin:$PATH
+
+# Copy the entrypoint script
+COPY docker/entrypoint.sh /entrypoint.sh
+
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
new file mode 100755
index 00000000..b947c7f3
--- /dev/null
+++ b/docker/entrypoint.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+# Start Unison in the background, ignoring files that should not be synced
+nohup unison -repeat 1 /bdk-volume /bdk-cpp -auto -batch \
+ -ignore 'Name {build}' \
+ -ignore 'Name {build_local_testnet}' \
+ -ignore 'Name {.vscode}' \
+ -ignore 'Name {proto/metrics.pb.cc}' \
+ -ignore 'Name {proto/metrics.pb.h}' \
+ -ignore 'Name {proto/vm.grpc.pb.cc}' \
+ -ignore 'Name {proto/vm.grpc.pb.h}' \
+ -ignore 'Name {proto/vm.pb.cc}' \
+ -ignore 'Name {proto/vm.pb.h}' \
+ -ignore 'Name {storageVM}' \
+ -ignore 'Name {info.txt}' \
+ -ignore 'Name {.vscode}' \
+ -ignore 'Name {vmInfo.txt}' \
+ -ignore 'Name {*.[pP][bB].[hH]}' \
+ -ignore 'Name {tests/node_modules}' \
+ -ignore 'Name {depends/x86_64-pc-linux-gnu}' \
+ -ignore 'Name {scripts/AIO-setup.log}' \
+ -ignore 'Name {compile_commands.json}' \
+ -ignore 'Name {.cache}' \
+ -ignore 'Name {Dockerfile}' \
+ -ignore 'Name {docker-compose.yml}' \
+ -ignore 'Name {sync.prf}' \
+ -ignore 'Name {kateproject}' \
+ -ignore 'Name {*.o}' \
+ -ignore 'Name {*.gch}' \
+> /dev/null 2>&1 & /bin/bash
diff --git a/proto/aliasreader.proto b/proto/aliasreader.proto
deleted file mode 100644
index 926f86b1..00000000
--- a/proto/aliasreader.proto
+++ /dev/null
@@ -1,23 +0,0 @@
-syntax = "proto3";
-
-package aliasreader;
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/aliasreader";
-
-service AliasReader {
- rpc Lookup(Alias) returns (ID);
- rpc PrimaryAlias(ID) returns (Alias);
- rpc Aliases(ID) returns (AliasList);
-}
-
-message ID {
- bytes id = 1;
-}
-
-message Alias {
- string alias = 1;
-}
-
-message AliasList {
- repeated string aliases = 1;
-}
diff --git a/proto/appsender.proto b/proto/appsender.proto
deleted file mode 100644
index 338d9e9a..00000000
--- a/proto/appsender.proto
+++ /dev/null
@@ -1,65 +0,0 @@
-syntax = "proto3";
-
-package appsender;
-
-import "google/protobuf/empty.proto";
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/appsender";
-
-service AppSender {
- rpc SendAppRequest(SendAppRequestMsg) returns (google.protobuf.Empty);
- rpc SendAppResponse(SendAppResponseMsg) returns (google.protobuf.Empty);
- rpc SendAppGossip(SendAppGossipMsg) returns (google.protobuf.Empty);
- rpc SendAppGossipSpecific(SendAppGossipSpecificMsg) returns (google.protobuf.Empty);
-
- rpc SendCrossChainAppRequest(SendCrossChainAppRequestMsg) returns (google.protobuf.Empty);
- rpc SendCrossChainAppResponse(SendCrossChainAppResponseMsg) returns (google.protobuf.Empty);
-}
-
-message SendAppRequestMsg {
- // The nodes to send this request to
- repeated bytes node_ids = 1;
- // The ID of this request
- uint32 request_id = 2;
- // The request body
- bytes request = 3;
-}
-
-message SendAppResponseMsg {
- // The node to send a response to
- bytes node_id = 1;
- // ID of this request
- uint32 request_id = 2;
- // The response body
- bytes response = 3;
-}
-
-message SendAppGossipMsg {
- // The message body
- bytes msg = 1;
-}
-
-message SendAppGossipSpecificMsg {
- // The nodes to send this request to
- repeated bytes node_ids = 1;
- // The message body
- bytes msg = 2;
-}
-
-message SendCrossChainAppRequestMsg {
- // The chain to send this request to
- bytes chain_id = 1;
- // the ID of this request
- uint32 request_id = 2;
- // The request body
- bytes request = 3;
-}
-
-message SendCrossChainAppResponseMsg {
- // The chain to send this response to
- bytes chain_id = 1;
- // the ID of this request
- uint32 request_id = 2;
- // The response body
- bytes response = 3;
-}
diff --git a/proto/keystore.proto b/proto/keystore.proto
deleted file mode 100644
index c59f1b4d..00000000
--- a/proto/keystore.proto
+++ /dev/null
@@ -1,23 +0,0 @@
-syntax = "proto3";
-
-package keystore;
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/keystore";
-
-service Keystore {
- rpc GetDatabase(GetDatabaseRequest) returns (GetDatabaseResponse);
-}
-
-message GetDatabaseRequest {
- string username = 1;
- string password = 2;
-}
-
-message GetDatabaseResponse {
- // reserved for backward compatibility
- // avalanchego <=v1.7.9 used the field "1" as an id to identify the gRPC server
- // address which served the Database service via the now removed service broker
- reserved 1;
- // server_addr is the address of the gRPC server hosting the Database service
- string server_addr = 2;
-}
diff --git a/proto/messenger.proto b/proto/messenger.proto
deleted file mode 100644
index 027fcdeb..00000000
--- a/proto/messenger.proto
+++ /dev/null
@@ -1,15 +0,0 @@
-syntax = "proto3";
-
-package messenger;
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/messenger";
-
-service Messenger {
- rpc Notify(NotifyRequest) returns (NotifyResponse);
-}
-
-message NotifyRequest {
- uint32 message = 1;
-}
-
-message NotifyResponse {}
diff --git a/proto/metrics.proto b/proto/metrics.proto
deleted file mode 100644
index c9546f14..00000000
--- a/proto/metrics.proto
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2013 Prometheus Team
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto2";
-
-package io.prometheus.client;
-option java_package = "io.prometheus.client";
-option go_package = "github.com/prometheus/client_model/go;io_prometheus_client";
-
-import "google/protobuf/timestamp.proto";
-
-message LabelPair {
- optional string name = 1;
- optional string value = 2;
-}
-
-enum MetricType {
- COUNTER = 0;
- GAUGE = 1;
- SUMMARY = 2;
- UNTYPED = 3;
- HISTOGRAM = 4;
-}
-
-message Gauge {
- optional double value = 1;
-}
-
-message Counter {
- optional double value = 1;
- optional Exemplar exemplar = 2;
-}
-
-message Quantile {
- optional double quantile = 1;
- optional double value = 2;
-}
-
-message Summary {
- optional uint64 sample_count = 1;
- optional double sample_sum = 2;
- repeated Quantile quantile = 3;
-}
-
-message Untyped {
- optional double value = 1;
-}
-
-message Histogram {
- optional uint64 sample_count = 1;
- optional double sample_sum = 2;
- repeated Bucket bucket = 3; // Ordered in increasing order of upper_bound, +Inf bucket is optional.
-}
-
-message Bucket {
- optional uint64 cumulative_count = 1; // Cumulative in increasing order.
- optional double upper_bound = 2; // Inclusive.
- optional Exemplar exemplar = 3;
-}
-
-message Exemplar {
- repeated LabelPair label = 1;
- optional double value = 2;
- optional google.protobuf.Timestamp timestamp = 3; // OpenMetrics-style.
-}
-
-message Metric {
- repeated LabelPair label = 1;
- optional Gauge gauge = 2;
- optional Counter counter = 3;
- optional Summary summary = 4;
- optional Untyped untyped = 5;
- optional Histogram histogram = 7;
- optional int64 timestamp_ms = 6;
-}
-
-message MetricFamily {
- optional string name = 1;
- optional string help = 2;
- optional MetricType type = 3;
- repeated Metric metric = 4;
-}
diff --git a/proto/rpcdb.proto b/proto/rpcdb.proto
deleted file mode 100644
index c1800bd7..00000000
--- a/proto/rpcdb.proto
+++ /dev/null
@@ -1,122 +0,0 @@
-syntax = "proto3";
-
-package rpcdb;
-
-import "google/protobuf/empty.proto";
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/rpcdb";
-
-service Database {
- rpc Has(HasRequest) returns (HasResponse);
- rpc Get(GetRequest) returns (GetResponse);
- rpc Put(PutRequest) returns (PutResponse);
- rpc Delete(DeleteRequest) returns (DeleteResponse);
- rpc Compact(CompactRequest) returns (CompactResponse);
- rpc Close(CloseRequest) returns (CloseResponse);
- rpc HealthCheck(google.protobuf.Empty) returns (HealthCheckResponse);
- rpc WriteBatch(WriteBatchRequest) returns (WriteBatchResponse);
- rpc NewIteratorWithStartAndPrefix(NewIteratorWithStartAndPrefixRequest) returns (NewIteratorWithStartAndPrefixResponse);
- rpc IteratorNext(IteratorNextRequest) returns (IteratorNextResponse);
- rpc IteratorError(IteratorErrorRequest) returns (IteratorErrorResponse);
- rpc IteratorRelease(IteratorReleaseRequest) returns (IteratorReleaseResponse);
-}
-
-message HasRequest {
- bytes key = 1;
-}
-
-message HasResponse {
- bool has = 1;
- uint32 err = 2;
-}
-
-message GetRequest {
- bytes key = 1;
-}
-
-message GetResponse {
- bytes value = 1;
- uint32 err = 2;
-}
-
-message PutRequest {
- bytes key = 1;
- bytes value = 2;
-}
-
-message PutResponse {
- uint32 err = 1;
-}
-
-message DeleteRequest {
- bytes key = 1;
-}
-
-message DeleteResponse {
- uint32 err = 1;
-}
-
-message CompactRequest {
- bytes start = 1;
- bytes limit = 2;
-}
-
-message CompactResponse {
- uint32 err = 1;
-}
-
-message CloseRequest {}
-
-message CloseResponse {
- uint32 err = 1;
-}
-
-message WriteBatchRequest {
- repeated PutRequest puts = 1;
- repeated DeleteRequest deletes = 2;
- int64 id = 3;
- bool continues = 4;
-}
-
-message WriteBatchResponse {
- uint32 err = 1;
-}
-
-message NewIteratorRequest {}
-
-message NewIteratorWithStartAndPrefixRequest {
- bytes start = 1;
- bytes prefix = 2;
-}
-
-message NewIteratorWithStartAndPrefixResponse {
- uint64 id = 1;
-}
-
-message IteratorNextRequest {
- uint64 id = 1;
-}
-
-message IteratorNextResponse {
- repeated PutRequest data = 1;
-}
-
-message IteratorErrorRequest {
- uint64 id = 1;
-}
-
-message IteratorErrorResponse {
- uint32 err = 1;
-}
-
-message IteratorReleaseRequest {
- uint64 id = 1;
-}
-
-message IteratorReleaseResponse {
- uint32 err = 1;
-}
-
-message HealthCheckResponse {
- bytes details = 1;
-}
diff --git a/proto/sharedmemory.proto b/proto/sharedmemory.proto
deleted file mode 100644
index 62e8277c..00000000
--- a/proto/sharedmemory.proto
+++ /dev/null
@@ -1,76 +0,0 @@
-syntax = "proto3";
-
-package sharedmemory;
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/sharedmemory";
-
-service SharedMemory {
- rpc Get(GetRequest) returns (GetResponse);
- rpc Indexed(IndexedRequest) returns (IndexedResponse);
- rpc Apply(ApplyRequest) returns (ApplyResponse);
-}
-
-message BatchPut {
- bytes key = 1;
- bytes value = 2;
-}
-
-message BatchDelete {
- bytes key = 1;
-}
-
-message Batch {
- repeated BatchPut puts = 1;
- repeated BatchDelete deletes = 2;
- int64 id = 3;
-}
-
-message AtomicRequest {
- repeated bytes remove_requests = 1;
- repeated Element put_requests = 2;
- bytes peer_chain_id = 3;
-}
-
-message Element {
- bytes key = 1;
- bytes value = 2;
- repeated bytes traits = 3;
-}
-
-message GetRequest {
- bytes peer_chain_id = 1;
- repeated bytes keys = 2;
- int64 id = 3;
- bool continues = 4;
-}
-
-message GetResponse {
- repeated bytes values = 1;
- bool continues = 2;
-}
-
-message IndexedRequest {
- bytes peer_chain_id = 1;
- repeated bytes traits = 2;
- bytes start_trait = 3;
- bytes start_key = 4;
- int32 limit = 5;
- int64 id = 6;
- bool continues = 7;
-}
-
-message IndexedResponse {
- repeated bytes values = 1;
- bytes last_trait = 2;
- bytes last_key = 3;
- bool continues = 4;
-}
-
-message ApplyRequest {
- repeated AtomicRequest requests = 1;
- repeated Batch batches = 2;
- int64 id = 3;
- bool continues = 4;
-}
-
-message ApplyResponse {}
diff --git a/proto/vm.proto b/proto/vm.proto
deleted file mode 100644
index b635e108..00000000
--- a/proto/vm.proto
+++ /dev/null
@@ -1,380 +0,0 @@
-syntax = "proto3";
-
-package vm;
-
-import "google/protobuf/empty.proto";
-import "google/protobuf/timestamp.proto";
-import "metrics.proto";
-
-option go_package = "github.com/ava-labs/avalanchego/proto/pb/vm";
-
-// ref. https://pkg.go.dev/github.com/ava-labs/avalanchego/snow/engine/snowman/block
-// ref. https://pkg.go.dev/github.com/ava-labs/avalanchego/snow/consensus/snowman#Block
-service VM {
- // ChainVM
- //
- // Initialize this VM.
- rpc Initialize(InitializeRequest) returns (InitializeResponse);
- // SetState communicates to VM its next state it starts
- rpc SetState(SetStateRequest) returns (SetStateResponse);
- // Shutdown is called when the node is shutting down.
- rpc Shutdown(google.protobuf.Empty) returns (google.protobuf.Empty);
- // Creates the HTTP handlers for custom chain network calls.
- rpc CreateHandlers(google.protobuf.Empty) returns (CreateHandlersResponse);
- // Creates the HTTP handlers for custom VM network calls.
- //
- // Note: RPC Chain VM Factory will start a new instance of the VM in a
- // seperate process which will populate the static handlers. After this
- // process is created other processes will be created to populate blockchains,
- // but they will not have the static handlers be called again.
- rpc CreateStaticHandlers(google.protobuf.Empty) returns (CreateStaticHandlersResponse);
- rpc Connected(ConnectedRequest) returns (google.protobuf.Empty);
- rpc Disconnected(DisconnectedRequest) returns (google.protobuf.Empty);
- // Attempt to create a new block from data contained in the VM.
- rpc BuildBlock(BuildBlockRequest) returns (BuildBlockResponse);
- // Attempt to create a block from a stream of bytes.
- rpc ParseBlock(ParseBlockRequest) returns (ParseBlockResponse);
- // Attempt to load a block.
- rpc GetBlock(GetBlockRequest) returns (GetBlockResponse);
- // Notify the VM of the currently preferred block.
- rpc SetPreference(SetPreferenceRequest) returns (google.protobuf.Empty);
- // Attempt to verify the health of the VM.
- rpc Health(google.protobuf.Empty) returns (HealthResponse);
- // Version returns the version of the VM.
- rpc Version(google.protobuf.Empty) returns (VersionResponse);
- // Notify this engine of a request for data from [nodeID].
- rpc AppRequest(AppRequestMsg) returns (google.protobuf.Empty);
- // Notify this engine that an AppRequest message it sent to [nodeID] with
- // request ID [requestID] failed.
- rpc AppRequestFailed(AppRequestFailedMsg) returns (google.protobuf.Empty);
- // Notify this engine of a response to the AppRequest message it sent to
- // [nodeID] with request ID [requestID].
- rpc AppResponse(AppResponseMsg) returns (google.protobuf.Empty);
- // Notify this engine of a gossip message from [nodeID].
- rpc AppGossip(AppGossipMsg) returns (google.protobuf.Empty);
- // Attempts to gather metrics from a VM.
- rpc Gather(google.protobuf.Empty) returns (GatherResponse);
- rpc CrossChainAppRequest(CrossChainAppRequestMsg) returns (google.protobuf.Empty);
- rpc CrossChainAppRequestFailed(CrossChainAppRequestFailedMsg) returns (google.protobuf.Empty);
- rpc CrossChainAppResponse(CrossChainAppResponseMsg) returns (google.protobuf.Empty);
-
- // BatchedChainVM
- rpc GetAncestors(GetAncestorsRequest) returns (GetAncestorsResponse);
- rpc BatchedParseBlock(BatchedParseBlockRequest) returns (BatchedParseBlockResponse);
-
- // HeightIndexedChainVM
- rpc VerifyHeightIndex(google.protobuf.Empty) returns (VerifyHeightIndexResponse);
- rpc GetBlockIDAtHeight(GetBlockIDAtHeightRequest) returns (GetBlockIDAtHeightResponse);
-
- // StateSyncableVM
- //
- // StateSyncEnabled indicates whether the state sync is enabled for this VM.
- rpc StateSyncEnabled(google.protobuf.Empty) returns (StateSyncEnabledResponse);
- // GetOngoingSyncStateSummary returns an in-progress state summary if it exists.
- rpc GetOngoingSyncStateSummary(google.protobuf.Empty) returns (GetOngoingSyncStateSummaryResponse);
- // GetLastStateSummary returns the latest state summary.
- rpc GetLastStateSummary(google.protobuf.Empty) returns (GetLastStateSummaryResponse);
- // ParseStateSummary parses a state summary out of [summaryBytes].
- rpc ParseStateSummary(ParseStateSummaryRequest) returns (ParseStateSummaryResponse);
- // GetStateSummary retrieves the state summary that was generated at height
- // [summaryHeight].
- rpc GetStateSummary(GetStateSummaryRequest) returns (GetStateSummaryResponse);
-
- // Block
- rpc BlockVerify(BlockVerifyRequest) returns (BlockVerifyResponse);
- rpc BlockAccept(BlockAcceptRequest) returns (google.protobuf.Empty);
- rpc BlockReject(BlockRejectRequest) returns (google.protobuf.Empty);
-
- // StateSummary
- rpc StateSummaryAccept(StateSummaryAcceptRequest) returns (StateSummaryAcceptResponse);
-}
-
-message InitializeRequest {
- uint32 network_id = 1;
- bytes subnet_id = 2;
- bytes chain_id = 3;
- bytes node_id = 4;
- bytes x_chain_id = 5;
- bytes c_chain_id = 6;
- bytes avax_asset_id = 7;
- string chain_data_dir = 8;
- bytes genesis_bytes = 9;
- bytes upgrade_bytes = 10;
- bytes config_bytes = 11;
- repeated VersionedDBServer db_servers = 12;
- // server_addr is the address of the gRPC server which serves
- // the messenger, keystore, shared memory, blockchain alias,
- // subnet alias, and appSender services
- string server_addr = 13;
-}
-
-message InitializeResponse {
- bytes last_accepted_id = 1;
- bytes last_accepted_parent_id = 2;
- uint64 height = 3;
- bytes bytes = 4;
- google.protobuf.Timestamp timestamp = 5;
-}
-
-message VersionedDBServer {
- string version = 1;
- // server_addr is the address of the gRPC server which serves the
- // Database service
- string server_addr = 2;
-}
-
-message SetStateRequest {
- uint32 state = 1;
-}
-
-message SetStateResponse {
- bytes last_accepted_id = 1;
- bytes last_accepted_parent_id = 2;
- uint64 height = 3;
- bytes bytes = 4;
- google.protobuf.Timestamp timestamp = 5;
-}
-
-message CreateHandlersResponse {
- repeated Handler handlers = 1;
-}
-
-message CreateStaticHandlersResponse {
- repeated Handler handlers = 1;
-}
-
-message Handler {
- string prefix = 1;
- uint32 lock_options = 2;
- // server_addr is the address of the gRPC server which serves the
- // HTTP service
- string server_addr = 3;
-}
-
-message BuildBlockRequest {
- optional uint64 p_chain_height = 1;
-}
-
-// Note: The status of a freshly built block is assumed to be Processing.
-message BuildBlockResponse {
- bytes id = 1;
- bytes parent_id = 2;
- bytes bytes = 3;
- uint64 height = 4;
- google.protobuf.Timestamp timestamp = 5;
- bool verify_with_context = 6;
-}
-
-message ParseBlockRequest {
- bytes bytes = 1;
-}
-
-message ParseBlockResponse {
- bytes id = 1;
- bytes parent_id = 2;
- uint32 status = 3;
- uint64 height = 4;
- google.protobuf.Timestamp timestamp = 5;
- bool verify_with_context = 6;
-}
-
-message GetBlockRequest {
- bytes id = 1;
-}
-
-message GetBlockResponse {
- bytes parent_id = 1;
- bytes bytes = 2;
- uint32 status = 3;
- uint64 height = 4;
- google.protobuf.Timestamp timestamp = 5;
- // used to propagate database.ErrNotFound through RPC
- uint32 err = 6;
- bool verify_with_context = 7;
-}
-
-message SetPreferenceRequest {
- bytes id = 1;
-}
-
-message BlockVerifyRequest {
- bytes bytes = 1;
-
- // If set, the VM server casts the block to a [block.WithVerifyContext] and
- // calls [VerifyWithContext] instead of [Verify].
- optional uint64 p_chain_height = 2;
-}
-
-message BlockVerifyResponse {
- google.protobuf.Timestamp timestamp = 1;
-}
-
-message BlockAcceptRequest {
- bytes id = 1;
-}
-
-message BlockRejectRequest {
- bytes id = 1;
-}
-
-message HealthResponse {
- bytes details = 1;
-}
-
-message VersionResponse {
- string version = 1;
-}
-
-message AppRequestMsg {
- // The node that sent us this request
- bytes node_id = 1;
- // The ID of this request
- uint32 request_id = 2;
- // deadline for this request
- google.protobuf.Timestamp deadline = 3;
- // The request body
- bytes request = 4;
-}
-
-message AppRequestFailedMsg {
- // The node that we failed to get a response from
- bytes node_id = 1;
- // The ID of the request we sent and didn't get a response to
- uint32 request_id = 2;
-}
-
-message AppResponseMsg {
- // The node that we got a response from
- bytes node_id = 1;
- // Request ID of request that this is in response to
- uint32 request_id = 2;
- // The response body
- bytes response = 3;
-}
-
-message AppGossipMsg {
- // The node that sent us a gossip message
- bytes node_id = 1;
- // The message body
- bytes msg = 2;
-}
-
-message CrossChainAppRequestMsg {
- // The chain that sent us this request
- bytes chain_id = 1;
- // The ID of this request
- uint32 request_id = 2;
- // deadline for this request
- google.protobuf.Timestamp deadline = 3;
- // The request body
- bytes request = 4;
-}
-
-message CrossChainAppRequestFailedMsg {
- // The chain that we failed to get a response from
- bytes chain_id = 1;
- // The ID of the request we sent and didn't get a response to
- uint32 request_id = 2;
-}
-
-message CrossChainAppResponseMsg {
- // The chain that we got a response from
- bytes chain_id = 1;
- // Request ID of request that this is in response to
- uint32 request_id = 2;
- // The response body
- bytes response = 3;
-}
-
-message ConnectedRequest {
- bytes node_id = 1;
- string version = 2;
-}
-
-message DisconnectedRequest {
- bytes node_id = 1;
-}
-
-message GetAncestorsRequest {
- bytes blk_id = 1;
- int32 max_blocks_num = 2;
- int32 max_blocks_size = 3;
- int64 max_blocks_retrival_time = 4;
-}
-
-message GetAncestorsResponse {
- repeated bytes blks_bytes = 1;
-}
-
-message BatchedParseBlockRequest {
- repeated bytes request = 1;
-}
-
-message BatchedParseBlockResponse {
- repeated ParseBlockResponse response = 1;
-}
-
-message VerifyHeightIndexResponse {
- uint32 err = 1;
-}
-
-message GetBlockIDAtHeightRequest {
- uint64 height = 1;
-}
-
-message GetBlockIDAtHeightResponse {
- bytes blk_id = 1;
- uint32 err = 2;
-}
-
-message GatherResponse {
- repeated io.prometheus.client.MetricFamily metric_families = 1;
-}
-
-message StateSyncEnabledResponse {
- bool enabled = 1;
- uint32 err = 2;
-}
-
-message GetOngoingSyncStateSummaryResponse {
- bytes id = 1;
- uint64 height = 2;
- bytes bytes = 3;
- uint32 err = 4;
-}
-
-message GetLastStateSummaryResponse {
- bytes id = 1;
- uint64 height = 2;
- bytes bytes = 3;
- uint32 err = 4;
-}
-
-message ParseStateSummaryRequest {
- bytes bytes = 1;
-}
-
-message ParseStateSummaryResponse {
- bytes id = 1;
- uint64 height = 2;
- uint32 err = 3;
-}
-
-message GetStateSummaryRequest {
- uint64 height = 1;
-}
-
-message GetStateSummaryResponse {
- bytes id = 1;
- bytes bytes = 2;
- uint32 err = 3;
-}
-
-message StateSummaryAcceptRequest {
- bytes bytes = 1;
-}
-
-message StateSummaryAcceptResponse {
- bool accepted = 1;
- uint32 err = 2;
-}
diff --git a/scripts/AIO-setup.sh b/scripts/AIO-setup.sh
index 20794076..5b9d6857 100755
--- a/scripts/AIO-setup.sh
+++ b/scripts/AIO-setup.sh
@@ -1,4 +1,4 @@
-# Copyright (c) [2023-2024] [Sparq Network]
+# Copyright (c) [2023-2024] [AppLayer Developers]
# This software is distributed under the MIT License.
# See the LICENSE.txt file in the project root for more information.
@@ -12,10 +12,6 @@
# Kill the tmux terminals "local_testnet_validatorX" and "local_testnet_discovery"
tmux kill-session -t local_testnet_validator1
-tmux kill-session -t local_testnet_validator2
-tmux kill-session -t local_testnet_validator3
-tmux kill-session -t local_testnet_validator4
-tmux kill-session -t local_testnet_validator5
tmux kill-session -t local_testnet_normal1
tmux kill-session -t local_testnet_normal2
tmux kill-session -t local_testnet_normal3
@@ -28,8 +24,8 @@ tmux kill-session -t local_testnet_discovery
CLEAN=false # Clean the build folder
DEPLOY=true # Deploy the executables to the local_testnet folder
ONLY_DEPLOY=false # Only deploy, do not build
-DEBUG=ON # Build the project in debug mode
-CORES=$(grep -c ^processor /proc/cpuinfo) # Number of cores for parallel build
+DEBUG=OFF # Build the project in debug mode
+CORES=12 # Number of cores for parallel build
for arg in "$@"
do
@@ -103,7 +99,7 @@ mkdir local_testnet
if [ "$ONLY_DEPLOY" = false ]; then
## Build the project
cd build_local_testnet
- cmake -DDEBUG=$DEBUG ..
+ cmake -DDEBUG=$DEBUG -DBUILD_TESTS=OFF -DBUILD_VARIABLES_TESTS=OFF ..
make -j${CORES}
fi
@@ -111,37 +107,44 @@ if [ "$DEPLOY" = true ]; then
if [ "$ONLY_DEPLOY" = true ]; then
cd build_local_testnet
fi
- ## Copy the orbitersdkd and orbitersdk-discovery executables to the local_testnet directory
- cp orbitersdkd ../local_testnet
- cp orbitersdkd-discovery ../local_testnet
+ ## Copy the bdkd and bdkd-discovery executables to the local_testnet directory
+ cp src/bins/bdkd/bdkd ../local_testnet
+ cp src/bins/bdkd-discovery/bdkd-discovery ../local_testnet
# Create the directories for the Validators and Discovery Node and copy the executables
cd ../local_testnet
- for i in $(seq 1 5); do
+ for i in $(seq 1 1); do
mkdir local_testnet_validator$i
mkdir local_testnet_validator$i/blockchain
- cp orbitersdkd local_testnet_validator$i
+ cp bdkd local_testnet_validator$i
done
for i in $(seq 1 6); do
mkdir local_testnet_normal$i
mkdir local_testnet_normal$i/blockchain
- cp orbitersdkd local_testnet_normal$i
+ cp bdkd local_testnet_normal$i
done
mkdir local_testnet_discovery
mkdir local_testnet_discovery/discoveryNode
- cp orbitersdkd-discovery local_testnet_discovery
+ cp bdkd-discovery local_testnet_discovery
# Create the JSON files for the Discovery Node, Validators and Normal Nodes
echo '{
"rootPath": "discoveryNode",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8080,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8080,
"httpPort": 9999,
+ "minDiscoveryConns": 11,
+ "minNormalConns": 11,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"privKey": "0000000000000000000000000000000000000000000000000000000000000000",
"genesis" : {
"validators": [
@@ -154,23 +157,37 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
- }
+ },
+ "indexingMode" : "RPC"
}' >> local_testnet_discovery/discoveryNode/options.json
# Create the JSON file for the Validators
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8081,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8081,
"httpPort": 8090,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"privKey": "0xba5e6e9dd9cbd263969b94ee385d885c2d303dfc181db2a09f6bf19a7ba26759",
+ "extraValidators": [
+ "0xfd84d99aa18b474bf383e10925d82194f1b0ca268e7a339032679d6e3a201ad4",
+ "0x66ce71abe0b8acd92cfd3965d6f9d80122aed9b0e9bdd3dbe018230bafde5751",
+ "0x856aeb3b9c20a80d1520a2406875f405d336e09475f43c478eb4f0dafb765fe7",
+ "0x81f288dd776f4edfe256d34af1f7d719f511559f19115af3e3d692e741faadc6"
+ ],
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -182,7 +199,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -190,152 +207,28 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC_TRACE"
}' >> local_testnet_validator1/blockchain/options.json
- echo '{
- "rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
- "version": 1,
- "chainID": 808080,
- "chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8082,
- "httpPort": 8091,
- "eventBlockCap": 2000,
- "eventLogCap": 10000,
- "privKey": "0xfd84d99aa18b474bf383e10925d82194f1b0ca268e7a339032679d6e3a201ad4",
- "genesis" : {
- "validators": [
- "0x7588b0f553d1910266089c58822e1120db47e572",
- "0xcabf34a268847a610287709d841e5cd590cc5c00",
- "0x5fb516dc2cfc1288e689ed377a9eebe2216cf1e3",
- "0x795083c42583842774febc21abb6df09e784fce5",
- "0xbec7b74f70c151707a0bfb20fe3767c6e65499e0"
- ],
- "timestamp" : 1656356646000000,
- "signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
- "balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
- ]
- },
- "discoveryNodes": [
- {
- "address" : "127.0.0.1",
- "port" : 8080
- }
- ]
- }' >> local_testnet_validator2/blockchain/options.json
-
- echo '{
- "rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
- "version": 1,
- "chainID": 808080,
- "chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8083,
- "httpPort": 8092,
- "eventBlockCap": 2000,
- "eventLogCap": 10000,
- "privKey": "0x66ce71abe0b8acd92cfd3965d6f9d80122aed9b0e9bdd3dbe018230bafde5751",
- "genesis" : {
- "validators": [
- "0x7588b0f553d1910266089c58822e1120db47e572",
- "0xcabf34a268847a610287709d841e5cd590cc5c00",
- "0x5fb516dc2cfc1288e689ed377a9eebe2216cf1e3",
- "0x795083c42583842774febc21abb6df09e784fce5",
- "0xbec7b74f70c151707a0bfb20fe3767c6e65499e0"
- ],
- "timestamp" : 1656356646000000,
- "signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
- "balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
- ]
- },
- "discoveryNodes": [
- {
- "address" : "127.0.0.1",
- "port" : 8080
- }
- ]
- }' >> local_testnet_validator3/blockchain/options.json
-
- echo '{
- "rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
- "version": 1,
- "chainID": 808080,
- "chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8084,
- "httpPort": 8093,
- "eventBlockCap": 2000,
- "eventLogCap": 10000,
- "privKey": "0x856aeb3b9c20a80d1520a2406875f405d336e09475f43c478eb4f0dafb765fe7",
- "genesis" : {
- "validators": [
- "0x7588b0f553d1910266089c58822e1120db47e572",
- "0xcabf34a268847a610287709d841e5cd590cc5c00",
- "0x5fb516dc2cfc1288e689ed377a9eebe2216cf1e3",
- "0x795083c42583842774febc21abb6df09e784fce5",
- "0xbec7b74f70c151707a0bfb20fe3767c6e65499e0"
- ],
- "timestamp" : 1656356646000000,
- "signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
- "balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
- ]
- },
- "discoveryNodes": [
- {
- "address" : "127.0.0.1",
- "port" : 8080
- }
- ]
- }' >> local_testnet_validator4/blockchain/options.json
-
- echo '{
- "rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
- "version": 1,
- "chainID": 808080,
- "chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8085,
- "httpPort": 8094,
- "eventBlockCap": 2000,
- "eventLogCap": 10000,
- "privKey": "0x81f288dd776f4edfe256d34af1f7d719f511559f19115af3e3d692e741faadc6",
- "genesis" : {
- "validators": [
- "0x7588b0f553d1910266089c58822e1120db47e572",
- "0xcabf34a268847a610287709d841e5cd590cc5c00",
- "0x5fb516dc2cfc1288e689ed377a9eebe2216cf1e3",
- "0x795083c42583842774febc21abb6df09e784fce5",
- "0xbec7b74f70c151707a0bfb20fe3767c6e65499e0"
- ],
- "timestamp" : 1656356646000000,
- "signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
- "balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
- ]
- },
- "discoveryNodes": [
- {
- "address" : "127.0.0.1",
- "port" : 8080
- }
- ]
- }' >> local_testnet_validator5/blockchain/options.json
-
# Create the json file for the Normal Nodes
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8086,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8086,
"httpPort": 8095,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -347,7 +240,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -355,19 +248,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal1/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8087,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8087,
"httpPort": 8096,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -379,7 +280,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -387,19 +288,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal2/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8088,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8088,
"httpPort": 8097,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -411,7 +320,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -419,19 +328,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal3/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8089,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8089,
"httpPort": 8098,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -443,7 +360,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -451,19 +368,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal4/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8110,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8110,
"httpPort": 8099,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -475,7 +400,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -483,19 +408,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal5/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8111,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8111,
"httpPort": 8100,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -507,7 +440,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -515,19 +448,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal6/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8110,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8110,
"httpPort": 8099,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -539,7 +480,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -547,19 +488,27 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal5/blockchain/options.json
echo '{
"rootPath": "blockchain",
- "web3clientVersion": "OrbiterSDK/cpp/linux_x86-64/0.2.0",
+ "web3clientVersion": "bdk/cpp/linux_x86-64/0.2.0",
"version": 1,
"chainID": 808080,
"chainOwner": "0x00dead00665771855a34155f5e7405489df2c3c6",
- "wsPort": 8111,
+ "p2pIp" : "127.0.0.1",
+ "p2pPort": 8111,
"httpPort": 8100,
+ "minDiscoveryConns": 5,
+ "minNormalConns": 5,
+ "maxDiscoveryConns": 200,
+ "maxNormalConns": 50,
"eventBlockCap": 2000,
"eventLogCap": 10000,
+ "stateDumpTrigger" : 1000,
+ "minValidators": 4,
"genesis" : {
"validators": [
"0x7588b0f553d1910266089c58822e1120db47e572",
@@ -571,7 +520,7 @@ if [ "$DEPLOY" = true ]; then
"timestamp" : 1656356646000000,
"signer" : "0x4d48bdf34d65ef2bed2e4ee9020a7d3162b494ac31d3088153425f286f3d3c8c",
"balances": [
- { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "1000000000000000000000" }
+ { "address": "0x00dead00665771855a34155f5e7405489df2c3c6", "balance": "100000000000000000000000000000000000000000" }
]
},
"discoveryNodes": [
@@ -579,60 +528,45 @@ if [ "$DEPLOY" = true ]; then
"address" : "127.0.0.1",
"port" : 8080
}
- ]
+ ],
+ "indexingMode" : "RPC"
}' >> local_testnet_normal6/blockchain/options.json
# Launch the Discovery Node through tmux
echo "Launching Discovery Node"
cd local_testnet_discovery
- tmux new-session -d -s local_testnet_discovery './orbitersdkd-discovery || bash && bash'
+ tmux new-session -d -s local_testnet_discovery './bdkd-discovery || bash && bash'
sleep 1
# Launch the Validators through tmux, don't exit the tmux session when closing the terminal
echo "Launching Validator 1"
cd ../local_testnet_validator1
- tmux new-session -d -s local_testnet_validator1 './orbitersdkd || bash && bash'
-
- echo "Launching Validator 2"
- cd ../local_testnet_validator2
- tmux new-session -d -s local_testnet_validator2 './orbitersdkd || bash && bash'
-
- echo "Launching Validator 3"
- cd ../local_testnet_validator3
- tmux new-session -d -s local_testnet_validator3 './orbitersdkd || bash && bash'
-
- echo "Launching Validator 4"
- cd ../local_testnet_validator4
- tmux new-session -d -s local_testnet_validator4 './orbitersdkd || bash && bash'
-
- echo "Launching Validator 5"
- cd ../local_testnet_validator5
- tmux new-session -d -s local_testnet_validator5 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_validator1 './bdkd || bash && bash'
# Launch the Normal Nodes through tmux, don't exit the tmux session when closing the terminal
echo "Launching Normal Node 1"
cd ../local_testnet_normal1
- tmux new-session -d -s local_testnet_normal1 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_normal1 './bdkd || bash && bash'
echo "Launching Normal Node 2"
cd ../local_testnet_normal2
- tmux new-session -d -s local_testnet_normal2 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_normal2 './bdkd || bash && bash'
echo "Launching Normal Node 3"
cd ../local_testnet_normal3
- tmux new-session -d -s local_testnet_normal3 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_normal3 './bdkd || bash && bash'
echo "Launching Normal Node 4"
cd ../local_testnet_normal4
- tmux new-session -d -s local_testnet_normal4 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_normal4 './bdkd || bash && bash'
echo "Launching Normal Node 5"
cd ../local_testnet_normal5
- tmux new-session -d -s local_testnet_normal5 './orbitersdkd || bash && bash'
+ tmux new-session -d -s local_testnet_normal5 './bdkd || bash && bash'
echo "Launching Normal Node 6"
cd ../local_testnet_normal6
- tmux new-session -d -s local_testnet_normal6 './orbitersdkd || bash && bash'
+  tmux new-session -d -s local_testnet_normal6 './bdkd || bash && bash'
# Finish deploying
GREEN=$'\e[0;32m'
diff --git a/scripts/auto.sh b/scripts/auto.sh
new file mode 100755
index 00000000..53ff040e
--- /dev/null
+++ b/scripts/auto.sh
@@ -0,0 +1,102 @@
+#!/usr/bin/env bash
+
+# debug
+# set -x
+
+# set working directory
+_AUTO_DIR=$(dirname ${0})
+
+# load modules
+. ${_AUTO_DIR}/auto_defines.sh
+. ${_AUTO_DIR}/auto_compose.sh
+. ${_AUTO_DIR}/auto_actions.sh
+
+# logs, printf wrapper
+log ()
+{
+ printf "$@";
+}
+
+# error
+log_error ()
+{
+ printf $_RED; log "$@"; printf $_RESET;
+}
+
+# success
+log_ok ()
+{
+ printf $_GREEN; log "$@"; printf $_RESET;
+}
+
+# fatal, die: logs error and exit
+die ()
+{
+ log_error "[-] Fatal: $@"; printf $_RESET; exit 1;
+}
+
+check_action ()
+{
+ # any function named as _NAME_action can be executed directly as an action
+ ( command -v _${_ACTION}_action 2>&1 > /dev/null ) && return 0
+
+ die "invalid action! Use: $0 help\n"
+}
+
+# parse user options
+parse_opts ()
+{
+ # get options
+ for opt in "$@"; do
+ case $opt in
+ # enable debugging
+ -x) shift 1; set -x ;;
+
+ # enable verbose
+ -v) shift 1; set -v ;;
+
+ # enable errexit
+ -e) shift 1; set -e ;;
+
+ # set compose file
+ -f) shift 1; _COMPOSE_FILE=$(echo "${1}" | tr , ' '); shift 1 ;;
+
+ # selected services
+ -s) shift 1; _COMPOSE_SERVICE=$1; shift ;;
+ esac
+ done
+
+ # set action
+ _ACTION=${1} ; shift ;
+
+ # update compose services variable
+ _COMPOSE_SERVICE=$(echo "${_COMPOSE_SERVICE}" | tr , ' ')
+
+ # additional params with will be appended to the action function parameters
+ _PARAMS="${@}"
+}
+
+# select an action and execute it associated function
+handle_action ()
+{
+ eval _${_ACTION}_action $_PARAMS
+}
+
+# main entry point
+main ()
+{
+ # get/set user options (command-line)
+ parse_opts "$@"
+
+ # verify action argument
+ check_action
+
+ # logs and continue
+ log "Params: $(printf '%s' "${_PARAMS[@]}")\n"
+
+ # select build related actions and handle it
+ handle_action ${_PARAMS[@]}
+}
+
+# main routine
+main $@
diff --git a/scripts/auto_actions.sh b/scripts/auto_actions.sh
new file mode 100644
index 00000000..16a95ec5
--- /dev/null
+++ b/scripts/auto_actions.sh
@@ -0,0 +1,186 @@
+#!/usr/bin/env bash
+
+_help_action ()
+{
+ # print help message
+ printf $_GREEN
+ cat >&1 < /dev/null | head -n 1)
+ FOUND2=$(find /usr/bin -name "$1" 2> /dev/null | head -n 1)
+ if [ -n "$FOUND1" ]; then echo "$FOUND1"; elif [ -n "$FOUND2" ]; then echo "$FOUND2"; else echo ""; fi
+}
+
+# Helper function to check for a library in the system.
+# ONLY CHECKS /usr/local AND /usr. If both match, gives preference to the former.
+# Returns the first found match, or an empty string if there is no match.
+# Usage: HAS_LIB=$(check_lib "libname")
+# $1 = library name, including suffix (e.g. "libz.a")
+check_lib() {
+ FOUND1=$(find /usr/local/lib -name "$1" 2> /dev/null | head -n 1)
+ FOUND2=$(find /usr/lib -name "$1" 2> /dev/null | head -n 1)
+ if [ -n "$FOUND1" ]; then echo "$FOUND1"; elif [ -n "$FOUND2" ]; then echo "$FOUND2"; else echo ""; fi
+}
+
+# Another version of check_lib() for use with libs with multiple components (e.g. Boost).
+# Returns the first found match, or an empty string if there is no match.
+# Usage: HAS_LIBS=$(check_libs "libname")
+# $1 = library name, including suffix (e.g. "libboost_*.a")
+check_libs() {
+ FOUND1=$(find /usr/local/lib -name "$1" 2> /dev/null | head -n 1)
+ FOUND2=$(find /usr/lib -name "$1" 2> /dev/null | head -n 1)
+ if [ -n "$FOUND1" ]; then echo "/usr/local/lib/$1"; elif [ -n "$FOUND2" ]; then echo "/usr/lib/$1"; else echo ""; fi
+}
+
+# Versions for external dependencies - update numbers here if required
+ETHASH_VERSION="1.1.0"
+EVMONE_VERSION="0.15.0"
+SPEEDB_VERSION="2.8.0"
+SQLITECPP_VERSION="3.3.2"
+
+# ===========================================================================
+# SCRIPT STARTS HERE
+# ===========================================================================
+
+echo "-- Scanning for dependencies..."
+
+# Check toolchain binaries
+# Necessary: git, wget, gcc/g++, make, ld, autoconf, libtool, pkg-config, cmake, tmux,
+# protoc + grpc_cpp_plugin (external)
+# Optional: ninja, mold, doxygen, clang-tidy
+HAS_GIT=$(check_exec git)
+HAS_WGET=$(check_exec wget)
+HAS_GCC=$(check_exec gcc)
+HAS_GPP=$(check_exec g++)
+HAS_MAKE=$(check_exec make)
+HAS_LD=$(check_exec ld)
+HAS_AUTOCONF=$(check_exec autoconf) # Required for local gRPC compilation
+HAS_LIBTOOL=$(check_exec libtool) # Required for local gRPC compilation
+HAS_PKGCONFIG=$(check_exec pkg-config) # Required for local gRPC compilation
+HAS_CMAKE=$(check_exec cmake)
+HAS_TMUX=$(check_exec tmux)
+HAS_PROTOC=$(check_exec protoc)
+HAS_GRPC=$(check_exec grpc_cpp_plugin)
+
+HAS_NINJA=$(check_exec ninja)
+HAS_MOLD=$(check_exec mold)
+HAS_DOXYGEN=$(check_exec doxygen)
+HAS_CLANGTIDY=$(check_exec clang-tidy)
+
+# Check internal libraries
+# Necessary: libboost-all-dev, openssl/libssl-dev, libzstd-dev, liblz4-dev, libcrypto++-dev,
+# libscrypt-dev, libgrpc-dev, libgrpc++-dev, libc-ares-dev, libsecp256k1-dev
+HAS_BOOST=$(check_libs "libboost_*.a")
+HAS_LIBSSL=$(check_lib "libssl.a")
+HAS_ZSTD=$(check_lib "libzstd.a")
+HAS_LZ4=$(check_lib "liblz4.a")
+HAS_LIBCRYPTOPP=$(check_lib "libcryptopp.a")
+HAS_LIBSCRYPT=$(check_lib "libscrypt.a")
+HAS_LIBCARES=$(check_lib "libcares_static.a") # Debian 13 and higher
+if [ -z "$HAS_LIBCARES" ]; then HAS_LIBCARES=$(check_lib "libcares.a"); fi # Debian 12 and lower
+HAS_LIBGRPC=$(check_lib "libgrpc.a")
+HAS_LIBGRPCPP=$(check_lib "libgrpc++.a")
+HAS_SECP256K1=$(check_lib "libsecp256k1.a")
+
+# Check external libraries
+# Necessary: ethash (+ keccak), evmone (+ evmc), speedb
+HAS_ETHASH=$(check_lib "libethash.a")
+HAS_KECCAK=$(check_lib "libkeccak.a")
+HAS_EVMC_INSTRUCTIONS=$(check_lib "libevmc-instructions.a")
+HAS_EVMC_LOADER=$(check_lib "libevmc-loader.a")
+HAS_EVMONE=$(check_lib "libevmone.a")
+HAS_SPEEDB=$(check_lib "libspeedb.a")
+HAS_SQLITECPP=$(check_lib "libSQLiteCpp.a")
+
+if [ "${1:-}" == "--check" ]; then
+ echo "-- Required toolchain binaries:"
+ echo -n "git: " && [ -n "$HAS_GIT" ] && echo "$HAS_GIT" || echo "not found"
+ echo -n "wget: " && [ -n "$HAS_WGET" ] && echo "$HAS_WGET" || echo "not found"
+ echo -n "gcc: " && [ -n "$HAS_GCC" ] && echo "$HAS_GCC" || echo "not found"
+ echo -n "g++: " && [ -n "$HAS_GPP" ] && echo "$HAS_GPP" || echo "not found"
+ echo -n "make: " && [ -n "$HAS_MAKE" ] && echo "$HAS_MAKE" || echo "not found"
+ echo -n "ld: " && [ -n "$HAS_LD" ] && echo "$HAS_LD" || echo "not found"
+ echo -n "autoconf: " && [ -n "$HAS_AUTOCONF" ] && echo "$HAS_AUTOCONF" || echo "not found"
+ echo -n "libtool: " && [ -n "$HAS_LIBTOOL" ] && echo "$HAS_LIBTOOL" || echo "not found"
+ echo -n "pkg-config: " && [ -n "$HAS_PKGCONFIG" ] && echo "$HAS_PKGCONFIG" || echo "not found"
+ echo -n "cmake: " && [ -n "$HAS_CMAKE" ] && echo "$HAS_CMAKE" || echo "not found"
+ echo -n "tmux: " && [ -n "$HAS_TMUX" ] && echo "$HAS_TMUX" || echo "not found"
+ echo -n "protoc: " && [ -n "$HAS_PROTOC" ] && echo "$HAS_PROTOC" || echo "not found"
+ echo -n "grpc_cpp_plugin: " && [ -n "$HAS_GRPC" ] && echo "$HAS_GRPC" || echo "not found"
+
+ echo "-- Optional toolchain binaries:"
+ echo -n "ninja: " && [ -n "$HAS_NINJA" ] && echo "$HAS_NINJA" || echo "not found"
+ echo -n "mold: " && [ -n "$HAS_MOLD" ] && echo "$HAS_MOLD" || echo "not found"
+ echo -n "doxygen: " && [ -n "$HAS_DOXYGEN" ] && echo "$HAS_DOXYGEN" || echo "not found"
+ echo -n "clang-tidy: " && [ -n "$HAS_CLANGTIDY" ] && echo "$HAS_CLANGTIDY" || echo "not found"
+
+ echo "-- Internal libraries:"
+ echo -n "boost: " && [ -n "$HAS_BOOST" ] && echo "$HAS_BOOST" || echo "not found"
+ echo -n "libssl: " && [ -n "$HAS_LIBSSL" ] && echo "$HAS_LIBSSL" || echo "not found"
+ echo -n "libzstd: " && [ -n "$HAS_ZSTD" ] && echo "$HAS_ZSTD" || echo "not found"
+ echo -n "liblz4: " && [ -n "$HAS_LZ4" ] && echo "$HAS_LZ4" || echo "not found"
+ echo -n "libcryptopp: " && [ -n "$HAS_LIBCRYPTOPP" ] && echo "$HAS_LIBCRYPTOPP" || echo "not found"
+ echo -n "libscrypt: " && [ -n "$HAS_LIBSCRYPT" ] && echo "$HAS_LIBSCRYPT" || echo "not found"
+ echo -n "libcares: " && [ -n "$HAS_LIBCARES" ] && echo "$HAS_LIBCARES" || echo "not found"
+ echo -n "libgrpc: " && [ -n "$HAS_LIBGRPC" ] && echo "$HAS_LIBGRPC" || echo "not found"
+ echo -n "libgrpc++: " && [ -n "$HAS_LIBGRPCPP" ] && echo "$HAS_LIBGRPCPP" || echo "not found"
+ echo -n "libsecp256k1: " && [ -n "$HAS_SECP256K1" ] && echo "$HAS_SECP256K1" || echo "not found"
+
+ echo "-- External libraries:"
+ echo -n "libethash: " && [ -n "$HAS_ETHASH" ] && echo "$HAS_ETHASH" || echo "not found"
+ echo -n "libkeccak: " && [ -n "$HAS_KECCAK" ] && echo "$HAS_KECCAK" || echo "not found"
+ echo -n "libevmc-instructions: " && [ -n "$HAS_EVMC_INSTRUCTIONS" ] && echo "$HAS_EVMC_INSTRUCTIONS" || echo "not found"
+ echo -n "libevmc-loader: " && [ -n "$HAS_EVMC_LOADER" ] && echo "$HAS_EVMC_LOADER" || echo "not found"
+ echo -n "libevmone: " && [ -n "$HAS_EVMONE" ] && echo "$HAS_EVMONE" || echo "not found"
+ echo -n "libspeedb: " && [ -n "$HAS_SPEEDB" ] && echo "$HAS_SPEEDB" || echo "not found"
+ echo -n "libSQLiteCpp: " && [ -n "$HAS_SQLITECPP" ] && echo "$HAS_SQLITECPP" || echo "not found"
+elif [ "${1:-}" == "--install" ]; then
+ # Anti-anti-sudo prevention
+ if [ $(id -u) -ne 0 ]; then
+ echo "Please run this command as root."
+ exit
+ fi
+
+ # Install binaries and internal libs (skip if not on an APT-based distro)
+ HAS_APT=$(check_exec apt)
+ if [ -n "$HAS_APT" ]; then
+ echo "-- Checking internal dependencies..."
+ PKGS=""
+ if [ -z "$HAS_GIT" ]; then PKGS+="git "; fi
+ if [ -z "$HAS_WGET" ]; then PKGS+="wget "; fi
+ if [ -z "$HAS_GCC" ] || [ -z "$HAS_GPP" ] || [ -z "$HAS_MAKE" ] || [ -z "$HAS_LD" ]; then PKGS+="build-essential "; fi
+ if [ -z "$HAS_AUTOCONF" ]; then PKGS+="autoconf "; fi
+ if [ -z "$HAS_LIBTOOL" ]; then PKGS+="libtool-bin "; fi
+ if [ -z "$HAS_PKGCONFIG" ]; then PKGS+="pkg-config "; fi
+ if [ -z "$HAS_CMAKE" ]; then PKGS+="cmake "; fi
+ if [ -z "$HAS_TMUX" ]; then PKGS+="tmux "; fi
+ if [ -z "$HAS_PROTOC" ]; then PKGS+="protobuf-compiler "; fi
+ if [ -z "$HAS_GRPC" ]; then PKGS+="protobuf-compiler-grpc "; fi
+ if [ -z "$HAS_NINJA" ]; then PKGS+="ninja-build "; fi
+ if [ -z "$HAS_MOLD" ]; then PKGS+="mold "; fi
+ if [ -z "$HAS_DOXYGEN" ]; then PKGS+="doxygen "; fi
+ if [ -z "$HAS_CLANGTIDY" ]; then PKGS+="clang-tidy "; fi
+ if [ -z "$HAS_BOOST" ]; then PKGS+="libboost-all-dev "; fi
+ if [ -z "$HAS_LIBSSL" ]; then PKGS+="libssl-dev "; fi
+ if [ -z "$HAS_ZSTD" ]; then PKGS+="libzstd-dev "; fi
+ if [ -z "$HAS_LZ4" ]; then PKGS+="liblz4-dev "; fi
+ if [ -z "$HAS_LIBCRYPTOPP" ]; then PKGS+="libcrypto++-dev "; fi
+ if [ -z "$HAS_LIBSCRYPT" ]; then PKGS+="libscrypt-dev "; fi
+ if [ -z "$HAS_LIBCARES" ]; then PKGS+="libc-ares-dev "; fi
+ if [ -z "$HAS_LIBGRPC" ]; then PKGS+="libgrpc-dev "; fi
+ if [ -z "$HAS_LIBGRPCPP" ]; then PKGS+="libgrpc++-dev "; fi
+ if [ -z "$HAS_SECP256K1" ]; then PKGS+="libsecp256k1-dev "; fi
+ if [ -n "$PKGS" ]; then
+ echo "-- Installing internal dependencies..."
+ apt-get install -y $PKGS
+ fi
+ else
+ echo "-- Skipping internal dependencies (non-APT-based distro, please install those manually)"
+ fi
+
+ # Take note of the EVMONE Patch Path
+ EVMONEPATCH_PATH="$(realpath evmoneCLI11.patch)"
+
+ # Install external libs
+ echo "-- Checking external dependencies..."
+ if [ -z "$HAS_ETHASH" ] || [ -z "$HAS_KECCAK" ]; then
+ echo "-- Installing ethash..."
+ cd /usr/local/src && git clone --depth 1 --branch "v${ETHASH_VERSION}" https://github.com/chfast/ethash
+ cd ethash && mkdir build && cd build
+ cmake -DCMAKE_INSTALL_PREFIX="/usr/local" ..
+ cmake --build . -- -j$(nproc) && cmake --install .
+ fi
+ if [ -z "$HAS_EVMC_INSTRUCTIONS" ] || [ -z "$HAS_EVMC_LOADER" ] || [ -z "$HAS_EVMONE" ]; then
+ echo "-- Installing evmone..."
+ cd /usr/local/src && git clone --recurse-submodules --depth 1 --branch "v${EVMONE_VERSION}" https://github.com/ethereum/evmone
+ cd evmone
+ # Apply patch located at the same path of this script called "evmoneCLI11.patch"
+ echo "-- Applying patch to evmone..."
+ git apply "$EVMONEPATCH_PATH"
+ mkdir build && cd build
+ cmake -DCMAKE_INSTALL_PREFIX="/usr/local" -DBUILD_SHARED_LIBS=ON -DEVMC_INSTALL=ON -DEVMONE_TESTING=ON ..
+ cmake --build . -- -j$(nproc)
+ cmake -DCMAKE_INSTALL_PREFIX="/usr/local" -DBUILD_SHARED_LIBS=OFF -DEVMC_INSTALL=ON ..
+ cmake --build . -- -j$(nproc)
+ ./bin/evmc-vmtester /usr/local/src/evmone/build/lib/libevmone.so && ./bin/evmone-unittests
+ cmake --install .
+ fi
+ if [ -z "$HAS_SPEEDB" ]; then
+ echo "-- Installing speedb..."
+ cd /usr/local/src && git clone --depth 1 --branch "speedb/v${SPEEDB_VERSION}" https://github.com/speedb-io/speedb
+ cd speedb && mkdir build && cd build
+ cmake -DCMAKE_INSTALL_PREFIX="/usr/local" -DCMAKE_BUILD_TYPE=Release \
+ -DROCKSDB_BUILD_SHARED=OFF -DFAIL_ON_WARNINGS=OFF -DWITH_GFLAGS=OFF -DWITH_RUNTIME_DEBUG=OFF \
+ -DWITH_BENCHMARK_TOOLS=OFF -DWITH_CORE_TOOLS=OFF -DWITH_TOOLS=OFF -DWITH_TRACE_TOOLS=OFF \
+ -DWITH_LZ4=ON ..
+ cmake --build . -- -j$(nproc) && cmake --install .
+ fi
+ if [ -z "$HAS_SQLITECPP" ]; then
+ echo "-- Installing SQLiteCpp..."
+ cd /usr/local/src && git clone --depth 1 --branch "${SQLITECPP_VERSION}" https://github.com/SRombauts/SQLiteCpp
+ cd SQLiteCpp && mkdir build && cd build
+ cmake -DCMAKE_INSTALL_PREFIX="/usr/local" -DCMAKE_BUILD_TYPE=Release ..
+ cmake --build . -- -j$(nproc) && cmake --install .
+ fi
+ echo "-- Dependencies installed"
+elif [ "${1:-}" == "--cleanext" ]; then
+ # Anti-anti-sudo prevention
+ if [ $(id -u) -ne 0 ]; then
+ echo "Please run this command as root."
+ exit
+ fi
+
+ # Uninstall any external dependencies (+ source code repos) found in the system
+ if [ -n "$HAS_ETHASH" ] || [ -n "$HAS_KECCAK" ]; then
+ echo "-- Uninstalling ethash..."
+ rm -rf "/usr/local/src/ethash"
+ rm -rf "/usr/local/include/ethash"
+ rm "/usr/local/lib/libethash.a"
+ rm "/usr/local/lib/libethash-global-context.a"
+ rm "/usr/local/lib/libkeccak.a"
+ fi
+ if [ -n "$HAS_EVMC_INSTRUCTIONS" ] || [ -n "$HAS_EVMC_LOADER" ] || [ -n "$HAS_EVMONE" ]; then
+ echo "-- Uninstalling evmone..."
+ rm -rf "/usr/local/src/evmone"
+ rm -rf "/usr/local/include/evmc"
+ rm -rf "/usr/local/include/evmmax"
+ rm -rf "/usr/local/include/evmone"
+ rm "/usr/local/lib/libevmc-instructions.a"
+ rm "/usr/local/lib/libevmc-loader.a"
+ rm "/usr/local/lib/libevmone.a"
+ rm "/usr/local/lib/libevmone-standalone.a"
+ fi
+ if [ -n "$HAS_SPEEDB" ]; then
+ echo "-- Uninstalling speedb..."
+ rm -rf "/usr/local/src/speedb"
+ rm -rf "/usr/local/include/rocksdb"
+ rm "/usr/local/lib/libspeedb.a"
+ fi
+ if [ -n "$HAS_SQLITECPP" ]; then
+ echo "-- Uninstalling SQLiteCpp..."
+ rm "/usr/local/lib/libSQLiteCpp.a"
+ rm "/usr/local/lib/libsqlite3.a"
+ rm -rf "/usr/local/include/SQLiteCpp"
+ rm -rf "/usr/local/src/SQLiteCpp"
+ fi
+ echo "-- External dependencies cleaned, please reinstall them later with --install"
+fi
+
diff --git a/scripts/evmoneCLI11.patch b/scripts/evmoneCLI11.patch
new file mode 100644
index 00000000..dd39ff20
--- /dev/null
+++ b/scripts/evmoneCLI11.patch
@@ -0,0 +1,55 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index b7fc715..5360074 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -131,8 +131,10 @@ set(include_dir ${CMAKE_CURRENT_SOURCE_DIR}/include)
+ add_subdirectory(lib)
+
+ if(EVMONE_TESTING)
++ hunter_add_package(CLI11)
++ find_package(CLI11 REQUIRED)
+ enable_testing()
+- add_subdirectory(test)
++ add_subdirectory(test)
+ endif()
+
+
+diff --git a/test/blockchaintest/CMakeLists.txt b/test/blockchaintest/CMakeLists.txt
+index 6823b4c..1f6d200 100644
+--- a/test/blockchaintest/CMakeLists.txt
++++ b/test/blockchaintest/CMakeLists.txt
+@@ -3,7 +3,7 @@
+ # SPDX-License-Identifier: Apache-2.0
+
+ add_executable(evmone-blockchaintest)
+-target_link_libraries(evmone-blockchaintest PRIVATE evmone evmone::statetestutils evmone-buildinfo GTest::gtest)
++target_link_libraries(evmone-blockchaintest PRIVATE evmone evmone::statetestutils evmone-buildinfo GTest::gtest CLI11::CLI11)
+ target_include_directories(evmone-blockchaintest PRIVATE ${evmone_private_include_dir})
+ target_sources(
+ evmone-blockchaintest PRIVATE
+diff --git a/test/eoftest/CMakeLists.txt b/test/eoftest/CMakeLists.txt
+index 25df1d0..7f6d728 100644
+--- a/test/eoftest/CMakeLists.txt
++++ b/test/eoftest/CMakeLists.txt
+@@ -3,7 +3,7 @@
+ # SPDX-License-Identifier: Apache-2.0
+
+ add_executable(evmone-eoftest)
+-target_link_libraries(evmone-eoftest PRIVATE evmone evmone::testutils nlohmann_json::nlohmann_json GTest::gtest)
++target_link_libraries(evmone-eoftest PRIVATE evmone evmone::testutils nlohmann_json::nlohmann_json GTest::gtest CLI11::CLI11)
+ target_include_directories(evmone-eoftest PRIVATE ${evmone_private_include_dir})
+ target_sources(
+ evmone-eoftest PRIVATE
+diff --git a/test/statetest/CMakeLists.txt b/test/statetest/CMakeLists.txt
+index eaf85df..f156d77 100644
+--- a/test/statetest/CMakeLists.txt
++++ b/test/statetest/CMakeLists.txt
+@@ -17,7 +17,7 @@ target_sources(
+ )
+
+ add_executable(evmone-statetest)
+-target_link_libraries(evmone-statetest PRIVATE evmone::statetestutils evmone evmone-buildinfo GTest::gtest)
++target_link_libraries(evmone-statetest PRIVATE evmone::statetestutils evmone evmone-buildinfo GTest::gtest CLI11::CLI11)
+ target_include_directories(evmone-statetest PRIVATE ${evmone_private_include_dir})
+ target_sources(
+ evmone-statetest PRIVATE
diff --git a/scripts/format-code.sh b/scripts/format-code.sh
index 91fad982..bea6e970 100755
--- a/scripts/format-code.sh
+++ b/scripts/format-code.sh
@@ -1,4 +1,4 @@
-# Copyright (c) [2023-2024] [Sparq Network]
+# Copyright (c) [2023-2024] [AppLayer Developers]
# This software is distributed under the MIT License.
# See the LICENSE.txt file in the project root for more information.
diff --git a/scripts/sonarcloud.sh b/scripts/sonarcloud.sh
new file mode 100755
index 00000000..9367b3bc
--- /dev/null
+++ b/scripts/sonarcloud.sh
@@ -0,0 +1,107 @@
+#!/usr/bin/env bash
+
+OS=linux
+ARCH="x64"
+TMP_PATH=/tmp
+INSTALL_PATH=/root/.sonar
+VERIFY_CORRECTNESS=false
+
+check_status() {
+ exit_status=$?
+ if [ $exit_status -ne 0 ]; then
+ echo "ERROR $1"
+ exit $exit_status
+ fi
+}
+
+realpath() {
+ readlink -f "$1"
+}
+
+parse_args() {
+  while getopts "hxv" arg; do
+ case $arg in
+ x) set -x
+ ;;
+ v) VERIFY_CORRECTNESS=true
+ echo "Verify correctness is set to true"
+ ;;
+ ?) exit 0 ;;
+ esac
+ done
+}
+
+config_sonar_path() {
+ echo "Installation path is '${INSTALL_PATH}'"
+
+ test ! -z "${INSTALL_PATH}"
+ check_status "Empty installation path specified"
+
+ if [[ ! -e "${INSTALL_PATH}" ]]; then
+ mkdir -p "${INSTALL_PATH}"
+ check_status "Failed to create non-existing installation path '${INSTALL_PATH}'"
+ fi
+
+ ABSOLUTE_INSTALL_PATH=$(realpath "${INSTALL_PATH}")
+ echo "Absolute installation path is '${ABSOLUTE_INSTALL_PATH}'"
+
+ test -d "${INSTALL_PATH}"
+ check_status "Installation path '${INSTALL_PATH}' is not a directory (absolute path is '${ABSOLUTE_INSTALL_PATH}')"
+
+ test -r "${INSTALL_PATH}"
+ check_status "Installation path '${INSTALL_PATH}' is not readable (absolute path is '${ABSOLUTE_INSTALL_PATH}')"
+
+ test -w "${INSTALL_PATH}"
+ check_status "Installation path '${INSTALL_PATH}' is not writeable (absolute path is '${ABSOLUTE_INSTALL_PATH}')"
+}
+
+set_sonar_vars() {
+ SONAR_HOST_URL=${SONAR_HOST_URL:-https://sonarcloud.io}
+ SONAR_SCANNER_NAME="sonar-scanner"
+ SONAR_SCANNER_SUFFIX="linux-x64"
+ SONAR_SCANNER_VERSION=$(curl -sSL -H "Accept: application/vnd.github+json" \
+ https://api.github.com/repos/SonarSource/sonar-scanner-cli/releases/latest | jq -r '.tag_name')
+ check_status "Failed to fetch latest sonar-scanner version from GitHub API"
+ SONAR_SCANNER_DIR="${INSTALL_PATH}/sonar-scanner-${SONAR_SCANNER_VERSION}-${SONAR_SCANNER_SUFFIX}"
+ SONAR_SCANNER_URL="https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${SONAR_SCANNER_VERSION}-${OS}-${ARCH}.zip"
+ check_status "Failed to download ${OS} ${ARCH} sonar-scanner checksum from '${SONAR_SCANNER_URL}'"
+ BUILD_WRAPPER_SUFFIX="linux-x86"
+ BUILD_WRAPPER_NAME="build-wrapper-linux-x86-64"
+ BUILD_WRAPPER_DIR="${INSTALL_PATH}/build-wrapper-${BUILD_WRAPPER_SUFFIX}"
+ BUILD_WRAPPER_URL=${SONAR_HOST_URL}/static/cpp/build-wrapper-${BUILD_WRAPPER_SUFFIX}.zip
+
+ echo "sonar-scanner-version=${SONAR_SCANNER_VERSION}"
+ echo "sonar-scanner-url-${OS}-${ARCH}=${SONAR_SCANNER_URL}"
+ echo "sonar-scanner-dir=${SONAR_SCANNER_DIR}"
+ echo "sonar-scanner-bin=${SONAR_SCANNER_DIR}/bin/${SONAR_SCANNER_NAME}"
+ echo "build-wrapper-url=${SONAR_HOST_URL}/static/cpp/build-wrapper-${BUILD_WRAPPER_SUFFIX}.zip"
+ echo "build-wrapper-dir=${BUILD_WRAPPER_DIR}"
+ echo "build-wrapper-bin=${BUILD_WRAPPER_DIR}/${BUILD_WRAPPER_NAME}"
+}
+
+fetch_sonar() {
+ echo "Downloading '${SONAR_SCANNER_URL}'"
+ curl -sSLo "${TMP_PATH}/sonar-scanner.zip" "${SONAR_SCANNER_URL}"
+ check_status "Failed to download '${SONAR_SCANNER_URL}'"
+ echo "Downloading '${BUILD_WRAPPER_URL}'"
+ curl -sSLo "${TMP_PATH}/build-wrapper-linux-x86.zip" "${BUILD_WRAPPER_URL}"
+ check_status "Failed to download '${BUILD_WRAPPER_URL}'"
+}
+
+decompress_sonar() {
+ echo "Decompressing"
+ unzip -o -d "${INSTALL_PATH}" "${TMP_PATH}/sonar-scanner.zip"
+ check_status "Failed to unzip the archive into '${INSTALL_PATH}'"
+ unzip -o -d "${INSTALL_PATH}" "${TMP_PATH}/build-wrapper-linux-x86.zip"
+ check_status "Failed to unzip the archive into '${INSTALL_PATH}'"
+}
+
+main() {
+ parse_args "$@"
+ set_sonar_vars
+ config_sonar_path
+ fetch_sonar
+ decompress_sonar
+}
+
+main "$@"
diff --git a/sonar-project.properties b/sonar-project.properties
index a428eba3..d05d2fb0 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -1,6 +1,6 @@
-sonar.projectKey=SparqNet_orbitersdk-cpp_AY16kEq6lKE0qFQROdKZ
-sonar.organization=SparqNet
-sonar.cfamily.threads=4
+sonar.projectKey=AppLayerLabs_bdk-cpp_ccf13a2c-7f2c-4116-b2fe-a974ebed07ff
+sonar.organization=AppLayerLabs
+sonar.cfamily.threads=12
sonar.projectVersion=1.0
# =====================================================
@@ -11,5 +11,7 @@ sonar.projectVersion=1.0
sonar.sources=src
sonar.sourceEncoding=UTF-8
+sonar.c.file.suffixes=-
+sonar.cpp.file.suffixes=.cc,.cpp,.cxx,.c++,.hh,.hpp,.hxx,.h++,.ipp,.c,.h
sonar.cfamily.cpp23.enabled=true
sonar.exclusions=src/libs/**, tests/**
diff --git a/src/bins/CMakeLists.txt b/src/bins/CMakeLists.txt
new file mode 100644
index 00000000..57583282
--- /dev/null
+++ b/src/bins/CMakeLists.txt
@@ -0,0 +1,8 @@
+add_subdirectory(bdkd)
+add_subdirectory(bdkd-tests)
+add_subdirectory(bdkd-discovery)
+add_subdirectory(networkdeployer)
+add_subdirectory(contractabigenerator)
+add_subdirectory(network-sim)
+add_subdirectory(faucet-api)
+add_subdirectory(btv-server)
\ No newline at end of file
diff --git a/src/bins/bdkd-discovery/CMakeLists.txt b/src/bins/bdkd-discovery/CMakeLists.txt
new file mode 100644
index 00000000..16c2fc38
--- /dev/null
+++ b/src/bins/bdkd-discovery/CMakeLists.txt
@@ -0,0 +1,17 @@
+# Compile and link the Discovery Node test executable if set to build it
+if (BUILD_DISCOVERY)
+ add_executable(bdkd-discovery "main.cpp")
+
+ add_dependencies(bdkd-discovery bdk_lib)
+
+ target_include_directories(bdkd-discovery PRIVATE
+ bdk_lib ${OPENSSL_INCLUDE_DIR} ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(bdkd-discovery
+ bdk_lib ${SPEEDB_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES}
+ ${ZLIB_LIBRARIES} ${SECP256K1_LIBRARY} ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+ )
+endif()
+
diff --git a/src/bins/bdkd-discovery/main.cpp b/src/bins/bdkd-discovery/main.cpp
new file mode 100644
index 00000000..e52fc28a
--- /dev/null
+++ b/src/bins/bdkd-discovery/main.cpp
@@ -0,0 +1,77 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include
+#include
+#include
+
+#include "src/net/p2p/managerdiscovery.h"
+
+#include "src/utils/clargs.h"
+#include "src/utils/options.h"
+
+std::condition_variable cv;
+std::mutex cv_m;
+int signalCaught = 0;
+
+void signalHandler(int signum) {
+ {
+ std::unique_lock lk(cv_m);
+ Utils::safePrint("Signal caught: " + Utils::getSignalName(signum));
+ signalCaught = signum;
+ }
+ cv.notify_one();
+}
+
+/// Executable with a discovery node for the given Options default chain.
+int main(int argc, char* argv[]) {
+ Log::logToCout = true;
+ Utils::safePrint("bdkd-discovery: Blockchain Development Kit discovery node daemon");
+ std::signal(SIGINT, signalHandler);
+ std::signal(SIGHUP, signalHandler);
+
+ // Parse command-line options
+ ProcessOptions opt = parseCommandLineArgs(argc, argv, BDKTool::DISCOVERY_NODE);
+
+ // Select a default log level for this program if none is specified
+ if (opt.logLevel == "") opt.logLevel = "INFO";
+
+ // Apply selected process options
+ if (!applyProcessOptions(opt)) return 1;
+
+ // Start the discovery node
+ Utils::safePrint("Main thread starting node...");
+ // Local binary path + /blockchain
+ std::string blockchainPath = std::filesystem::current_path().string() + std::string("/discoveryNode");
+ const auto options = Options::fromFile(blockchainPath);
+ auto p2p = std::make_unique(options.getP2PIp(), options);
+ p2p->start();
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ p2p->startDiscovery();
+
+ // Main thread waits for a non-zero signal code to be raised and caught
+ Utils::safePrint("Main thread waiting for interrupt signal...");
+ int exitCode = 0;
+ {
+ std::unique_lock lk(cv_m);
+ cv.wait(lk, [] { return signalCaught != 0; });
+ exitCode = signalCaught;
+ }
+ Utils::safePrint("Main thread stopping due to interrupt signal [" + Utils::getSignalName(exitCode) + "], shutting down node...");
+
+ // Shut down the node
+ SLOGINFO("Received signal " + std::to_string(exitCode));
+ Utils::safePrint("Main thread stopping node...");
+ p2p->stopDiscovery();
+ Utils::safePrint("Main thread shutting down...");
+ p2p.reset();
+
+ // Return the signal code
+ Utils::safePrint("Main thread exiting with code " + std::to_string(exitCode) + ".");
+ return exitCode;
+}
+
diff --git a/src/bins/bdkd-tests/CMakeLists.txt b/src/bins/bdkd-tests/CMakeLists.txt
new file mode 100644
index 00000000..2fa369d9
--- /dev/null
+++ b/src/bins/bdkd-tests/CMakeLists.txt
@@ -0,0 +1,18 @@
+# Compile and link the test executable if set to build it
+if (BUILD_TESTS)
+ add_executable(bdkd-tests ${TESTS_HEADERS} ${TESTS_SOURCES})
+
+ add_dependencies(bdkd-tests bdk_lib)
+
+ target_include_directories(bdkd-tests PRIVATE
+ bdk_lib ${OPENSSL_INCLUDE_DIR} ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(bdkd-tests PRIVATE
+ bdk_lib catch2
+ ${SPEEDB_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES}
+ ${SECP256K1_LIBRARY} ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+ )
+endif()
+
diff --git a/src/bins/bdkd/CMakeLists.txt b/src/bins/bdkd/CMakeLists.txt
new file mode 100644
index 00000000..3766f460
--- /dev/null
+++ b/src/bins/bdkd/CMakeLists.txt
@@ -0,0 +1,15 @@
+# Compile and link the executable
+add_executable(bdkd "main.cpp")
+
+add_dependencies(bdkd bdk_lib)
+
+target_include_directories(bdkd PRIVATE
+ bdk_lib ${OPENSSL_INCLUDE_DIR} ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+)
+
+target_link_libraries(bdkd
+ bdk_lib ${SPEEDB_LIBRARY} ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES}
+ ${SECP256K1_LIBRARY} ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+)
+
diff --git a/src/bins/bdkd/main.cpp b/src/bins/bdkd/main.cpp
new file mode 100644
index 00000000..6b57328e
--- /dev/null
+++ b/src/bins/bdkd/main.cpp
@@ -0,0 +1,75 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include
+#include
+
+#include
+#include
+
+#include "src/core/blockchain.h"
+#include "src/utils/clargs.h"
+
+#include "src/utils/logger.h"
+
+std::unique_ptr blockchain = nullptr;
+
+std::condition_variable cv;
+std::mutex cv_m;
+int signalCaught = 0;
+
+void signalHandler(int signum) {
+ {
+ std::unique_lock lk(cv_m);
+ Utils::safePrint("Signal caught: " + Utils::getSignalName(signum));
+ signalCaught = signum;
+ }
+ cv.notify_one();
+}
+
+int main(int argc, char* argv[]) {
+ Log::logToCout = true;
+ Utils::safePrint("bdkd: Blockchain Development Kit full node daemon");
+ std::signal(SIGINT, signalHandler);
+ std::signal(SIGHUP, signalHandler);
+
+ // Parse command-line options
+ ProcessOptions opt = parseCommandLineArgs(argc, argv, BDKTool::FULL_NODE);
+
+ // Select a default log level for this program if none is specified
+ if (opt.logLevel == "") opt.logLevel = "INFO";
+
+ // Apply selected process options
+ if (!applyProcessOptions(opt)) return 1;
+
+ // Start the blockchain syncing engine.
+ Utils::safePrint("Main thread starting node...");
+ std::string blockchainPath = std::filesystem::current_path().string() + std::string("/blockchain");
+ blockchain = std::make_unique(blockchainPath);
+ blockchain->start();
+
+ // Main thread waits for a non-zero signal code to be raised and caught
+ Utils::safePrint("Main thread waiting for interrupt signal...");
+ int exitCode = 0;
+ {
+ std::unique_lock lk(cv_m);
+ cv.wait(lk, [] { return signalCaught != 0; });
+ exitCode = signalCaught;
+ }
+ Utils::safePrint("Main thread stopping due to interrupt signal [" + Utils::getSignalName(exitCode) + "], shutting down node...");
+
+ // Shut down the node
+ SLOGINFO("Received signal " + std::to_string(exitCode));
+ Utils::safePrint("Main thread stopping node...");
+ blockchain->stop();
+ Utils::safePrint("Main thread shutting down...");
+ blockchain = nullptr; // Destroy the blockchain object, calling the destructor of every module and dumping to DB.
+
+ // Return the signal code
+ Utils::safePrint("Main thread exiting with code " + std::to_string(exitCode) + ".");
+ return exitCode;
+}
diff --git a/src/bins/btv-server/CMakeLists.txt b/src/bins/btv-server/CMakeLists.txt
new file mode 100644
index 00000000..aa079845
--- /dev/null
+++ b/src/bins/btv-server/CMakeLists.txt
@@ -0,0 +1,47 @@
+if (BUILD_BTVSERVER)
+ add_library(btv_server_lib STATIC
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/utils.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/httpclient.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/manager.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/socketlistener.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/websocketsession.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/websocketserver.h
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/utils.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/httpclient.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/manager.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/socketlistener.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/websocketsession.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/btv-server/src/websocketserver.cpp
+ )
+
+ target_include_directories(btv_server_lib PRIVATE
+ ${CMAKE_SOURCE_DIR}/include ${OPENSSL_INCLUDE_DIR} bdk_lib
+ ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(btv_server_lib PRIVATE bdk_lib
+ ${CRYPTOPP_LIBRARIES} ${SCRYPT_LIBRARY} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY} ${SPEEDB_LIBRARY}
+ ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} bdk_lib
+ )
+
+ # Compile and link the btv-server executable
+ add_executable(btv-server "main.cpp")
+
+ add_dependencies(btv-server bdk_lib btv_server_lib)
+
+ target_include_directories(btv-server PRIVATE
+ bdk_lib btv_server_lib ${OPENSSL_INCLUDE_DIR}
+ ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(btv-server
+ bdk_lib btv_server_lib
+ ${SPEEDB_LIBRARY} ${SNAPPY_LIBRARY} ${Boost_LIBRARIES}
+ ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+ )
+
+endif ()
\ No newline at end of file
diff --git a/src/bins/btv-server/main.cpp b/src/bins/btv-server/main.cpp
new file mode 100644
index 00000000..27709d6b
--- /dev/null
+++ b/src/bins/btv-server/main.cpp
@@ -0,0 +1,22 @@
+#include "src/manager.h"
+
+
+std::unique_ptr manager = nullptr;
+
+void signalHandler(int signum) {
+ BTVServer::Printer::safePrint("Signal caught: " + Utils::getSignalName(signum));
+ manager.reset();
+}
+
+int main() {
+ std::signal(SIGINT, signalHandler);
+ std::signal(SIGHUP, signalHandler);
+ Log::logToCout = true;
+ manager = std::make_unique();
+ BTVServer::Printer::safePrint("Starting Build The Void Websocket Server...");
+ manager->start();
+  BTVServer::Printer::safePrint("Exiting Build The Void Websocket Server...");
+
+
+ return 0;
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/httpclient.cpp b/src/bins/btv-server/src/httpclient.cpp
new file mode 100644
index 00000000..1a47598a
--- /dev/null
+++ b/src/bins/btv-server/src/httpclient.cpp
@@ -0,0 +1,67 @@
+#include "httpclient.h"
+#include "manager.h"
+
+namespace BTVServer {
+ HTTPSemiSyncClient::HTTPSemiSyncClient(const std::string& host, const std::string& port, net::io_context& ioc_, Manager& manager)
+ : host(host), port(port), resolver(ioc_), stream(ioc_), strand_(ioc_.get_executor()), manager(manager) {}
+
+ // TODO: either close() shouldn't be throwing, or the dtor shouldn't be calling it
+ HTTPSemiSyncClient::~HTTPSemiSyncClient() { if (stream.socket().is_open()) this->close(); }
+
+ void HTTPSemiSyncClient::connect() {
+ boost::system::error_code ec;
+ auto const results = resolver.resolve(host, port, ec);
+ if (ec) throw DynamicException("Error while resolving the HTTP Client: " + ec.message());
+ stream.connect(results, ec);
+ if (ec) throw DynamicException("Error while connecting the HTTP Client: " + ec.message());
+ Printer::safePrint("HTTPSemiSyncClient connected to " + host + ":" + port);
+ }
+
+ void HTTPSemiSyncClient::close() {
+ boost::system::error_code ec;
+ stream.socket().shutdown(tcp::socket::shutdown_both, ec);
+ if (ec) throw DynamicException("Error while closing the HTTP Client: " + ec.message());
+ }
+
+ std::string HTTPSemiSyncClient::makeHTTPRequestInternal(const std::shared_ptr reqBody) {
+ namespace http = boost::beast::http; // from
+
+ boost::system::error_code ec;
+ // Set up an HTTP POST/GET request message
+ http::request req{ http::verb::post , "/", 11};
+
+ req.set(http::field::host, host);
+ req.set(http::field::user_agent, BOOST_BEAST_VERSION_STRING);
+ req.set(http::field::accept, "application/json");
+ req.set(http::field::content_type, "application/json");
+ req.body() = *reqBody;
+ req.prepare_payload();
+
+ // Send the HTTP request to the remote host
+ http::write(stream, req, ec);
+ if (ec) throw DynamicException("Error while writing the HTTP request: " + ec.message());
+
+ boost::beast::flat_buffer buffer;
+ // Declare a container to hold the response
+ http::response res;
+
+ // Receive the HTTP response
+ http::read(stream, buffer, res, ec);
+ if (ec) throw DynamicException("Error while reading the HTTP response: " + ec.message() + " " + std::to_string(ec.value()));
+
+ // Write only the body answer to output
+ return {
+ boost::asio::buffers_begin(res.body().data()),
+ boost::asio::buffers_end(res.body().data())
+ };
+ }
+
+ void HTTPSemiSyncClient::makeHTTPRequest(std::string &&reqBody) {
+ // DO NOT forget to post to the strand!!!
+ // each write/read should be SEQUENTIAL
+ boost::asio::post(strand_, [this, reqBodyPtr = std::make_shared(std::move(reqBody))]() {
+ auto response = this->makeHTTPRequestInternal(reqBodyPtr);
+ this->manager.handleHTTPResponse(response);
+ });
+ }
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/httpclient.h b/src/bins/btv-server/src/httpclient.h
new file mode 100644
index 00000000..c4ebf2d3
--- /dev/null
+++ b/src/bins/btv-server/src/httpclient.h
@@ -0,0 +1,50 @@
+#ifndef HTTPASYNCCLIENT_H
+#define HTTPASYNCCLIENT_H
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include "utils.h"
+
+namespace beast = boost::beast; // from
+namespace http = beast::http; // from
+namespace websocket = beast::websocket; // from
+namespace net = boost::asio; // from
+namespace ssl = boost::asio::ssl; // from
+using tcp = boost::asio::ip::tcp; // from
+
+namespace BTVServer {
+ // Forward declaration
+ class Manager;
+ class HTTPSemiSyncClient {
+ private:
+ const std::string host;
+ const std::string port;
+ Manager& manager;
+ tcp::resolver resolver;
+ beast::tcp_stream stream;
+ net::strand strand_;
+ std::string makeHTTPRequestInternal(const std::shared_ptr reqBody);
+ uint64_t highestBlock = 0;
+
+ public:
+ HTTPSemiSyncClient(const std::string& host, const std::string& port, net::io_context& ioc_, Manager& manager);
+ ~HTTPSemiSyncClient() noexcept;
+ HTTPSemiSyncClient(const HTTPSemiSyncClient&) = delete;
+ HTTPSemiSyncClient& operator=(const HTTPSemiSyncClient&) = delete;
+ HTTPSemiSyncClient(HTTPSemiSyncClient&&) = delete;
+ HTTPSemiSyncClient& operator=(HTTPSemiSyncClient&&) = delete;
+
+ void connect();
+ void close();
+
+ void makeHTTPRequest(std::string&& reqBody);
+ };
+}
+
+
+#endif // HTTPASYNCCLIENT_H
\ No newline at end of file
diff --git a/src/bins/btv-server/src/manager.cpp b/src/bins/btv-server/src/manager.cpp
new file mode 100644
index 00000000..e2504692
--- /dev/null
+++ b/src/bins/btv-server/src/manager.cpp
@@ -0,0 +1,336 @@
+#include "manager.h"
+
+#include "bins/network-sim/src/httpclient.h"
+#include "contract/abi.h"
+#include "net/http/jsonrpc/methods.h"
+
+namespace BTVServer {
+ Manager::Manager() : world_(), ioc_(8), server_(*this, ioc_, tcp::endpoint(tcp::v4(), 29345)), httpClient_("149.112.84.202", "8095", ioc_, *this) {}
+ Manager::~Manager() {
+ this->server_.close();
+ this->ioc_.stop();
+ Utils::safePrint("Manager destroyed");
+ this->httpClient_.close();
+
+ }
+ void Manager::handleHTTPResponse(const std::string& reqBody) {
+ static auto lastTimeResponded = std::chrono::system_clock::now();
+ // ONLY USED TO GET LOGS FROM THE SERVER!
+ // {"jsonrpc":"2.0","id":1,"result":[{"address":"0x30c37f6b1d6321c4398238525046c604c7b26150","blockHash":"0x02bb902e386b9b8baf792294f158897f0677a0d114105f886b9dd73ce8cec7c9","blockNumber":"0x0000000000002621","data":"0x0000000000000000000000000000000000000000000000000000000000000026000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000004","logIndex":"0x0000000000000000","removed":false,"topics":["0x88c1435105c4190f4c8be13e5dbc689ebf4dbec75a17e63a51697ce761b5a1d2"],"transactionHash":"0x84cb3ac1ac98c1f481e3fa861230e100696896a04275d7af850dc90ff9930618","transactionIndex":"0x0000000000000000"}]}
+ json response = json::parse(reqBody);
+ /*
+ void PlayerMoved(const EventParam& playerId, const EventParam& x, const EventParam& y, const EventParam& z) {
+ this->emitEvent("PlayerMoved", std::make_tuple(playerId, x, y, z));
+ }
+ void PlayerLogin(const EventParam& playerId, const EventParam& x, const EventParam& y, const EventParam& z) {
+ this->emitEvent("PlayerLogin", std::make_tuple(playerId, x, y, z));
+ }
+ void PlayerLogout(const EventParam& playerId) {
+ this->emitEvent("PlayerLogout", std::make_tuple(playerId));
+ }
+ void BlockChanged(const EventParam& playerId, const EventParam& x, const EventParam& y, const EventParam& z, const EventParam& blockType, const EventParam& timestamp) {
+ this->emitEvent("BlockChanged", std::make_tuple(playerId, x, y, z, blockType, timestamp));
+ }
+ void ClaimedEnergy(const EventParam& playerId, const EventParam& value) {
+ this->emitEvent("ClaimedEnergy", std::make_tuple(playerId, value));
+ }
+ void PlayerDead(const EventParam& playerId) {
+ this->emitEvent("PlayerDead", std::make_tuple(playerId));
+ }
+ */
+ static Hash PlayerMovedTopic = ABI::EventEncoder::encodeSignature("PlayerMoved");
+ static Hash PlayerLoginTopic = ABI::EventEncoder::encodeSignature("PlayerLogin");
+ static Hash PlayerLogoutTopic = ABI::EventEncoder::encodeSignature("PlayerLogout");
+ static Hash BlockChangedTopic = ABI::EventEncoder::encodeSignature("BlockChanged");
+ static Hash ClaimedEnergyTopic = ABI::EventEncoder::encodeSignature("ClaimedEnergy");
+ static Hash PlayerDeadTopic = ABI::EventEncoder::encodeSignature("PlayerDead");
+ json jsonRpcResponse;
+ jsonRpcResponse["id"] = response.at("id");
+ jsonRpcResponse["jsonrpc"] = "2.0";
+ jsonRpcResponse["result"] = json::array();
+ try {
+ // For every object within the logs, we need to construct its respective JSON object and insert it into the result array
+ for (const auto& log : response.at("result")) {
+ // We need to replace the lastProcessedBlock based on the latest provided by the logs we requested
+ uint64_t blockNumber = Utils::fromBigEndian(Hex::toBytes(log.at("blockNumber").get()));
+ if (blockNumber > this->lastProcessedBlock) {
+ this->lastProcessedBlock = blockNumber;
+ }
+ json eventUpdate;
+ // Now we just need to separate the logs by the topics previously defined
+ // We dont need to double check anything because we are interested in ALL logs
+ // and our requested is correctly built to only get logs from the BTV contract
+ Hash logTopic = Hash(Hex::toBytes(log.at("topics").at(0).get()));
+ if (logTopic == PlayerMovedTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId, x, y, z] = data;
+ eventUpdate = {
+ {"method", "PlayerMoved"},
+ {"playerId", playerId},
+ {"x", x},
+ {"y", y},
+ {"z", z}
+ };
+ }
+ if (logTopic == PlayerLoginTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId, x, y, z] = data;
+ eventUpdate = {
+ {"method", "PlayerLogin"},
+ {"playerId", playerId},
+ {"x", x},
+ {"y", y},
+ {"z", z}
+ };
+ }
+ if (logTopic == PlayerLogoutTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId] = data;
+ eventUpdate = {
+ {"method", "PlayerLogout"},
+ {"playerId", playerId}
+ };
+ }
+ if (logTopic == BlockChangedTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId, x, y, z, blockType, timestamp] = data;
+ eventUpdate = {
+ {"method", "BlockChanged"},
+ {"playerId", playerId},
+ {"x", x},
+ {"y", y},
+ {"z", z},
+ {"blockType", blockType},
+ {"timestamp", timestamp}
+ };
+ // We also need to update the world!
+ std::unique_lock lock(this->worldMutex_);
+ auto block = this->world_.getBlock(BTVUtils::WorldBlockPos{x, y, z});
+ block->setBlockType(static_cast(blockType));
+ block->setPlacer(playerId);
+ block->setModificationTimestamp(timestamp);
+ }
+ if (logTopic == ClaimedEnergyTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId, value] = data;
+ eventUpdate = {
+ {"method", "ClaimedEnergy"},
+ {"playerId", playerId},
+ {"value", value.str()} // VALUE IS A STRING BECAUSE JSON CANNOT SUPPORT UINT256_T!!! PAY ATTENTION TO THIS
+ };
+ }
+ if (logTopic == PlayerDeadTopic) {
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(log.at("data").get()));
+ const auto& [playerId] = data;
+ eventUpdate = {
+ {"method", "PlayerDead"},
+ {"playerId", playerId}
+ };
+ }
+ jsonRpcResponse["result"].push_back(eventUpdate);
+ }
+ } catch (std::exception &e) {
+ Printer::safePrint("Error while processing response: " + std::string(e.what()) + " with message " + reqBody);
+ }
+ // Now we need to broadcast the update object to all players!
+ this->broadcastTooAllPlayers(jsonRpcResponse);
+ // Lmao lets request the logs again!
+ // We wait at least 100ms
+ std::this_thread::sleep_until(lastTimeResponded + std::chrono::milliseconds(100));
+ lastTimeResponded = std::chrono::system_clock::now();
+ this->httpClient_.makeHTTPRequest(makeRequestMethod("eth_getLogs", json::array({
+ {
+ {"address", btvContractAddress_.hex(true)},
+ {"fromBlock", Hex::fromBytes(Utils::uintToBytes(this->lastProcessedBlock), true).forRPC()},
+ {"toBlock", "latest"}
+ }
+ })).dump());
+ }
+ void Manager::registerPlayer(const uint64_t& id, std::weak_ptr session) {
+ this->players_.insert({id, session});
+ }
+ void Manager::removePlayer(const uint64_t& id) {
+ this->players_.erase(id);
+ }
+ void Manager::handlePlayerRequest(std::weak_ptr session, const std::string& msg) {
+ // Post this to the io_context
+ this->ioc_.post([this, session, msg]() {
+ try {
+ json j = json::parse(msg);
+ if (!j.contains("method")) {
+ throw std::runtime_error("Method not found");
+ }
+ if (!j.at("method").is_string()) {
+ throw std::runtime_error("Method is not a string");
+ }
+ if (j.at("method").get() == "getChunks") {
+ if (!j.contains("params")) {
+ throw std::runtime_error("Params not found");
+ }
+ if (!j.at("params").is_array()) {
+ throw std::runtime_error("Params is not an array");
+ }
+ json response = {
+ {"jsonrpc", "2.0"},
+ {"id", j.at("id")},
+ {"result", json::array()}
+ };
+ for (const auto& param : j.at("params")) {
+ if (!param.contains("x") || !param.contains("y")) {
+ throw std::runtime_error("Param does not contain x or y");
+ }
+ if (!param.at("x").is_number_integer() || !param.at("y").is_number_integer()) {
+ throw std::runtime_error("Param x or y is not an integer");
+ }
+ int32_t x = param.at("x").get();
+ int32_t y = param.at("y").get();
+ if (x < -32 || x > 31 || y < -32 || y > 31) {
+ throw std::runtime_error("Invalid x or y");
+ }
+ std::shared_lock lock(this->worldMutex_);
+ BTVUtils::Chunk chunk = *this->world_.getChunk({x, y});
+ json chunkJson = {
+ {"x", x},
+ {"y", y},
+ {"data", Hex::fromBytes(chunk.serialize(), true).get()}
+ };
+ response["result"].push_back(chunkJson);
+ }
+ session.lock()->write(response.dump());
+ } else {
+ throw std::runtime_error("Method not allowed");
+ }
+ } catch (std::exception &e) {
+ Printer::safePrint("Error while processing player request: " + std::string(e.what()) + " with message " + msg + " with size " + std::to_string(msg.size()) + " disconnecting player");
+ if (auto realSession = session.lock()) {
+ realSession->stop();
+ this->players_.erase(realSession->getId());
+ }
+ }
+ });
+ }
+ void Manager::start() {
+ // To properly start, we need to start querying the blockchain for the logs from BTV contract
+ // For that, we need to initialize the HTTP client.
+ Printer::safePrint("Are you COMPLETELY sure that the blockchain is NOT moving?");
+ std::string answer;
+ std::cin >> answer;
+ this->loadWorld();
+
+ this->httpClient_.connect();
+ this->server_.setup();
+ // Now we need to initialize the thread vector that will be executing the io_context
+ std::vector threads;
+ threads.reserve(7); // 7 because the main thread will also be running the io_context
+ for (int i = 0; i < 7; i++) {
+ threads.emplace_back([this]() {
+ Printer::safePrint("Running io_context");
+ this->ioc_.run();
+ Printer::safePrint("io_context has stopped");
+ });
+ }
+ // Before running the io_context in the main thread, we MUST start requesting for the logs! Otherwise it will be a bad time for us
+ Printer::safePrint("Making the first request for the logs");
+ this->httpClient_.makeHTTPRequest(makeRequestMethod("eth_getLogs", json::array({
+ {
+ {"address", btvContractAddress_.hex(true)},
+ {"fromBlock", Hex::fromBytes(Utils::uintToBytes(this->lastProcessedBlock), true).forRPC()},
+ {"toBlock", "latest"}
+ }
+ })).dump());
+ Printer::safePrint("Request sent");
+ // Now we need to run the io_context on the main thread
+ this->ioc_.run();
+ Printer::safePrint("Joining all other threads");
+ for (auto& thread : threads) {
+ thread.join();
+ }
+ Printer::safePrint("Manager is successfully shutting down");
+ }
+
+ void Manager::loadWorld() {
+ Printer::safePrint("Connecting to the blockchain...");
+ HTTPSyncClient client("149.112.84.202", "8095");
+ client.connect();
+ Printer::safePrint("Connected");
+ // We need to request ALL the chunks from the blockchain
+ // That means a range (x, y) from (-32, -32) to (31, 31)
+ auto now = std::chrono::system_clock::now();
+
+ for (int x = -32; x < 32; x++) {
+ Printer::safePrint("Requesting chunks for x = " + std::to_string(x) + " total Y: 64");
+ json requestArr = json::array();
+ for (int y = -32; y < 32; y++) {
+ requestArr.push_back(buildGetChunkRequest(x, y, y + 32));
+ }
+ auto response = client.makeHTTPRequest(requestArr.dump());
+
+ json chunkRequestResponse = json::parse(response);
+ // Create a range of -32 to 31 numbers (number -> bool) so we can check if ALL
+ // chunks were successfully deserialized
+ std::map chunksReceived;
+ for (int y = -32; y < 32; y++) {
+ chunksReceived[y] = false;
+ }
+ assert(chunksReceived.size() == 64);
+ assert(chunkRequestResponse.is_array());
+ assert(chunkRequestResponse.size() == 64);
+ for (const auto& chunkResponse : chunkRequestResponse) {
+ uint64_t chunkId = chunkResponse["id"].get();
+ auto data = ABI::Decoder::decodeData(Hex::toBytes(chunkResponse["result"].get()));
+ const auto& chunkData = std::get<0>(data);
+ *this->world_.getChunk({x, chunkId - 32}) = BTVUtils::Chunk::deserialize(chunkData);
+ if (this->world_.getChunk({x, chunkId - 32})->serialize() != chunkData) {
+ Printer::safePrint("Chunk (" + std::to_string(x) + ", " + std::to_string(chunkId - 32) + ") does not match!");
+ throw std::runtime_error("Chunk does not match!");
+ }
+ chunksReceived.at(chunkId - 32) = true;
+ }
+ for (const auto& [y, received] : chunksReceived) {
+ if (!received) {
+ Printer::safePrint("Chunk (" + std::to_string(x) + ", " + std::to_string(y) + ") was not received");
+ throw std::runtime_error("Chunk was not received");
+ }
+ }
+ }
+ auto after = std::chrono::system_clock::now();
+ Printer::safePrint("Time taken to request all 4096 chunks: " + std::to_string(std::chrono::duration_cast(after - now).count()) + "ms");
+ Printer::safePrint("Getting the latest block from the network");
+ auto latestBlock = client.makeHTTPRequest(makeRequestMethod("eth_blockNumber", json::array()).dump());
+ // Result will have the number value hex encoded
+ this->lastProcessedBlock = Utils::fromBigEndian(Hex::toBytes(json::parse(latestBlock)["result"].get()));
+ Utils::safePrint("Latest block: " + std::to_string(this->lastProcessedBlock));
+ client.close();
+ }
+
+ void Manager::broadcastTooAllPlayers(const json &msg) {
+ std::string message = msg.dump();
+ this->players_.visit_all([message](auto& player) {
+ if (auto session = player.second.lock()) {
+ Printer::safePrint("Broadcast to: " + std::to_string(player.first));
+ session->write(message);
+ }
+ });
+ // Clear up the map from bad objects!
+ this->players_.erase_if([](auto& player) {
+ return player.second.expired();
+ });
+ }
+
+
+ json Manager::buildGetChunkRequest(const int32_t& x, const int32_t& y, const uint64_t& id) {
+ Functor getChunksFunctor = ABI::FunctorEncoder::encode("getChunk");
+ Bytes data;
+ Utils::appendBytes(data, UintConv::uint32ToBytes(getChunksFunctor.value));
+ Utils::appendBytes(data, ABI::Encoder::encodeData(x, y));
+
+ json req = {
+ {"to", btvContractAddress_.hex(true)},
+ {"data", Hex::fromBytes(data, true).get()}
+ };
+ return makeRequestMethod("eth_call",
+ json::array( {req}), id
+ );
+ }
+}
+
diff --git a/src/bins/btv-server/src/manager.h b/src/bins/btv-server/src/manager.h
new file mode 100644
index 00000000..2f65ed07
--- /dev/null
+++ b/src/bins/btv-server/src/manager.h
@@ -0,0 +1,50 @@
+#ifndef BTVSERVER_MANAGER_H
+#define BTVSERVER_MANAGER_H
+
+
+#include "websocketserver.h"
+#include "../../../contract/templates/btvcommon.h"
+#include "websocketsession.h"
+#include "httpclient.h"
+#include
+
+namespace BTVServer {
+ class Manager {
+ private:
+ /**
+ * World class
+ * - 1024x1024 area => 64x64 chunks
+ * - Each chunk is 16x64x16
+ * - chunk coords in range [-32..31]
+ */
+ BTVUtils::World world_;
+ net::io_context ioc_;
+ WebsocketServer server_;
+ HTTPSemiSyncClient httpClient_;
+ Address btvContractAddress_ = Address(Hex::toBytes("0x30C37F6B1d6321C4398238525046c604C7b26150"));
+ std::shared_mutex worldMutex_;
+ boost::concurrent_flat_map> players_;
+ uint64_t lastProcessedBlock = 0;
+
+
+ public:
+ Manager();
+ ~Manager();
+
+ void handleHTTPResponse(const std::string& reqBody);
+ void registerPlayer(const uint64_t& playerId, std::weak_ptr session);
+ void removePlayer(const uint64_t& playerId);
+ void handlePlayerRequest(std::weak_ptr session, const std::string& msg);
+ void start();
+ void loadWorld();
+ void broadcastTooAllPlayers(const json& msg);
+ json buildGetChunkRequest(const int32_t& x, const int32_t& y, const uint64_t& id = 1);
+ };
+
+
+}
+
+
+
+
+#endif // BTVSERVER_MANAGER_H
\ No newline at end of file
diff --git a/src/bins/btv-server/src/socketlistener.cpp b/src/bins/btv-server/src/socketlistener.cpp
new file mode 100644
index 00000000..0f65cc4e
--- /dev/null
+++ b/src/bins/btv-server/src/socketlistener.cpp
@@ -0,0 +1,62 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "socketlistener.h"
+
+namespace BTVServer {
+ SocketListener::SocketListener(
+ net::io_context& ioc, const tcp::endpoint& ep, Manager& manager
+ ) : ioc_(ioc), acc_(net::make_strand(ioc)), manager_(manager)
+ {
+ beast::error_code ec;
+ this->acc_.open(ep.protocol(), ec); // Open the acceptor
+ if (ec) { fail("SocketListener", ec, "Failed to open the acceptor"); return; }
+ this->acc_.set_option(net::socket_base::reuse_address(true), ec); // Allow address reuse
+ if (ec) { fail("SocketListener", ec, "Failed to set address reuse"); return; }
+ this->acc_.bind(ep, ec); // Bind to the server address
+ if (ec) { fail("SocketListener", ec, "Failed to bind to server address"); return; }
+ this->acc_.listen(net::socket_base::max_listen_connections, ec); // Start listening for connections
+ if (ec) { fail("SocketListener", ec, "Failed to start listening"); return; }
+ }
+
+ void SocketListener::setup() {
+ Printer::safePrint("Starting HTTP Listener at: " + this->acc_.local_endpoint().address().to_string() + ":" + std::to_string(this->acc_.local_endpoint().port()));
+ this->do_accept(); // Start accepting connections
+ }
+
+
+ void SocketListener::do_accept() {
+ this->acc_.async_accept(net::make_strand(this->ioc_), beast::bind_front_handler(
+ &SocketListener::on_accept, this
+ ));
+ }
+
+ void SocketListener::on_accept(beast::error_code ec, tcp::socket sock) {
+ if (ec) {
+ fail("SocketListener", ec, "Failed to accept connection");
+ } else {
+ std::make_shared(
+ std::move(sock), this->manager_
+ )->start(); // Create the http session and run it
+ }
+ this->do_accept(); // Accept another connection
+ }
+
+ void SocketListener::start() {
+ net::dispatch(this->acc_.get_executor(), beast::bind_front_handler(
+ &SocketListener::setup, this
+ ));
+ }
+
+ void SocketListener::close() {
+ boost::system::error_code ec;
+ this->acc_.close(ec);
+ if (ec) {
+ fail("SocketListener", ec, "Failed to close the acceptor");
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/socketlistener.h b/src/bins/btv-server/src/socketlistener.h
new file mode 100644
index 00000000..5e2709ee
--- /dev/null
+++ b/src/bins/btv-server/src/socketlistener.h
@@ -0,0 +1,58 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef SOCKETLISTENER_H
+#define SOCKETLISTENER_H
+
+#include "websocketsession.h"
+
+namespace BTVServer {
+ class Manager;
+ /// Class for listening to, accepting and dispatching incoming connections/sessions.
+ class SocketListener {
+ private:
+ Manager& manager_; ///< Reference to the BTV server manager.
+ /// Provides core I/O functionality.
+ net::io_context& ioc_;
+
+ /// Accepts incoming connections.
+ tcp::acceptor acc_;
+
+ /// Start the Listener itself
+ void setup();
+
+ /// Accept an incoming connection from the endpoint. The new connection gets its own strand.
+ void do_accept();
+
+ /**
+ * Callback for do_accept().
+ * Automatically listens to another session when finished dispatching.
+ * @param ec The error code to parse.
+ * @param sock The socket to use for creating the HTTP session.
+ */
+ void on_accept(beast::error_code ec, tcp::socket sock);
+
+ public:
+ /**
+ * Constructor.
+ * @param ioc Reference to the core I/O functionality object.
+ * @param ep The endpoint (host and port) to listen to.
+ * @param manager Reference to the BTV server manager.
+ *
+ * Note: this constructor takes only ioc, ep and manager; it opens,
+ * binds and starts listening on the given endpoint, reporting any
+ * failure through fail().
+ */
+ SocketListener(
+ net::io_context& ioc, const tcp::endpoint& ep, Manager& manager
+ );
+
+ void start(); ///< Start accepting incoming connections.
+ void close(); ///< Stop accepting incoming connections.
+ };
+}
+#endif // SOCKETLISTENER_H
\ No newline at end of file
diff --git a/src/bins/btv-server/src/utils.cpp b/src/bins/btv-server/src/utils.cpp
new file mode 100644
index 00000000..632c5c1e
--- /dev/null
+++ b/src/bins/btv-server/src/utils.cpp
@@ -0,0 +1,7 @@
+#include "utils.h"
+
+namespace BTVServer {
+ void fail(const std::string& class_, boost::system::error_code ec, const std::string& what) {
+ Printer::safePrint(class_ + "::fail: " + what + ": " + ec.what());
+ }
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/utils.h b/src/bins/btv-server/src/utils.h
new file mode 100644
index 00000000..c80cee27
--- /dev/null
+++ b/src/bins/btv-server/src/utils.h
@@ -0,0 +1,78 @@
+#ifndef BTVSERVER_UTILS_H
+#define BTVSERVER_UTILS_H
+
+#include "../../../libs/json.hpp"
+#include "../../../utils/strings.h"
+#include
+#include
+#include
+#include
+#include
+
+#include "utils/utils.h"
+#include "utils/tx.h"
+#include
+
+namespace BTVServer {
+
+ void fail(const std::string& class_, boost::system::error_code ec, const std::string& what);
+
+ template
+ json makeRequestMethod(const std::string& method, const T& params, const uint64_t& id = 1) {
+ return json({
+ {"jsonrpc", "2.0"},
+ {"id", id},
+ {"method", method},
+ {"params", params}
+ });
+ }
+
+ class Printer {
+ private:
+ std::mutex printMutex;
+ std::unique_ptr> printQueue;
+ std::future printerFuture;
+ bool run = true;
+
+ Printer() {
+ printerFuture = std::async(std::launch::async, &Printer::print, this);
+ }
+
+ ~Printer() {
+ run = false;
+ printerFuture.get();
+ }
+
+ void print() {
+ while(run) {
+ std::unique_ptr> toPrint;
+ {
+ std::lock_guard lock(this->printMutex);
+ if (this->printQueue == nullptr) {
+ // Do absolutely nothing
+ } else {
+ toPrint = std::move(this->printQueue);
+ this->printQueue = nullptr;
+ }
+ }
+ if (toPrint != nullptr) {
+ for (const auto& str : *toPrint) {
+ std::cout << str << std::endl;
+ }
+ }
+ std::this_thread::sleep_for(std::chrono::milliseconds(10));
+ }
+ }
+ public:
+ static void safePrint (std::string&& str) {
+ static Printer printer;
+ std::lock_guard lock(printer.printMutex);
+ if (printer.printQueue == nullptr) {
+ printer.printQueue = std::make_unique>();
+ }
+ printer.printQueue->emplace_back(std::move(str));
+ }
+ };
+};
+
+#endif // BTVSERVER_UTILS_H
\ No newline at end of file
diff --git a/src/bins/btv-server/src/websocketserver.cpp b/src/bins/btv-server/src/websocketserver.cpp
new file mode 100644
index 00000000..d017c5b1
--- /dev/null
+++ b/src/bins/btv-server/src/websocketserver.cpp
@@ -0,0 +1,26 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "websocketserver.h"
+
+namespace BTVServer {
+ bool WebsocketServer::setup() {
+ // Setup tells the listener to start listening on the port
+ Printer::safePrint("Websocket Server Setup");
+ this->listener_.start();
+ Printer::safePrint("Websocket Server Setup: DONE");
+ return true;
+ }
+
+ void WebsocketServer::close() {
+ // Close tells the listener to stop listening on the port
+ Printer::safePrint("Websocket Server Close");
+ this->listener_.close();
+ Printer::safePrint("Websocket Server Close: DONE");
+ }
+
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/websocketserver.h b/src/bins/btv-server/src/websocketserver.h
new file mode 100644
index 00000000..50b22b9c
--- /dev/null
+++ b/src/bins/btv-server/src/websocketserver.h
@@ -0,0 +1,59 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef WEBSOCKETSERVER_H
+#define WEBSOCKETSERVER_H
+
+#include "socketlistener.h"
+
+
+namespace BTVServer {
+ /// Abstraction of a Websocket server.
+ class Manager;
+ class WebsocketServer {
+ private:
+ Manager& manager_; ///< Reference to the BTV server manager.
+
+ /// Provides core I/O functionality.
+ net::io_context& ioc_;
+
+ /// The Websocket listener (held by value, not a pointer).
+ SocketListener listener_;
+
+ /// The endpoint where the server is running.
+ tcp::endpoint tcpEndpoint_;
+
+ public:
+ /// The run function (effectively starts the server).
+ bool setup();
+ /**
+ * Constructor. Does NOT automatically start the server.
+ * @param manager Reference to the BTV server manager.
+ * @param ioc Reference to the core I/O functionality object.
+ * @param endpoint The endpoint (host and port) the server listens on.
+ *
+ */
+ WebsocketServer(Manager& manager, net::io_context& ioc, const tcp::endpoint& endpoint) :
+ manager_(manager),
+ ioc_(ioc),
+ listener_(ioc, endpoint, manager)
+ {
+ std::cout << "Constructing at port: " << endpoint.port() << std::endl;
+ }
+
+ /**
+ * Destructor.
+ * Does NOT automatically stop the server; call close() explicitly.
+ */
+ ~WebsocketServer() { }
+
+ void close();
+
+ };
+}
+
+#endif // WEBSOCKETSERVER_H
\ No newline at end of file
diff --git a/src/bins/btv-server/src/websocketsession.cpp b/src/bins/btv-server/src/websocketsession.cpp
new file mode 100644
index 00000000..29c2daab
--- /dev/null
+++ b/src/bins/btv-server/src/websocketsession.cpp
@@ -0,0 +1,105 @@
+#include "websocketsession.h"
+#include "manager.h"
+
+
+namespace BTVServer {
+ WebsocketSession::WebsocketSession(tcp::socket&& socket, Manager& manager)
+ : manager_(manager), ws_(std::move(socket)), strand_(ws_.get_executor()) {
+ auto rand = Utils::randBytes(8);
+ std::memcpy(&id_, rand.data(), 8);
+ }
+
+ WebsocketSession::~WebsocketSession() {
+ if (this->registered_) {
+ this->manager_.removePlayer(this->id_);
+ }
+ }
+ void WebsocketSession::doAccept() {
+ // Accept the websocket handshake
+ ws_.async_accept(beast::bind_front_handler(&WebsocketSession::onAccept, shared_from_this()));
+ }
+
+ void WebsocketSession::onAccept(beast::error_code ec) {
+ if (ec) {
+ this->onError();
+ return fail("WebsocketSession", ec, "accept");
+ }
+ // Register the websocket session into the manager
+ this->registered_ = true;
+ this->manager_.registerPlayer(this->id_, weak_from_this());
+ // Start reading messages from the server
+ this->doRead();
+ }
+
+ void WebsocketSession::doRead() {
+ // Read a message into our buffer
+ ws_.async_read(buffer_, beast::bind_front_handler(&WebsocketSession::onRead, shared_from_this()));
+ }
+
+ void WebsocketSession::onRead(beast::error_code ec, std::size_t bytes_transferred) {
+ if (ec) {
+ this->onError();
+ return fail("WebsocketSession", ec, "read");
+ }
+
+ // Send to manager
+ manager_.handlePlayerRequest(
+ weak_from_this(), boost::beast::buffers_to_string(buffer_.data())
+ );
+ // Clear the buffer
+ buffer_.consume(buffer_.size());
+ // Read again
+ doRead();
+ }
+
+ void WebsocketSession::onWrite(beast::error_code ec, std::size_t bytes_transferred) {
+ if (ec) {
+ return fail("WebsocketSession", ec, "write");
+ }
+ std::unique_lock lock(writeQueueMutex_);
+ if (writeQueue_.empty()) {
+ writeMsg_.reset();
+ } else {
+ writeMsg_ = std::move(writeQueue_.front());
+ writeQueue_.pop_front();
+ ws_.async_write(net::buffer(*writeMsg_), beast::bind_front_handler(&WebsocketSession::onWrite, shared_from_this()));
+ }
+ }
+
+ void WebsocketSession::onError() {
+ // If it is NOT closed, close it, and set closed_ to true
+ if (!this->closed_.exchange(true)) {
+ Printer::safePrint("Closing the websocket session");
+ ws_.async_close(websocket::close_code::normal, beast::bind_front_handler(&WebsocketSession::onClose, shared_from_this()));
+ }
+ }
+
+ void WebsocketSession::onClose(beast::error_code ec) {
+ if (ec) {
+ return fail("WebsocketSession", ec, "close");
+ }
+ // Do nothing
+ }
+
+ void WebsocketSession::write(const std::string& msg) {
+ // Send the message
+ auto messagePtr = std::make_unique(msg);
+ std::unique_lock lock(writeQueueMutex_);
+ if (this->writeMsg_ == nullptr) {
+ this->writeMsg_ = std::move(messagePtr);
+ ws_.async_write(net::buffer(*this->writeMsg_), beast::bind_front_handler(&WebsocketSession::onWrite, shared_from_this()));
+ } else {
+ writeQueue_.push_back(std::move(messagePtr));
+ }
+ }
+
+ void WebsocketSession::stop() {
+ // Close the WebSocket connection
+ ws_.async_close(websocket::close_code::normal, beast::bind_front_handler(&WebsocketSession::onClose, shared_from_this()));
+ }
+
+ void WebsocketSession::start() {
+ // Accept the websocket handshake
+ doAccept();
+ }
+}
\ No newline at end of file
diff --git a/src/bins/btv-server/src/websocketsession.h b/src/bins/btv-server/src/websocketsession.h
new file mode 100644
index 00000000..e4dbf963
--- /dev/null
+++ b/src/bins/btv-server/src/websocketsession.h
@@ -0,0 +1,57 @@
+#ifndef WEBSOCKETSESSION_H
+#define WEBSOCKETSESSION_H
+#include
+#include
+#include
+#include
+#include
+#include "utils.h"
+
+namespace beast = boost::beast; // from
+namespace http = beast::http; // from
+namespace websocket = beast::websocket; // from
+namespace net = boost::asio; // from
+using tcp = boost::asio::ip::tcp; // from
+
+// Forward declaration
+namespace BTVServer {
+ class Manager;
+ class WebsocketSession : public std::enable_shared_from_this {
+ private:
+ Manager& manager_;
+ beast::websocket::stream ws_;
+ beast::flat_buffer buffer_; // Must persist between reads
+ net::strand strand_; // Strand to post write operations to
+ std::atomic_bool closed_ = false;
+ std::atomic_bool registered_ = false;
+ std::unique_ptr writeMsg_;
+ std::deque> writeQueue_;
+ std::mutex writeQueueMutex_;
+ uint64_t id_ = 0;
+
+ void doAccept();
+ void onAccept(beast::error_code ec);
+ void doRead();
+ void onRead(beast::error_code ec, std::size_t bytes_transferred);
+ void onWrite(beast::error_code ec, std::size_t bytes_transferred);
+ void onClose(beast::error_code ec);
+ void onError();
+
+ public:
+ WebsocketSession(tcp::socket&& ioc, Manager& manager);
+ ~WebsocketSession();
+
+ void write(const std::string& msg);
+ void stop();
+ void start();
+ const uint64_t& getId() {
+ return id_;
+ }
+ };
+}
+
+
+
+
+
+#endif // WEBSOCKETSESSION_H
\ No newline at end of file
diff --git a/src/bins/contractabigenerator/CMakeLists.txt b/src/bins/contractabigenerator/CMakeLists.txt
new file mode 100644
index 00000000..542fc16a
--- /dev/null
+++ b/src/bins/contractabigenerator/CMakeLists.txt
@@ -0,0 +1,16 @@
+# Compile and link the ABI generator executable
+add_executable(contractabigenerator "main.cpp")
+
+add_dependencies(contractabigenerator bdk_lib)
+
+target_include_directories(contractabigenerator PRIVATE
+ bdk_lib ${OPENSSL_INCLUDE_DIR} ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+)
+
+target_link_libraries(contractabigenerator
+ bdk_lib ${SPEEDB_LIBRARY} ${SNAPPY_LIBRARY} ${Boost_LIBRARIES}
+ ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+)
+
diff --git a/src/main-contract-abi.cpp b/src/bins/contractabigenerator/main.cpp
similarity index 82%
rename from src/main-contract-abi.cpp
rename to src/bins/contractabigenerator/main.cpp
index 09243383..8ab18d33 100644
--- a/src/main-contract-abi.cpp
+++ b/src/bins/contractabigenerator/main.cpp
@@ -1,5 +1,5 @@
/*
-Copyright (c) [2023-2024] [Sparq Network]
+Copyright (c) [2023-2024] [AppLayer Developers]
This software is distributed under the MIT License.
See the LICENSE.txt file in the project root for more information.
diff --git a/src/bins/faucet-api/CMakeLists.txt b/src/bins/faucet-api/CMakeLists.txt
new file mode 100644
index 00000000..2fa88de3
--- /dev/null
+++ b/src/bins/faucet-api/CMakeLists.txt
@@ -0,0 +1,67 @@
+if (BUILD_FAUCET)
+ add_library(rollup_faucet_lib STATIC
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/jsonrpc/encoding.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/jsonrpc/decoding.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/jsonrpc/encoding.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/jsonrpc/decoding.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httplistener.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpparser.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpserver.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpsession.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/faucetmanager.h
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httplistener.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpparser.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpserver.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/httpsession.cpp
+ ${CMAKE_SOURCE_DIR}/src/bins/faucet-api/src/faucetmanager.cpp
+ )
+
+ target_include_directories(rollup_faucet_lib PRIVATE
+ ${CMAKE_SOURCE_DIR}/include ${OPENSSL_INCLUDE_DIR} bdk_lib
+ ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(rollup_faucet_lib PRIVATE bdk_lib
+ ${CRYPTOPP_LIBRARIES} ${SCRYPT_LIBRARY} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY} ${SPEEDB_LIBRARY}
+ ${Boost_LIBRARIES} ${OPENSSL_LIBRARIES} bdk_lib
+ )
+
+ # Compile and link the faucet-api executable
+ add_executable(faucet-api "main.cpp")
+
+ add_dependencies(faucet-api bdk_lib rollup_faucet_lib)
+
+ target_include_directories(faucet-api PRIVATE
+ bdk_lib rollup_faucet_lib ${OPENSSL_INCLUDE_DIR}
+ ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(faucet-api
+ bdk_lib rollup_faucet_lib
+ ${SPEEDB_LIBRARY} ${SNAPPY_LIBRARY} ${Boost_LIBRARIES}
+ ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+ )
+
+  # Compile and link the faucet-tester executable
+ add_executable(faucet-tester "main-tester.cpp")
+
+ add_dependencies(faucet-tester bdk_lib rollup_faucet_lib)
+
+ target_include_directories(faucet-tester PRIVATE
+ bdk_lib rollup_faucet_lib ${OPENSSL_INCLUDE_DIR}
+ ${ETHASH_INCLUDE_DIR} ${KECCAK_INCLUDE_DIR}
+ ${SPEEDB_INCLUDE_DIR} ${SECP256K1_INCLUDE_DIR}
+ )
+
+ target_link_libraries(faucet-tester
+ bdk_lib rollup_faucet_lib
+ ${SPEEDB_LIBRARY} ${SNAPPY_LIBRARY} ${Boost_LIBRARIES}
+ ${OPENSSL_LIBRARIES} ${ZLIB_LIBRARIES} ${SECP256K1_LIBRARY}
+ ${ETHASH_LIBRARY} ${KECCAK_LIBRARY}
+ )
+endif()
+
diff --git a/src/bins/faucet-api/main-tester.cpp b/src/bins/faucet-api/main-tester.cpp
new file mode 100644
index 00000000..674ce95e
--- /dev/null
+++ b/src/bins/faucet-api/main-tester.cpp
@@ -0,0 +1,97 @@
+#include "src/faucetmanager.h"
+#include
+
+// This is a "simulator": it asks for the HTTP endpoint (IP:PORT) and a
+// number of iterations, then repeatedly hits the faucet endpoint.
+int main() {
+ std::vector faucetWorkers;
+ std::pair httpEndpoint;
+ uint64_t iterations;
+
+ std::cout << "Welcome to the faucet API provider tester" << std::endl;
+ std::cout << "This API provider is designed to generate random accounts and request funds from the faucet" << std::endl;
+ std::cout << "It will dump the privkeys to the faucettester.txt" << std::endl;
+
+ std::cout << "Please provide the HTTP endpoint (IP:PORT) (empty for default: 127.0.0.1:28888): " << std::endl;
+ std::string httpEndpointStr;
+ std::getline(std::cin, httpEndpointStr);
+ if (httpEndpointStr.empty()) {
+ httpEndpoint = std::make_pair(net::ip::address_v4::from_string("127.0.0.1"), 28888);
+ } else {
+ std::vector parts;
+ boost::split(parts, httpEndpointStr, boost::is_any_of(":"));
+ if (parts.size() != 2) {
+ throw DynamicException("Invalid HTTP endpoint");
+ }
+ try {
+ httpEndpoint = std::make_pair(net::ip::address_v4::from_string(parts[0]), std::stoul(parts[1]));
+ } catch (const std::exception& e) {
+ throw DynamicException("Invalid HTTP endpoint");
+ }
+ }
+
+ // Ask for a iteration quantity to start banging the faucet endpoint
+ std::cout << "Please type the number of iterations to start banging the faucet endpoint (empty for default: 25000): " << std::endl;
+ std::string iterationsStr;
+ std::getline(std::cin, iterationsStr);
+ if (iterationsStr.empty()) {
+ iterations = 25000;
+ } else {
+ for (const auto& c : iterationsStr) {
+ if (!std::isdigit(c)) {
+ throw DynamicException("Invalid iterations");
+ }
+ }
+ iterations = std::stoull(iterationsStr);
+ }
+
+ std::cout << "Creating worker accounts..." << std::endl;
+
+ for (uint64_t i = 0; i < iterations; i++) {
+ faucetWorkers.emplace_back(PrivKey(Utils::randBytes(32)));
+ }
+
+ std::cout << "Worker accounts created size: " << faucetWorkers.size() << std::endl;
+ std::cout << "Dumping privkeys to faucettester.txt" << std::endl;
+ std::ofstream file("faucettester.txt");
+ for (const auto& worker : faucetWorkers) {
+ file << worker.privKey.hex(true) << std::endl;
+ }
+ file.close();
+
+ std::cout << "Creating the requests..." << std::endl;
+ std::vector requests;
+ for (const auto& worker : faucetWorkers) {
+ requests.push_back(Faucet::Manager::makeDripToAddress(worker.address));
+ }
+
+ std::cout << "Requests created size: " << requests.size() << std::endl;
+ std::cout << "Creating HTTP client..." << std::endl;
+
+ HTTPSyncClient client(httpEndpoint.first.to_string(), std::to_string(httpEndpoint.second));
+
+
+ client.connect();
+
+ std::cout << "Type anything to start banging the faucet endpoint" << std::endl;
+ std::string dummy;
+ std::getline(std::cin, dummy);
+
+
+ for (uint64_t i = 0 ; i < requests.size(); i++) {
+ if (i % 100 == 0) {
+ std::cout << "Iteration: " << i << std::endl;
+ }
+ std::this_thread::sleep_for(std::chrono::milliseconds(1)); /// Sleep for 1ms to avoid spamming the endpoint too much lol
+ std::string response = client.makeHTTPRequest(requests[i]);
+ json j = json::parse(response);
+ if (!j.contains("result")) {
+ std::cout << "Error: " << j.dump(2) << std::endl;
+ }
+ if (j["result"] != "0x1") {
+ std::cout << "Error: " << j.dump(2) << std::endl;
+ }
+ }
+
+ return 0;
+}
\ No newline at end of file
diff --git a/src/bins/faucet-api/main.cpp b/src/bins/faucet-api/main.cpp
new file mode 100644
index 00000000..a194d9a3
--- /dev/null
+++ b/src/bins/faucet-api/main.cpp
@@ -0,0 +1,113 @@
+#include "src/faucetmanager.h"
+#include
+// In order to construct the faucet manager, we need to load the following:
+// const std::vector& faucetWorkers,
+// const uint64_t& chainId,
+// const std::pair& httpEndpoint,
+// const uint16_t& port
+// For that we ask the user:
+// A file path to a list of private keys (one hex key per line)
+// The chain ID
+// The HTTP endpoint (for HTTP client) (IP:PORT)
+// The port for the server
+int main() {
+ Log::logToCout = true;
+ std::vector faucetWorkers;
+ uint64_t chainId;
+ std::pair httpEndpoint;
+ uint16_t port;
+
+
+
+ std::cout << "Welcome to the faucet API provider" << std::endl;
+ std::cout << "This API provider is designed to load a list of keys from a file and provide a faucet service" << std::endl;
+ std::cout << "Using the keys provided to sign transactions" << std::endl;
+
+ std::cout << "Please type the file path to the list of private keys (emtpy for default: \"privkeys.txt\"): " << std::endl;
+ std::string filePath;
+ std::getline(std::cin, filePath);
+ if (filePath.empty()) {
+ filePath = "privkeys.txt";
+ }
+ if (!std::filesystem::is_regular_file(filePath)) {
+ throw DynamicException("Invalid file path for private keys");
+ }
+
+ std::ifstream file(filePath);
+ std::string line;
+ while (std::getline(file, line)) {
+ Bytes key = Hex::toBytes(line);
+ if (key.size() != 32) {
+ throw DynamicException("Invalid private key");
+ }
+ faucetWorkers.push_back(WorkerAccount(PrivKey(key)));
+ }
+
+ std::cout << "Please provide the chain Id (empty for default: 808080): " << std::endl;
+ std::string chainIdStr;
+ std::getline(std::cin, chainIdStr);
+
+ if (chainIdStr.empty()) {
+ chainId = 808080;
+ } else {
+ for (const auto& c : chainIdStr) {
+ if (!std::isdigit(c)) {
+ throw DynamicException("Invalid chain Id");
+ }
+ }
+ chainId = std::stoull(chainIdStr);
+ }
+
+ std::cout << "Please provide the HTTP endpoint (IP:PORT) (empty for default: 127.0.0.1:8090): " << std::endl;
+ std::string httpEndpointStr;
+ std::getline(std::cin, httpEndpointStr);
+ if (httpEndpointStr.empty()) {
+ httpEndpoint = std::make_pair(net::ip::address_v4::from_string("127.0.0.1"), 8090);
+ } else {
+ std::vector parts;
+ boost::split(parts, httpEndpointStr, boost::is_any_of(":"));
+ if (parts.size() != 2) {
+ throw DynamicException("Invalid HTTP endpoint");
+ }
+ try {
+ httpEndpoint = std::make_pair(net::ip::address_v4::from_string(parts[0]), std::stoul(parts[1]));
+ } catch (const std::exception& e) {
+ throw DynamicException("Invalid HTTP endpoint");
+ }
+ }
+
+ std::cout << "Please provide the port for the server (empty for default: 28888): " << std::endl;
+ std::string portStr;
+ std::getline(std::cin, portStr);
+ if (portStr.empty()) {
+ port = 28888;
+ } else {
+ for (const auto& c : portStr) {
+ if (!std::isdigit(c)) {
+ throw DynamicException("Invalid port");
+ }
+ }
+ port = std::stoull(portStr);
+ }
+
+ std::cout << "Loaded: " << faucetWorkers.size() << " PrivKeys" << std::endl;
+ std::cout << "ChainID: " << chainId << std::endl;
+ std::cout << "HTTP endpoint: " << httpEndpoint.first << ":" << httpEndpoint.second << std::endl;
+ std::cout << "Port: " << port << std::endl;
+ std::cout << "Please type anything to start the faucet" << std::endl;
+ std::string start;
+ std::getline(std::cin, start);
+
+ Faucet::Manager manager(faucetWorkers, chainId, httpEndpoint, port);
+ manager.setup();
+ manager.run();
+
+
+
+
+
+
+
+
+ return 0;
+}
\ No newline at end of file
diff --git a/src/bins/faucet-api/src/faucetmanager.cpp b/src/bins/faucet-api/src/faucetmanager.cpp
new file mode 100644
index 00000000..7864edb5
--- /dev/null
+++ b/src/bins/faucet-api/src/faucetmanager.cpp
@@ -0,0 +1,173 @@
+#include "faucetmanager.h"
+template <typename T>
+std::string makeRequestMethod(const std::string& method, const T& params) {
+ return json({
+ {"jsonrpc", "2.0"},
+ {"id", 1},
+ {"method", method},
+ {"params", params}
+ }).dump();
+}
+
+
+namespace Faucet {
+ bool FaucetWorker::run() {
+ bool log = true;
+ while(!this->stop_) {
+ try {
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ std::unique_ptr<std::vector<Address>> dripQueue;
+ {
+ std::unique_lock lock(this->manager_.dripMutex_);
+ if (this->manager_.dripQueue_ == nullptr) {
+ if (log) {
+ Utils::safePrint("No more addresses to drip to, sleeping for 100ms");
+ log = false;
+ }
+ continue;
+ }
+ log = true;
+ dripQueue = std::move(this->manager_.dripQueue_);
+ // If the dripQueue is bigger than the number of accounts
+ // We can only process the amount of accounts available in the Manager::faucetWorkers_.size()
+ // Meaning the remaining accounts needs to be replaced back into the queue.
+ if (dripQueue->size() > this->manager_.faucetWorkers_.size()) {
+ this->manager_.dripQueue_ = std::make_unique<std::vector<Address>>(dripQueue->begin() + this->manager_.faucetWorkers_.size(), dripQueue->end());
+ // Resize the dripQueue to the size of the number of accounts
+ dripQueue->resize(this->manager_.faucetWorkers_.size());
+ } else {
+ this->manager_.dripQueue_ = nullptr;
+ }
+ Utils::safePrint("Dripping to " + std::to_string(dripQueue->size()) + " addresses");
+ }
+
+ std::vector<std::string> sendTxPackets;
+ std::vector<std::pair<Hash, bool>> sendTxHashes;
+ for (uint64_t i = 0; i < dripQueue->size(); ++i) {
+ const auto& address = dripQueue->at(i);
+ Utils::safePrint("Dripping to address: " + address.hex(true).get());
+ sendTxPackets.emplace_back(this->manager_.createTransactions(
+ this->manager_.faucetWorkers_[i],
+ 1000000000000000000,
+ this->manager_.chainId_,
+ address
+ ));
+
+ }
+
+ Utils::safePrint("Sending " + std::to_string(sendTxPackets.size()) + " faucet transactions to the network");
+
+ for (auto& tx : sendTxPackets) {
+ std::this_thread::sleep_for(std::chrono::microseconds(3));
+ auto response = this->client_.makeHTTPRequest(tx);
+ auto json = json::parse(response);
+ if (json.contains("error")) {
+ throw std::runtime_error("Error while sending transactions: sent: " + tx + " received: " + json.dump());
+ }
+ sendTxHashes.emplace_back(Hex::toBytes(json["result"].get<std::string>()), false);
+ }
+
+ Utils::safePrint("Confirming " + std::to_string(sendTxHashes.size()) + " faucet transactions to the network");
+
+ for (uint64_t i = 0; i < sendTxHashes.size(); ++i) {
+ while (sendTxHashes[i].second == false) {
+ std::this_thread::sleep_for(std::chrono::microseconds(3));
+ auto response = this->client_.makeHTTPRequest(makeRequestMethod("eth_getTransactionReceipt", json::array({sendTxHashes[i].first.hex(true).get()})));
+ auto json = json::parse(response);
+ if (json.contains("error")) {
+ throw std::runtime_error("Error while confirming transactions: sent: " + sendTxHashes[i].first.hex(true).get() + " received: " + json.dump());
+ }
+ if (json["result"].is_null()) {
+ continue;
+ }
+ sendTxHashes[i].second = true;
+ this->manager_.faucetWorkers_[i].nonce += 1;
+ }
+ }
+ // Update nonce
+ } catch (std::exception& e) {
+ LOGERRORP(std::string("Error while processing dripToAddress: ") + e.what());
+ }
+ }
+ return true;
+ }
+
+ void FaucetWorker::start() {
+ this->stop_ = false;
+ if (this->runFuture_.valid()) {
+ throw std::runtime_error("FaucetWorker already running");
+ }
+ this->runFuture_ = std::async(std::launch::async, &FaucetWorker::run, this);
+ }
+
+ void FaucetWorker::stop() {
+ if (!this->runFuture_.valid()) {
+ throw std::runtime_error("FaucetWorker not running");
+ }
+ this->stop_ = true;
+ this->runFuture_.get();
+ }
+
+
+
+ std::string Manager::makeDripToAddress(const Address& address) {
+ return makeRequestMethod("dripToAddress", json::array({address.hex(true).get()}));
+ }
+
+ void Manager::setup() {
+ std::cout << "Setting up the faucet manager" << std::endl;
+ std::cout << "Requesting nonces from the network" << std::endl;
+
+ for (auto& worker : this->faucetWorkers_) {
+ HTTPSyncClient client(this->httpEndpoint_.first.to_string(), std::to_string(this->httpEndpoint_.second));
+ client.connect();
+ auto response = client.makeHTTPRequest(makeRequestMethod("eth_getTransactionCount", json::array({worker.address.hex(true).get(), "latest"})));
+ auto json = json::parse(response);
+ if (json.contains("error")) {
+ throw std::runtime_error("Error while getting nonce: " + response);
+ }
+ worker.nonce = Hex(json["result"].get<std::string>()).getUint();
+ }
+ std::cout << "Nonces received!" << std::endl;
+ }
+
+ void Manager::run() {
+ std::cout << "Running faucet service..." << std::endl;
+ this->faucetWorker_.start();
+ this->server_.run();
+ }
+
+
+ std::string Manager::createTransactions(WorkerAccount& account,
+ const uint256_t& txNativeBalance,
+ const uint64_t& chainId,
+ const Address& to) {
+ return makeRequestMethod("eth_sendRawTransaction",
+ json::array({Hex::fromBytes(
+ TxBlock(
+ to,
+ account.address,
+ {},
+ chainId,
+ account.nonce,
+ txNativeBalance,
+ 1000000000,
+ 1000000000,
+ 21000,
+ account.privKey).rlpSerialize()
+ ,true).forRPC()}));
+ }
+
+ void Manager::processDripToAddress(const Address& address) {
+ // Firstly, lock the current state and check if existed, then grab the current worker account and move the index.
+ std::unique_lock lock(this->dripMutex_);
+ if (this->dripQueue_ == nullptr) {
+ this->dripQueue_ = std::make_unique>();
+ }
+ this->dripQueue_->emplace_back(address);
+ }
+
+ void Manager::dripToAddress(const Address& address) {
+ this->threadPool_.push_task(&Manager::processDripToAddress, this, address);
+ }
+}
\ No newline at end of file
diff --git a/src/bins/faucet-api/src/faucetmanager.h b/src/bins/faucet-api/src/faucetmanager.h
new file mode 100644
index 00000000..7b628b8f
--- /dev/null
+++ b/src/bins/faucet-api/src/faucetmanager.h
@@ -0,0 +1,162 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef FAUCETMANAGER_H
+#define FAUCETMANAGER_H
+
+#include "httpserver.h"
+#include "net/http/httpclient.h"
+#include "utils/utils.h"
+#include "utils/tx.h"
+#include "net/http/httpclient.h"
+#include
+#include "libs/BS_thread_pool_light.hpp"
+#include
+#include
+#include "utils/safehash.h"
+
+/// Helper struct that abstracts a worker account.
+struct WorkerAccount {
+ const PrivKey privKey; ///< Private key of the account.
+ const Address address; ///< Address of the account.
+ uint256_t nonce; ///< Current nonce of the account.
+ std::mutex inUse_; ///< Mutex for when the account is in use.
+
+ /**
+ * Constructor.
+ * @param privKey Private key of the account.
+ */
+ explicit WorkerAccount (const PrivKey& privKey) : privKey(privKey), address(Secp256k1::toAddress(Secp256k1::toUPub(privKey))), nonce(0) {}
+
+ /// Copy constructor.
+ WorkerAccount(const WorkerAccount& other) : privKey(other.privKey), address(other.address), nonce(other.nonce) {}
+};
+
+/// Namespace for faucet-related functionalities.
+namespace Faucet {
+ // Forward declaration.
+ class Manager;
+
+ /**
+ * Helper worker class for the faucet.
+ * Consumes Manager::dripQueue_, setting it to nullptr after copying it.
+ * Locks dripMutex_ to get the next list to drip to.
+ */
+ class FaucetWorker {
+ private:
+ Manager& manager_; ///< Reference to the manager.
+ HTTPSyncClient client_; ///< Reference to the HTTP sync client.
+ std::future<bool> runFuture_; ///< Future for the run function so we know when it should stop.
+ std::atomic stop_ = false; ///< Flag that tells the worker to stop.
+ bool run(); ///< Start the worker loop.
+
+ public:
+ /**
+ * Constructor.
+ * @param manager Reference to the manager.
+ * @param httpEndpoint The endpoint to operate on.
+ */
+ FaucetWorker(Manager& manager, const std::pair<net::ip::address_v4, uint16_t>& httpEndpoint)
+ : manager_(manager), client_(httpEndpoint.first.to_string(), std::to_string(httpEndpoint.second))
+ { this->client_.connect(); }
+
+ /// Destructor.
+ ~FaucetWorker() { this->client_.close(); this->stop(); }
+
+ FaucetWorker(const FaucetWorker& other) = delete; ///< Copy constructor (deleted, Rule of Zero).
+ FaucetWorker& operator=(const FaucetWorker& other) = delete; ///< Copy assignment operator (deleted, Rule of Zero).
+ FaucetWorker(FaucetWorker&& other) = delete; ///< Move constructor (deleted, Rule of Zero).
+ FaucetWorker& operator=(FaucetWorker&& other) = delete; ///< Move assignment operator (deleted, Rule of Zero).
+
+ void start(); ///< Start the worker.
+ void stop(); ///< Stop the worker.
+ };
+
+ /// Faucet manager class.
+ class Manager {
+ private:
+ FaucetWorker faucetWorker_; ///< Worker object.
+ BS::thread_pool_light threadPool_; ///< Thread pool.
+ std::vector<WorkerAccount> faucetWorkers_; ///< List of worker objects.
+ const uint64_t chainId_; ///< Chain ID that the faucet is operating on.
+ HTTPServer server_; ///< HTTP server object.
+ const std::pair<net::ip::address_v4, uint16_t> httpEndpoint_; ///< HTTP endpoint to be used for the client
+ const uint16_t port_; ///< Port to be used for the server
+ std::mutex dripMutex_; ///< Mutex for managing read/write access to the drip list.
+ std::unique_ptr<std::vector<Address>> dripQueue_; ///< List of drip addresses to iterate on.
+ std::mutex lastIndexMutex_; ///< Mutex for managing read/write access to the last index.
+ uint64_t lastIndex_ = 0; ///< Last index.
+ std::shared_mutex accountsMutex_; ///< Mutex for managing read/write access to the accounts list.
+ std::unordered_set<Address, SafeHash> accounts_; ///< List of accounts.
+
+ public:
+
+ /**
+ * Constructor.
+ * @param faucetWorkers List of faucet worker objects.
+ * @param chainId Chain ID that the faucet will operate on.
+ * @param httpEndpoint Endpoint that the faucet will iterate on.
+ * @param port Port that the faucet will operate on.
+ */
+ Manager(
+ const std::vector<WorkerAccount>& faucetWorkers,
+ const uint64_t& chainId,
+ const std::pair<net::ip::address_v4, uint16_t>& httpEndpoint,
+ const uint16_t& port
+ ) : faucetWorkers_(faucetWorkers),
+ chainId_(chainId),
+ httpEndpoint_(httpEndpoint),
+ port_(port),
+ server_(port, *this),
+ threadPool_(8),
+ faucetWorker_(*this, httpEndpoint) {}
+
+ Manager(const Manager& other) = delete; ///< Copy constructor (deleted, Rule of Zero).
+ Manager& operator=(const Manager& other) = delete; ///< Copy assignment operator (deleted, Rule of Zero).
+ Manager(Manager&& other) = delete; ///< Move constructor (deleted, Rule of Zero).
+ Manager& operator=(Manager&& other) = delete; ///< Move assignment operator (deleted, Rule of Zero).
+
+ /**
+ * Request a drip to a given address.
+ * @param address The address to drip into.
+ * @return A string containing the result of the drip request.
+ */
+ static std::string makeDripToAddress(const Address& address);
+
+ /**
+ * Make a new "send" transaction (eth_sendRawTransaction).
+ * @param account The account to be used.
+ * @param txNativeBalance The transaction native balance to be used.
+ * @param chainId The chain ID to be used.
+ * @param to The address to send the transaction(s) to.
+ * @return The resulting json string of the eth_sendRawTransaction operation.
+ */
+ static std::string createTransactions(
+ WorkerAccount& account, const uint256_t& txNativeBalance,
+ const uint64_t& chainId, const Address& to
+ );
+
+ void setup(); ///< Setup the faucet.
+ void run(); ///< Run the faucet.
+
+ /**
+ * Process the next drip request in the queue for a given address.
+ * @param address The address to process the drip request of.
+ */
+ void processDripToAddress(const Address& address);
+
+ /**
+ * Execute the drip request to a given address.
+ * @param address The address to execute the drip on.
+ */
+ void dripToAddress(const Address& address);
+
+ friend class FaucetWorker;
+ };
+};
+
+#endif // FAUCETMANAGER_H
diff --git a/src/bins/faucet-api/src/httplistener.cpp b/src/bins/faucet-api/src/httplistener.cpp
new file mode 100644
index 00000000..c634e2ca
--- /dev/null
+++ b/src/bins/faucet-api/src/httplistener.cpp
@@ -0,0 +1,48 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "httplistener.h"
+
+namespace Faucet {
+ HTTPListener::HTTPListener(
+ net::io_context& ioc, tcp::endpoint ep, const std::shared_ptr<const std::string>& docroot, Manager& faucet
+ ) : ioc_(ioc), acc_(net::make_strand(ioc)), docroot_(docroot), faucet_(faucet)
+ {
+ beast::error_code ec;
+ this->acc_.open(ep.protocol(), ec); // Open the acceptor
+ if (ec) { fail("HTTPListener", __func__, ec, "Failed to open the acceptor"); return; }
+ this->acc_.set_option(net::socket_base::reuse_address(true), ec); // Allow address reuse
+ if (ec) { fail("HTTPListener", __func__, ec, "Failed to set address reuse"); return; }
+ this->acc_.bind(ep, ec); // Bind to the server address
+ if (ec) { fail("HTTPListener", __func__, ec, "Failed to bind to server address"); return; }
+ this->acc_.listen(net::socket_base::max_listen_connections, ec); // Start listening for connections
+ if (ec) { fail("HTTPListener", __func__, ec, "Failed to start listening"); return; }
+ }
+
+ void HTTPListener::do_accept() {
+ this->acc_.async_accept(net::make_strand(this->ioc_), beast::bind_front_handler(
+ &HTTPListener::on_accept, this->shared_from_this()
+ ));
+ }
+
+ void HTTPListener::on_accept(beast::error_code ec, tcp::socket sock) {
+ if (ec) {
+ fail("HTTPListener", __func__, ec, "Failed to accept connection");
+ } else {
+ std::make_shared<HTTPSession>(
+ std::move(sock), this->docroot_, this->faucet_
+ )->start(); // Create the http session and run it
+ }
+ this->do_accept(); // Accept another connection
+ }
+
+ void HTTPListener::start() {
+ net::dispatch(this->acc_.get_executor(), beast::bind_front_handler(
+ &HTTPListener::do_accept, this->shared_from_this()
+ ));
+ }
+}
diff --git a/src/bins/faucet-api/src/httplistener.h b/src/bins/faucet-api/src/httplistener.h
new file mode 100644
index 00000000..3c9c8d59
--- /dev/null
+++ b/src/bins/faucet-api/src/httplistener.h
@@ -0,0 +1,51 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef HTTPLISTENER_H
+#define HTTPLISTENER_H
+
+#include "httpparser.h"
+#include "httpsession.h"
+
+/// Namespace for faucet-related functionalities.
+namespace Faucet {
+ class Manager; // Forward declaration.
+
+ /// Class for listening to, accepting and dispatching incoming connections/sessions.
+ class HTTPListener : public std::enable_shared_from_this<HTTPListener> {
+ private:
+ Manager& faucet_; ///< Reference to the faucet manager.
+ net::io_context& ioc_; ///< Provides core I/O functionality.
+ tcp::acceptor acc_; ///< Accepts incoming connections.
+ const std::shared_ptr<const std::string> docroot_; ///< Pointer to the root directory of the endpoint.
+
+ void do_accept(); ///< Accept an incoming connection from the endpoint. The new connection gets its own strand.
+
+ /**
+ * Callback for do_accept().
+ * Automatically listens to another session when finished dispatching.
+ * @param ec The error code to parse.
+ * @param sock The socket to use for creating the HTTP session.
+ */
+ void on_accept(beast::error_code ec, tcp::socket sock);
+
+ public:
+ /**
+ * Constructor.
+ * @param ioc Reference to the core I/O functionality object.
+ * @param ep The endpoint (host and port) to listen to.
+ * @param docroot Reference pointer to the root directory of the endpoint.
+ * @param faucet Reference to the faucet manager.
+ */
+ HTTPListener(
+ net::io_context& ioc, tcp::endpoint ep, const std::shared_ptr<const std::string>& docroot, Manager& faucet
+ );
+
+ void start(); ///< Start accepting incoming connections.
+ };
+}
+#endif // HTTPLISTENER_H
diff --git a/src/bins/faucet-api/src/httpparser.cpp b/src/bins/faucet-api/src/httpparser.cpp
new file mode 100644
index 00000000..01297d9b
--- /dev/null
+++ b/src/bins/faucet-api/src/httpparser.cpp
@@ -0,0 +1,61 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "httpparser.h"
+
+namespace Faucet {
+ std::string parseJsonRpcRequest(
+ const std::string& body, Manager& faucet
+ ) {
+ json ret;
+ uint64_t id = 0;
+ try {
+ json request = json::parse(body);
+ if (!JsonRPC::Decoding::checkJsonRPCSpec(request)) {
+ ret["error"]["code"] = -32600;
+ ret["error"]["message"] = "Invalid request - does not conform to JSON-RPC 2.0 spec";
+ return ret.dump();
+ }
+
+ auto RequestMethod = JsonRPC::Decoding::getMethod(request);
+ switch (RequestMethod) {
+ case JsonRPC::Methods::invalid:
+ Utils::safePrint("INVALID METHOD: " + request["method"].get<std::string>());
+ ret["error"]["code"] = -32601;
+ ret["error"]["message"] = "Method not found";
+ break;
+ case JsonRPC::Methods::dripToAddress:
+ JsonRPC::Decoding::dripToAddress(request, faucet);
+ ret = JsonRPC::Encoding::dripToAddress();
+ break;
+ default:
+ ret["error"]["code"] = -32601;
+ ret["error"]["message"] = "Method not found";
+ break;
+ }
+ if (request["id"].is_string()) {
+ ret["id"] = request["id"].get<std::string>();
+ } else if (request["id"].is_number()) {
+ ret["id"] = request["id"].get<uint64_t>();
+ } else if(request["id"].is_null()) {
+ ret["id"] = nullptr;
+ } else {
+ throw DynamicException("Invalid id type");
+ }
+ } catch (std::exception &e) {
+ json error;
+ error["id"] = id;
+ error["jsonrpc"] = 2.0;
+ error["error"]["code"] = -32603;
+ error["error"]["message"] = "Internal error: " + std::string(e.what());
+ return error.dump();
+ }
+ // Set back to the original id
+ return ret.dump();
+ }
+}
+
diff --git a/src/bins/faucet-api/src/httpparser.h b/src/bins/faucet-api/src/httpparser.h
new file mode 100644
index 00000000..8d0cbcc6
--- /dev/null
+++ b/src/bins/faucet-api/src/httpparser.h
@@ -0,0 +1,163 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef HTTPPARSER_H
+#define HTTPPARSER_H
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+#include "../utils/utils.h"
+#include "../utils/options.h"
+#include "jsonrpc/methods.h"
+#include "jsonrpc/decoding.h"
+#include "jsonrpc/encoding.h"
+
+namespace beast = boost::beast; // from
+namespace http = beast::http; // from
+namespace websocket = beast::websocket; // from
+namespace net = boost::asio; // from
+using tcp = boost::asio::ip::tcp; // from
+
+// It is preferable to use forward declarations here.
+// The parser functions never access any of these members, only passes them around.
+namespace Faucet {
+ class Manager;
+ /**
+ * Parse a JSON-RPC request into a JSON-RPC response, handling all requests and errors.
+ * @param body The request string.
+ * @param faucet Reference to the faucet manager.
+ * @return The response string.
+ */
+ std::string parseJsonRpcRequest(
+ const std::string& body, Manager& faucet
+ );
+
+ /**
+ * Produce an HTTP response for a given request.
+ * The type of the response object depends on the contents of the request,
+ * so the interface requires the caller to pass a generic lambda to receive the response.
+ * @param docroot The root directory of the endpoint.
+ * @param req The request to handle.
+ * @param send TODO: we're missing details on this, Allocator, Body, the function itself and where it's used
+ * @param faucet Reference to the faucet manager.
+ */
+ template <class Body, class Allocator, class Send> void handle_request(
+ [[maybe_unused]] beast::string_view docroot,
+ http::request<Body, http::basic_fields<Allocator>>&& req,
+ Send&& send, Manager& faucet
+ ) {
+ // Returns a bad request response
+ const auto bad_request = [&req](beast::string_view why){
+ http::response<http::string_body> res{http::status::bad_request, req.version()};
+ res.set(http::field::server, BOOST_BEAST_VERSION_STRING);
+ res.set(http::field::content_type, "text/html");
+ res.keep_alive(req.keep_alive());
+ res.body() = std::string(why);
+ res.prepare_payload();
+ return res;
+ };
+
+ // Returns a not found response
+ const auto not_found = [&req](beast::string_view target){
+ http::response<http::string_body> res{http::status::not_found, req.version()};
+ res.set(http::field::server, BOOST_BEAST_VERSION_STRING);
+ res.set(http::field::content_type, "text/html");
+ res.keep_alive(req.keep_alive());
+ res.body() = "The resource '" + std::string(target) + "' was not found.";
+ res.prepare_payload();
+ return res;
+ };
+
+ // Returns a server error response
+ const auto server_error = [&req](beast::string_view what) {
+ http::response<http::string_body> res{http::status::internal_server_error, req.version()};
+ res.set(http::field::server, BOOST_BEAST_VERSION_STRING);
+ res.set(http::field::content_type, "text/html");
+ res.keep_alive(req.keep_alive());
+ res.body() = "An error occurred: '" + std::string(what) + "'";
+ res.prepare_payload();
+ return res;
+ };
+
+ // Make sure we can handle the method
+ if (req.method() != http::verb::post && req.method() != http::verb::options)
+ return send(bad_request("Unknown HTTP-method"));
+
+ // Request path must be absolute and not contain ".."
+ if (
+ req.target().empty() || req.target()[0] != '/' ||
+ req.target().find("..") != beast::string_view::npos
+ ) return send(bad_request("Illegal request-target"));
+
+ // Respond to OPTIONS, Metamask requests it
+ if (req.method() == http::verb::options) {
+ http::response<http::string_body> res{http::status::ok, req.version()};
+ res.set(http::field::server, BOOST_BEAST_VERSION_STRING);
+ res.set(http::field::access_control_allow_origin, "*");
+ res.set(http::field::access_control_allow_methods, "POST, GET");
+ res.set(http::field::access_control_allow_headers, "content-type");
+ res.set(http::field::accept_encoding, "deflate");
+ res.set(http::field::accept_language, "en-US");
+ res.keep_alive(req.keep_alive());
+ return send(std::move(res));
+ }
+ Utils::safePrint("HTTP Request: " + req.body());
+ std::string request = req.body();
+ std::string answer = parseJsonRpcRequest(
+ request, faucet
+ );
+
+ http::response<http::string_body> res{http::status::ok, req.version()};
+ res.set(http::field::server, BOOST_BEAST_VERSION_STRING);
+ res.set(http::field::access_control_allow_origin, "*");
+ res.set(http::field::access_control_allow_methods, "POST, GET");
+ res.set(http::field::access_control_allow_headers, "content-type");
+ res.set(http::field::content_type, "application/json");
+ res.set(http::field::connection, "keep-alive");
+ res.set(http::field::strict_transport_security, "max-age=0");
+ res.set(http::field::vary, "Origin");
+ res.set(http::field::access_control_allow_credentials, "true");
+ res.body() = answer;
+ res.keep_alive(req.keep_alive());
+ res.prepare_payload();
+ return send(std::move(res));
+ }
+}
+
+#endif // HTTPPARSER_H
diff --git a/src/bins/faucet-api/src/httpserver.cpp b/src/bins/faucet-api/src/httpserver.cpp
new file mode 100644
index 00000000..168356e1
--- /dev/null
+++ b/src/bins/faucet-api/src/httpserver.cpp
@@ -0,0 +1,33 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "httpserver.h"
+namespace Faucet {
+ bool HTTPServer::run() {
+ // Create and launch a listening port
+ const boost::asio::ip::address address = net::ip::make_address("0.0.0.0");
+ auto docroot = std::make_shared<const std::string>(".");
+ this->listener_ = std::make_shared<HTTPListener>(
+ this->ioc_, tcp::endpoint{address, this->port_}, docroot, this->faucet_
+ );
+ this->listener_->start();
+
+ // Run the I/O service on the requested number of threads (4)
+ std::vector<std::thread> v;
+ v.reserve(4 - 1);
+ for (int i = 4 - 1; i > 0; i--) v.emplace_back([&]{ this->ioc_.run(); });
+ LOGINFO(std::string("HTTP Server Started at port: ") + std::to_string(port_));
+ this->ioc_.run();
+
+ // If we get here, it means we got a SIGINT or SIGTERM. Block until all the threads exit
+ for (std::thread& t : v) t.join();
+ LOGINFO("HTTP Server Stopped");
+ return true;
+ }
+
+}
+
diff --git a/src/bins/faucet-api/src/httpserver.h b/src/bins/faucet-api/src/httpserver.h
new file mode 100644
index 00000000..525ce6df
--- /dev/null
+++ b/src/bins/faucet-api/src/httpserver.h
@@ -0,0 +1,60 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef HTTPSERVER_H
+#define HTTPSERVER_H
+
+#include "httpparser.h"
+#include "httplistener.h"
+
+/// Namespace for faucet-related functionalities.
+namespace Faucet {
+ class Manager; // Forward declaration.
+
+ /// Abstraction of an HTTP server.
+ class HTTPServer {
+ private:
+ Manager& faucet_; ///< Reference to the faucet manager.
+
+ /// Provides core I/O functionality ({x} = max threads the object can use).
+ net::io_context ioc_{4};
+
+ /// Pointer to the HTTP listener.
+ std::shared_ptr<HTTPListener> listener_;
+
+ /// The port where the server is running.
+ const unsigned short port_;
+
+
+ /// Future for the run function so we know when it should stop.
+ std::future<bool> runFuture_;
+
+ public:
+ /// The run function (effectively starts the server).
+ bool run();
+
+ /**
+ * Constructor. Does NOT automatically start the server.
+ * @param port The port where the server runs.
+ * @param faucet Reference to the faucet manager.
+ */
+ HTTPServer(const uint16_t& port, Manager& faucet) : port_(port), faucet_(faucet) {
+ std::cout << "Starting at port: " << port_ << std::endl;
+ }
+
+ /// Destructor. Automatically stops the server.
+ ~HTTPServer() {}
+
+ /**
+ * Check if the server is currently active and running.
+ * @return `true` if the server is running, `false` otherwise.
+ */
+ bool running() const { return this->runFuture_.valid(); }
+ };
+}
+
+#endif // HTTPSERVER_H
diff --git a/src/bins/faucet-api/src/httpsession.cpp b/src/bins/faucet-api/src/httpsession.cpp
new file mode 100644
index 00000000..510943ff
--- /dev/null
+++ b/src/bins/faucet-api/src/httpsession.cpp
@@ -0,0 +1,94 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "httpsession.h"
+
+namespace Faucet {
+ HTTPQueue::HTTPQueue(HTTPSession& session) : session_(session) {
+ assert(this->limit_ > 0);
+ this->items_.reserve(this->limit_);
+ }
+
+ bool HTTPQueue::full() const { return this->items_.size() >= this->limit_; }
+
+ bool HTTPQueue::on_write() {
+ BOOST_ASSERT(!this->items_.empty());
+ bool wasFull = this->full();
+ this->items_.erase(this->items_.begin());
+ if (!this->items_.empty()) (*this->items_.front())();
+ return wasFull;
+ }
+
+ template <bool isRequest, class Body, class Fields> void HTTPQueue::operator()(
+ http::message<isRequest, Body, Fields>&& msg
+ ) {
+ // This holds a work item
+ struct work_impl : work {
+ HTTPSession& session;
+ http::message<isRequest, Body, Fields> msg; // This msg is internal
+ work_impl(HTTPSession& session, http::message<isRequest, Body, Fields>&& msg)
+ : session(session), msg(std::move(msg)) {}
+ void operator()() override {
+ http::async_write(
+ session.stream_, msg, beast::bind_front_handler(
+ &HTTPSession::on_write, session.shared_from_this(), msg.need_eof()
+ )
+ );
+ }
+ };
+
+ // Allocate and store the work, and if there was no previous work, start this one
+ this->items_.push_back(boost::make_unique<work_impl>(this->session_, std::move(msg))); // This msg is from the header
+ if (this->items_.size() == 1) (*this->items_.front())();
+ }
+
+ void HTTPSession::do_read() {
+ this->parser_.emplace(); // Construct a new parser for each message
+ this->parser_->body_limit(512000); // Apply a reasonable limit to body size in bytes to prevent abuse
+ // Read a request using the parser-oriented interface
+ http::async_read(this->stream_, this->buf_, *this->parser_, beast::bind_front_handler(
+ &HTTPSession::on_read, this->shared_from_this()
+ ));
+ }
+
+ void HTTPSession::on_read(beast::error_code ec, std::size_t bytes) {
+ boost::ignore_unused(bytes);
+ // This means the other side closed the connection
+ if (ec == http::error::end_of_stream) return this->do_close();
+ if (ec) return fail("HTTPSession", __func__, ec, "Failed to close connection");
+ // Send the response
+ handle_request(
+ *this->docroot_, this->parser_->release(), this->queue_, this->faucet_
+ );
+ // If queue still has free space, try to pipeline another request
+ if (!this->queue_.full()) this->do_read();
+ }
+
+ void HTTPSession::on_write(bool close, beast::error_code ec, std::size_t bytes) {
+ boost::ignore_unused(bytes);
+ if (ec) return fail("HTTPSession", __func__, ec, "Failed to write to buffer");
+ // This means we should close the connection, usually because the
+ // response indicated the "Connection: close" semantic
+ if (close) return this->do_close();
+ // Inform the queue that a write was completed and read another request
+ if (this->queue_.on_write()) this->do_read();
+ }
+
+ void HTTPSession::do_close() {
+ // Send a TCP shutdown
+ beast::error_code ec;
+ this->stream_.socket().shutdown(tcp::socket::shutdown_send, ec);
+ // At this point the connection is closed gracefully
+ }
+
+ void HTTPSession::start() {
+ net::dispatch(this->stream_.get_executor(), beast::bind_front_handler(
+ &HTTPSession::do_read, this->shared_from_this()
+ ));
+ }
+}
+
diff --git a/src/bins/faucet-api/src/httpsession.h b/src/bins/faucet-api/src/httpsession.h
new file mode 100644
index 00000000..b0424a6e
--- /dev/null
+++ b/src/bins/faucet-api/src/httpsession.h
@@ -0,0 +1,126 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef HTTPSESSION_H
+#define HTTPSESSION_H
+
+#include "httpparser.h"
+
+namespace Faucet {
+ // Forward declarations.
+ class Manager;
+ class HTTPSession; // HTTPQueue depends on HTTPSession and vice-versa
+
+ /// Helper class used for HTTP pipelining.
+ class HTTPQueue {
+ private:
+ /// Type-erased, saved work item.
+ struct work {
+ virtual ~work() = default; ///< Default destructor.
+ virtual void operator()() = 0; ///< Default call operator.
+ };
+
+ unsigned int limit_ = 8; ///< Maximum number of responses to queue.
+ HTTPSession& session_; ///< Reference to the HTTP session that is handling the queue.
+ std::vector<std::unique_ptr<work>> items_; ///< Array of pointers to work structs.
+
+ public:
+ /**
+ * Constructor.
+ * @param session Reference to the HTTP session that will handle the queue.
+ */
+ explicit HTTPQueue(HTTPSession& session);
+
+ /**
+ * Check if the queue limit was hit.
+ * @return `true` if queue is full, `false` otherwise.
+ */
+ bool full() const;
+
+ /**
+ * Callback for when a message is sent.
+ * @return `true` if the caller should read a message, `false` otherwise.
+ */
+ bool on_write();
+
+ /**
+ * Call operator.
+ * Called by the HTTP handler to send a response.
+ * @param msg The message to send as a response.
+ */
+ template <bool isRequest, class Body, class Fields> void operator()(
+ http::message<isRequest, Body, Fields>&& msg
+ );
+ };
+
+ /// Class that handles an HTTP connection session.
+ class HTTPSession : public std::enable_shared_from_this<HTTPSession> {
+ private:
+ Manager& faucet_; ///< Reference pointer to the faucet singleton.
+ /// TCP/IP stream socket.
+ beast::tcp_stream stream_;
+
+ /// Internal buffer to read and write from.
+ beast::flat_buffer buf_;
+
+ /// Pointer to the root directory of the endpoint.
+ std::shared_ptr<const std::string> docroot_;
+
+ /// Queue object that the session is responsible for.
+ HTTPQueue queue_;
+
+ /**
+ * HTTP/1 parser for producing a request message.
+ * The parser is stored in an optional container so we can construct it
+ * from scratch at the beginning of each new message.
+ */
+ boost::optional<http::request_parser<http::string_body>> parser_;
+
+ /// Read whatever is on the internal buffer.
+ void do_read();
+
+ /**
+ * Callback for do_read().
+ * Tries to pipeline another request if the queue isn't full.
+ * @param ec The error code to parse.
+ * @param bytes The number of read bytes.
+ */
+ void on_read(beast::error_code ec, std::size_t bytes);
+
+ /**
+ * Callback for when HTTPQueue writes something.
+ * Automatically reads another request.
+ * @param close If `true`, calls do_close() at the end.
+ * @param ec The error code to parse.
+ * @param bytes The number of written bytes.
+ */
+ void on_write(bool close, beast::error_code ec, std::size_t bytes);
+
+ /// Send a TCP shutdown and close the connection.
+ void do_close();
+
+ public:
+ /**
+ * Constructor.
+ * @param sock The socket to take ownership of.
+ * @param docroot Reference pointer to the root directory of the endpoint.
+ * @param faucet Reference to the faucet manager.
+ */
+ HTTPSession(tcp::socket&& sock,
+ const std::shared_ptr<const std::string>& docroot, Manager& faucet
+ ) : stream_(std::move(sock)), docroot_(docroot), queue_(*this), faucet_(faucet)
+ {
+ stream_.expires_never();
+ }
+
+ /// Start the HTTP session.
+ void start();
+
+ friend class HTTPQueue;
+ };
+}
+#endif // HTTPSESSION_H
diff --git a/src/bins/faucet-api/src/jsonrpc/decoding.cpp b/src/bins/faucet-api/src/jsonrpc/decoding.cpp
new file mode 100644
index 00000000..c4cf0e9b
--- /dev/null
+++ b/src/bins/faucet-api/src/jsonrpc/decoding.cpp
@@ -0,0 +1,59 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "decoding.h"
+#include "../faucetmanager.h"
+
+namespace Faucet {
+namespace JsonRPC::Decoding {
+
+ bool checkJsonRPCSpec(const json& request) {
+ try {
+ // "jsonrpc": "2.0" is a MUST
+ if (!request.contains("jsonrpc")) return false;
+ if (request["jsonrpc"].get<std::string>() != "2.0") return false;
+
+ // "method" is a MUST
+ if (!request.contains("method")) return false;
+
+ // Params MUST be Object or Array.
+ if (
+ request.contains("params") && (!request["params"].is_object() && !request["params"].is_array())
+ ) return false;
+
+ return true;
+ } catch (std::exception& e) {
+ SLOGERROR(std::string("Error while checking json RPC spec: ") + e.what());
+ throw DynamicException("Error while checking json RPC spec: " + std::string(e.what()));
+ }
+ }
+
+ Methods getMethod(const json& request) {
+ try {
+ const std::string& method = request["method"].get<std::string>();
+ auto it = methodsLookupTable.find(method);
+ if (it == methodsLookupTable.end()) return Methods::invalid;
+ return it->second;
+ } catch (std::exception& e) {
+ SLOGERROR(std::string("Error while getting method: ") + e.what());
+ throw DynamicException("Error while checking json RPC spec: " + std::string(e.what()));
+ }
+ }
+ // https://www.jsonrpc.org/specification
+ void dripToAddress(const json& request, Manager& faucet) {
+ static const std::regex addFilter("^0x[0-9a-fA-F]{40}$");
+ try {
+ const auto address = request["params"].at(0).get<std::string>();
+ if (!std::regex_match(address, addFilter)) throw DynamicException("Invalid address hex");
+ faucet.dripToAddress(Address(Hex::toBytes(address)));
+ } catch (std::exception& e) {
+ SLOGERROR(std::string("Error while decoding dripToAddress: ") + e.what());
+ throw DynamicException("Error while decoding dripToAddress: " + std::string(e.what()));
+ }
+ }
+}
+}
diff --git a/src/bins/faucet-api/src/jsonrpc/decoding.h b/src/bins/faucet-api/src/jsonrpc/decoding.h
new file mode 100644
index 00000000..beb387a8
--- /dev/null
+++ b/src/bins/faucet-api/src/jsonrpc/decoding.h
@@ -0,0 +1,51 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef JSONRPC_DECODING_H
+#define JSONRPC_DECODING_H
+
+#include <regex>
+
+#include "utils/utils.h"
+
+#include "methods.h"
+
+// Forward declarations.
+class Storage;
+
+namespace Faucet {
+ class Manager;
+ /**
+ * Namespace for decoding JSON-RPC data.
+ * All functions require a JSON object that is the request itself to be operated on.
+ */
+ namespace JsonRPC::Decoding {
+ /**
+ * Helper function to get the method of the JSON-RPC request.
+ * @param request The request object.
+ * @return The method inside the request, or `invalid` if the method is not found.
+ */
+ Methods getMethod(const json& request);
+
+ /**
+ * Helper function to check if a given JSON-RPC request is valid.
+ * Does NOT check if the method called is valid, only if the request follows JSON-RPC 2.0 spec.
+ * @param request The request object.
+ * @return `true` if request is valid, `false` otherwise.
+ */
+ bool checkJsonRPCSpec(const json& request);
+
+ /**
+ * Helper function to decode a "dripToAddress" request and execute it.
+ * Validates the address parameter, then requests a drip from the faucet manager.
+ * @param request The request object.
+ * @param faucet Reference to the faucet manager.
+ */
+ void dripToAddress(const json& request, Manager& faucet);
+ }
+}
+#endif /// JSONRPC_DECODING_H
diff --git a/src/bins/faucet-api/src/jsonrpc/encoding.cpp b/src/bins/faucet-api/src/jsonrpc/encoding.cpp
new file mode 100644
index 00000000..3f4ae504
--- /dev/null
+++ b/src/bins/faucet-api/src/jsonrpc/encoding.cpp
@@ -0,0 +1,20 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#include "encoding.h"
+
+namespace Faucet {
+ namespace JsonRPC::Encoding {
+ json dripToAddress() {
+ json ret;
+ ret["jsonrpc"] = "2.0";
+ ret["result"] = "0x1";
+ return ret;
+ }
+ }
+}
+
diff --git a/src/bins/faucet-api/src/jsonrpc/encoding.h b/src/bins/faucet-api/src/jsonrpc/encoding.h
new file mode 100644
index 00000000..fcd1a1ad
--- /dev/null
+++ b/src/bins/faucet-api/src/jsonrpc/encoding.h
@@ -0,0 +1,24 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef JSONRPC_ENCODING_H
+#define JSONRPC_ENCODING_H
+
+#include "utils/utils.h"
+
+namespace Faucet {
+ /// Namespace for encoding JSON-RPC data.
+ namespace JsonRPC::Encoding {
+ /**
+ * Encode a "dripToAddress" request.
+ * @return The formed request.
+ */
+ json dripToAddress();
+ }
+}
+
+#endif // JSONRPC_ENCODING_H
diff --git a/src/bins/faucet-api/src/jsonrpc/methods.h b/src/bins/faucet-api/src/jsonrpc/methods.h
new file mode 100644
index 00000000..e550c9c9
--- /dev/null
+++ b/src/bins/faucet-api/src/jsonrpc/methods.h
@@ -0,0 +1,48 @@
+/*
+Copyright (c) [2023-2024] [AppLayer Developers]
+
+This software is distributed under the MIT License.
+See the LICENSE.txt file in the project root for more information.
+*/
+
+#ifndef JSONRPC_METHODS_H
+#define JSONRPC_METHODS_H
+
+#include