diff --git a/.debug/.gitignore b/.debug/.gitignore
deleted file mode 100644
index a5baada1..00000000
--- a/.debug/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-
diff --git a/.gitignore b/.gitignore
index d9abce8f..b5503e9e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,37 @@
+# Artifacts and Cache
+build/
+include/
+lib/
+bin/
+bin_tests/
+dist/
+cmake-build-*/
+.cmake/
+gstore-*.tar.gz
+CMakeUserPresets.json
+
+# Runtime directories
+.tmp/
+.debug/
+.obj/
+backup/
+logs/
+upload/
+temp/
+fun/
+
+# config files
+# TODO: add a separate config file folder
+/backup.json
+/conf.ini
+/init.conf
+/ipAllow.config
+/ipDeny.config
+/README.txt
+/slog.properties
+/slog.stdout.properties
+/*.log
+
 # Executables
 *.pyc
 *.exe
@@ -7,19 +41,20 @@
 *.x86_64
 *.hex
 bin/g*
-scripts/update_test
-scripts/dataset_test
+tests/update_test
+tests/dataset_test
 api/cpp/example/example
 
-Parser/Sparql*
+src/Parser/Sparql*
 
 #result logs
 *.txt
 *.log
 logs
+!CMakeLists.txt
 
 #backup files
-backup.json
+defaults/backup.json
 
 #download files
 *.torrent
@@ -90,6 +125,7 @@ tags
 .vs
 *.iml
 .idea
+.vscode
 
 # latex files
 *.aux
@@ -100,6 +136,7 @@ tags
 # queries
 *.sql
 *.sh
+!docker-entrypoint.sh
 
 # modules
 node_modules
diff --git a/.objs/.gitignore b/.objs/.gitignore
deleted file mode 100644
index a5baada1..00000000
--- a/.objs/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-
diff --git a/.tmp/.gitignore b/.tmp/.gitignore
deleted file mode 100644
index a5baada1..00000000
--- a/.tmp/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-
diff --git a/.tmp/web/.gitignore b/.tmp/web/.gitignore
deleted file mode 100644
index a5baada1..00000000
--- a/.tmp/web/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-
diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json
deleted file mode 100644
index a9f5fe19..00000000
--- a/.vscode/c_cpp_properties.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-    "configurations": [
-        {
-            "name": "Linux",
-            "includePath": [
-                "${workspaceFolder}/**"
-            ],
-            "defines": [],
-            "compilerPath": "/usr/bin/gcc",
-            "cStandard": "c17",
-            "cppStandard": "gnu++14",
-            "intelliSenseMode": "linux-gcc-x64"
-        }
-    ],
-    "version": 4
-}
\ No newline at end of file
diff --git a/3rdparty/.gitignore b/3rdparty/.gitignore
new file mode 100644
index 00000000..6a0c6d1e
--- /dev/null
+++ b/3rdparty/.gitignore
@@ -0,0 +1,6 @@
+*
+!.gitignore
+!*.tar*
+!*.t[bgx]z
+!*.zip
+!*.sh
\ No newline at end of file
diff --git a/3rdparty/antlr4-cpp-runtime-4.sh b/3rdparty/antlr4-cpp-runtime-4.sh
new file mode 100644
index 00000000..0b6d1f37
--- /dev/null
+++ b/3rdparty/antlr4-cpp-runtime-4.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+mkdir -p ../include/
+rm -rf antlr4-cpp-runtime-4
+tar -xzvf antlr4-cpp-runtime-4.tar.gz
+cd antlr4-cpp-runtime-4 || { echo "./antlr4-cpp-runtime-4 not found"; exit 1; }
+cmake .
+
+# if GSTORE_USE_DYNAMIC_LIB is defined, copy the dynamic library to the lib directory;
+# otherwise, copy the static library to the lib directory
+if [ -n "$GSTORE_USE_DYNAMIC_LIB" ]; then
+    make -j2 antlr4_shared
+    cp dist/libantlr4-runtime.so* ../../lib/libantlr4-runtime.so
+else
+    make -j$(nproc) antlr4_static
+    cp dist/libantlr4-runtime.a ../../lib/
+fi
+cp -r runtime/src/* ../../include/
\ No newline at end of file
diff --git a/tools/antlr4-cpp-runtime-4.tar.gz b/3rdparty/antlr4-cpp-runtime-4.tar.gz
similarity index 100%
rename from tools/antlr4-cpp-runtime-4.tar.gz
rename to 3rdparty/antlr4-cpp-runtime-4.tar.gz
diff --git a/3rdparty/init.sh b/3rdparty/init.sh
new file mode 100644
index 00000000..69d735b1
--- /dev/null
+++ b/3rdparty/init.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+BLUE='\033[0;34m'
+NC='\033[0m'
+
+cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
+
+mkdir -p ../lib ../include
+
+for f in *.sh; do
+    if [ "$f" != "init.sh" ]; then
+        echo -e "${BLUE}Running $f${NC}"
+        bash "$f"
+        if [ $? -ne 0 ]; then
+            echo -e "${BLUE}Failed to run $f${NC}"
+            exit 1
+        fi
+    fi
+done
+
diff --git a/3rdparty/workflow-0.10.3.sh b/3rdparty/workflow-0.10.3.sh
new file mode 100644
index 00000000..5ef32227
--- /dev/null
+++ b/3rdparty/workflow-0.10.3.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+rm -rf workflow
+tar -xzvf workflow-0.10.3.tar.gz
+cd ./workflow || { echo "./workflow not found"; exit 1; }
+
+make -j$(nproc)
+# if GSTORE_USE_DYNAMIC_LIB is defined, copy the dynamic library to the lib directory;
+# otherwise, copy the static library to the lib directory
+
+if [ -n "$GSTORE_USE_DYNAMIC_LIB" ]; then
+    cp _lib/libworkflow.so* ../../lib/
+else
+    cp _lib/libworkflow.a ../../lib/libworkflow.a
+fi
+cp -r _include/workflow ../../include/workflow
diff --git a/tools/workflow-0.10.3.tar.gz b/3rdparty/workflow-0.10.3.tar.gz
similarity index 100%
rename from tools/workflow-0.10.3.tar.gz
rename to 3rdparty/workflow-0.10.3.tar.gz
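The scripts above are what the `prepare` target in CMakeLists.txt (below) invokes; they can also be run by hand. A minimal sketch, assuming a fresh checkout and that GSTORE_USE_DYNAMIC_LIB is only set when shared libraries are wanted:

    # unpack and build the bundled antlr4 and workflow dependencies into include/ and lib/
    cd 3rdparty
    GSTORE_USE_DYNAMIC_LIB=1 bash init.sh    # omit the variable to build static .a archives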
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 00000000..2d7b798b
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,182 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# @authors: Runzhi He
+# @date: 2023-12-20
+
+cmake_minimum_required(VERSION 3.18)
+
+project(gStore LANGUAGES CXX)
+
+# gStore requires gcc 8 or higher
+if(NOT CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_VERSION VERSION_LESS "8")
+    message(FATAL_ERROR "gStore requires gcc 8 or higher, got ${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}")
+endif()
+
+### set compiler flags
+# use -O2 for Release
+set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O2")
+# ignore unused-result, panic on return-type
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-result -Werror=return-type")
+# check CMAKE_BUILD_TYPE, if not set, set to Release
+if(NOT CMAKE_BUILD_TYPE)
+    set(CMAKE_BUILD_TYPE Release)
+    message(WARNING "Build type not set, default to Release")
+endif()
+# Check if mold is available and set it as the linker
+find_program(MOLD_LINKER mold)
+if(MOLD_LINKER)
+    message(STATUS "mold linker found: ${MOLD_LINKER}, adding -fuse-ld=mold to linker flags")
+    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=mold")
+    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=mold")
+    set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=mold")
+else()
+    message(STATUS "mold linker not found, using default linker")
+endif()
+# coverage settings
+if(COVERAGE)
+    message(STATUS "Enabling coverage reporting")
+    # halt if the build_type is not Debug
+    if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
+        message(FATAL_ERROR "Coverage requires Debug build type")
+    endif()
+    # set compiler flags
+    add_link_options(--coverage)
+    add_compile_options(--coverage)
+endif ()
+
+# print build type and flags
+message(STATUS "Build type: ${CMAKE_BUILD_TYPE}")
+message(STATUS "CXX Flags: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE}}")
+
+# helper for IDEs to find the include path
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+# Add include/ and lib/
+include_directories(${CMAKE_SOURCE_DIR}/include)
+link_directories(${CMAKE_SOURCE_DIR}/lib)
+# TODO: The line below is a workaround for millions of relative includes. Remove it when all relative includes are replaced with target_include_directories.
+include_directories(${CMAKE_INCLUDE_PATH})
+
+# Set output directories (use -DGSTORE_ROOT_DIR=... to override)
+SET(GSTORE_ROOT_DIR ${CMAKE_SOURCE_DIR} CACHE PATH "gStore output root directory")
+SET(GSTORE_EXE_DIR ${GSTORE_ROOT_DIR}/bin)
+SET(GSTORE_LIB_DIR ${GSTORE_ROOT_DIR}/lib)
+SET(GSTORE_TEST_DIR ${GSTORE_ROOT_DIR}/bin_tests)
+
+# if option USE_DYNAMIC_LIB is set, set env var GSTORE_USE_DYNAMIC_LIB to 1
+SET(USE_DYNAMIC_LIB ON CACHE BOOL "Use dynamic libraries")
+if(USE_DYNAMIC_LIB OR DEFINED ENV{GSTORE_USE_DYNAMIC_LIB})
+    set(ENV{GSTORE_USE_DYNAMIC_LIB} 1)
+    # Set rpath so that gStore can find its shared libraries
+    set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib")
+endif()
+
+### Dependencies managed by conan
+# boost-system, boost-regex, boost-thread
+find_package(Boost REQUIRED CONFIG COMPONENTS system regex thread)
+# minizip: minizip::minizip, minizip/zip.h
+find_package(minizip REQUIRED CONFIG)
+# openssl: OpenSSL::SSL, OpenSSL::Crypto
+find_package(OpenSSL REQUIRED CONFIG)
+# indicators: indicators::indicators, indicators/progress_bar.hpp
+find_package(indicators REQUIRED CONFIG)
+## antlr4-runtime: antlr4_shared, antlr4_static
+#find_package(antlr4-runtime REQUIRED CONFIG)
+# rapidjson: rapidjson, rapidjson/document.h
+find_package(RapidJSON REQUIRED CONFIG)
+# log4cplus: log4cplus::log4cplus, log4cplus/logger.h
+find_package(log4cplus REQUIRED CONFIG)
+# curl: CURL::libcurl, curl/curl.h
+find_package(CURL REQUIRED CONFIG)
+
+### Dependencies managed by system package manager
+find_package(Threads REQUIRED)
+find_package(PkgConfig REQUIRED)
+# readline: have to link for license compatibility with GPL-3
+find_library(LIB_READLINE NAMES readline REQUIRED)
+# jemalloc
+find_library(LIB_JEMALLOC NAMES jemalloc REQUIRED)
+# openmp: OpenMP::OpenMP_CXX (find_package(OpenMP) requires cmake 3.9 or higher)
+find_package(OpenMP REQUIRED)
+# uuid: uuid::uuid (required by antlr4-cppruntime)
+find_library(LIB_UUID NAMES uuid REQUIRED)
+# Backward: Backward::Backward
+find_package(Backward REQUIRED)
+# libdwarf: libdwarf::libdwarf
+find_package(libdwarf REQUIRED)
+# libelf: libelf::libelf
+find_package(libelf REQUIRED)
+
+### Prepare unmanaged dependencies
+add_custom_target(
+    prepare
+    COMMAND [ -d ${CMAKE_SOURCE_DIR}/include ] && [ -d ${CMAKE_SOURCE_DIR}/lib ] || bash ${CMAKE_SOURCE_DIR}/3rdparty/init.sh
+    COMMENT "Prepare unmanaged dependencies"
+)
+
+add_custom_target(
+    clean_prepare
+    COMMAND rm -rvf ${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/lib/*.a
+    COMMAND find ${CMAKE_SOURCE_DIR}/3rdparty -mindepth 1 -maxdepth 1 -type d -exec rm -rvf {} +
+    COMMENT "Clean unmanaged dependencies"
+)
+
+add_custom_target(
+    uninstall
+    # remove the bin/ directory, the installed shared libraries in lib/, and extensionless test binaries in bin_tests/
+    COMMAND rm -rvf ${GSTORE_EXE_DIR}
+    COMMAND rm -rvf ${GSTORE_LIB_DIR}/libgcsr.so ${GSTORE_LIB_DIR}/libgpathqueryhandler.so
+    COMMAND find ${GSTORE_TEST_DIR} -type f -not -name '*.*' -exec rm -v {} +
+    COMMENT "Clean installed files"
+)
+
+add_custom_target(
+    tarball
+    COMMAND cd ../ && ${CMAKE_COMMAND} -E tar "cfzv" "gstore-src.tar.gz" --
+            "src" "api" "defaults" "tests" "3rdparty" "data" "scripts" "conanfile.py" "CMakeLists.txt" "README.md" "LICENSE"
+    COMMENT "Creating source tarball"
+    VERBATIM
+)
+
+string(TOLOWER "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}" GSTORE_PLATFORM_NAME)
+add_custom_target(
+    package
+    COMMAND cd ../ && ${CMAKE_COMMAND} -E tar "cfzv" "gstore-${GSTORE_PLATFORM_NAME}.tar.gz" --
+            "bin" "lib" "defaults" "data/system" "scripts" "README.md" "LICENSE"
+    COMMENT "Creating binary tarball"
+    VERBATIM
+)
+
+# Add large file support on 32-bit builds
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    add_definitions(-D_FILE_OFFSET_BITS=64)
+    add_definitions(-D_LARGEFILE_SOURCE)
+    add_definitions(-D_LARGEFILE64_SOURCE)
+endif()
+
+add_subdirectory(src)
+add_subdirectory(tests)
+
+# rules for ctest
+include(CTest)
+enable_testing()
+add_test(NAME BasicTest COMMAND bash ${CMAKE_SOURCE_DIR}/tests/basic_test.sh)
+add_test(NAME ParserTest COMMAND bash ${CMAKE_SOURCE_DIR}/tests/parser_test.sh)
+add_test(NAME DatasetTest COMMAND dataset_test)
+add_test(NAME UpdateTest COMMAND update_test 100)
+add_test(NAME TransactionTest COMMAND transaction_test)
+add_test(NAME RunTransaction COMMAND run_transaction)
+add_test(NAME DebugTest COMMAND debug_test)
+
+set(GSTORE_CI_TESTS
+    BasicTest
+    ParserTest
+    DatasetTest
+    UpdateTest
+    TransactionTest
+    RunTransaction
+    DebugTest
+)
+
+foreach (test ${GSTORE_CI_TESTS})
+    set_tests_properties(${test} PROPERTIES TIMEOUT 300 WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
+endforeach ()
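The build above is meant to be driven through a conan-generated toolchain. A sketch of a local build, mirroring (in simplified form) the sequence the Dockerfile below uses; the directory name build is arbitrary:

    pip3 install conan && conan profile detect
    conan install . --output-folder=build --build=missing -s build_type=Release
    cd build
    cmake .. -G Ninja -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release
    ninja prepare    # unpacks the bundled 3rdparty libraries into include/ and lib/
    ninja
    ctest            # runs the tests registered in GSTORE_CI_TESTS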
diff --git a/COVERAGE/readme b/COVERAGE/readme
deleted file mode 100644
index e69de29b..00000000
diff --git a/Dockerfile b/Dockerfile
index 4a0633fc..f402bd5a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,20 +1,71 @@
-FROM lsvih/gcc-cmake-boost:v1
-RUN apt -qqy update \
-    && apt install -qqy --no-install-recommends \
-    libssl-dev libcurl4-openssl-dev libreadline-dev uuid-dev \
-    && ldconfig -v \
-    && echo "* - nofile 65535" >> /etc/security/limits.conf \
-    && echo "* - noproc 65535" >> /etc/security/limits.conf
+# SPDX-License-Identifier: BSD-3-Clause
+
+FROM ubuntu:22.04 AS builder
+
+LABEL vendor="pkumod"
+LABEL description="gStore RDF Database Engine"
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    cmake \
+    ninja-build \
+    mold \
+    python3-pip \
+    pkg-config \
+    uuid-dev \
+    libjemalloc-dev \
+    libreadline-dev \
+    libssl-dev
+
+RUN mkdir -p /src
 
-COPY . /usr/src/gstore
 WORKDIR /usr/src/gstore
 
-ENV LANG C.UTF-8
+# Install conan dependencies
+RUN pip3 install conan && conan profile detect
+
+COPY conanfile.py /usr/src/gstore/
+
+RUN conan install . --output-folder=build --build=missing -s "build_type=Release" -s "compiler.libcxx=libstdc++11" -s "compiler.cppstd=17"
+
+# Copy gStore source code; run `make tarball` to generate this file
+ADD gstore-src.tar.gz /usr/src/gstore
+
+RUN mkdir -p build && cd build && \
+    cmake .. -G Ninja \
+    -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake \
+    -DCMAKE_CXX_FLAGS="-fuse-ld=mold" \
+    -DCMAKE_BUILD_TYPE=Release
+
+RUN mkdir -p build && cd build && \
+    ninja -v prepare && \
+    ninja -v install
+
+FROM ubuntu:22.04 AS runtime
+
+RUN apt-get update && apt-get install -y \
+    libgomp1 \
+    libssl3 \
+    libjemalloc2 \
+    libreadline8 \
+    libuuid1 \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY --from=builder /usr/src/gstore/bin/ /bin/
+COPY --from=builder /usr/src/gstore/lib/ /lib/
+COPY --from=builder /usr/src/gstore/defaults/ /defaults/
+COPY --from=builder /usr/src/gstore/data/ /data/
+COPY --from=builder /usr/src/gstore/scripts/init.sh /docker-entrypoint.sh
+
+WORKDIR /app/
+VOLUME [ "/app/" ]
+
+RUN echo "* - nofile 65535" >> /etc/security/limits.conf \
+    && echo "* - nproc 65535" >> /etc/security/limits.conf
+
+EXPOSE 9000
+
+ENTRYPOINT [ "bash", "/docker-entrypoint.sh" ]
 
-RUN make pre -j && make -j FIRST_BUILD=y \
-    && apt autoclean && apt clean \
-    && rm -rf /usr/src/gstore/.git \
-    && rm -rf /usr/src/gstore/tools \
-    && rm -rf /tmp/* /var/tmp/* \
-    && rm -rf /usr/share/doc/* \
-    && rm -rf /var/lib/apt/lists/*
+CMD [ "/bin/ghttp" ]
\ No newline at end of file
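The image is built from the source tarball rather than from the working tree, so the tarball target has to run first. One possible sequence, assuming the build directory sits in the repository root and using a hypothetical image tag:

    cmake --build build --target tarball    # writes gstore-src.tar.gz into the parent of the build directory
    docker build -t gstore:1.2 .
    docker run -d -p 9000:9000 -v "$PWD/db:/app" gstore:1.2    # port 9000 matches the EXPOSE above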
diff --git a/Parser/SPARQL/SPARQLLexer.tokens b/Parser/SPARQL/SPARQLLexer.tokens
deleted file mode 100644
index 05184f17..00000000
--- a/Parser/SPARQL/SPARQLLexer.tokens
+++ /dev/null
@@ -1,236 +0,0 @@
-T__0=1
-T__1=2
-T__2=3
-T__3=4
-T__4=5
-T__5=6
-T__6=7
-T__7=8
-T__8=9
-T__9=10
-T__10=11
-T__11=12
-T__12=13
-T__13=14
-T__14=15
-T__15=16
-T__16=17
-T__17=18
-T__18=19
-T__19=20
-T__20=21
-T__21=22
-T__22=23
-T__23=24
-T__24=25
-T__25=26
-T__26=27
-T__27=28
-T__28=29
-K_NOW=30
-K_YEAR=31
-K_UNION=32
-K_IF=33
-K_ASK=34
-K_ASC=35
-K_CONCAT=36
-K_IN=37
-K_UNDEF=38
-K_INSERT=39
-K_MONTH=40
-K_DEFAULT=41
-K_SELECT=42
-K_FLOOR=43
-K_TZ=44
-K_COPY=45
-K_CEIL=46
-K_HOURS=47
-K_DATATYPE=48
-K_ISNUMERIC=49
-K_STRUUID=50
-K_CONSTRUCT=51
-K_ADD=52
-K_BOUND=53
-K_NAMED=54
-K_TIMEZONE=55
-K_MIN=56
-K_ISBLANK=57
-K_UUID=58
-K_BIND=59
-K_CLEAR=60
-K_INTO=61
-K_AS=62
-K_ALL=63
-K_IRI=64
-K_BASE=65
-K_BY=66
-K_DROP=67
-K_LOAD=68
-K_WITH=69
-K_BNODE=70
-K_WHERE=71
-K_AVG=72
-K_SAMPLE=73
-K_UCASE=74
-K_SERVICE=75
-K_MINUS=76
-K_SAMETERM=77
-K_STRSTARTS=78
-K_STR=79
-K_MOVE=80
-K_HAVING=81
-K_COALESCE=82
-K_STRBEFORE=83
-K_ABS=84
-K_ISLITERAL=85
-K_STRAFTER=86
-K_STRLEN=87
-K_LANG=88
-K_CREATE=89
-K_DESC=90
-K_MAX=91
-K_FILTER=92
-K_USING=93
-K_NOT=94
-K_STRENDS=95
-K_OFFSET=96
-K_CONTAINS=97
-K_PREFIX=98
-K_MINUTES=99
-K_REPLACE=100
-K_REGEX=101
-K_DELETE=102
-K_SEPARATOR=103
-K_DAY=104
-K_SILENT=105
-K_STRLANG=106
-K_ORDER=107
-K_ROUND=108
-K_GRAPH=109
-K_SECONDS=110
-K_URI=111
-K_DISTINCT=112
-K_EXISTS=113
-K_GROUP=114
-K_SUM=115
-K_REDUCED=116
-K_FROM=117
-K_LANGMATCHES=118
-K_ISURI=119
-K_TO=120
-K_ISIRI=121
-K_RAND=122
-K_STRDT=123
-K_COUNT=124
-K_DESCRIBE=125
-K_VALUES=126
-K_LCASE=127
-K_OPTIONAL=128
-K_LIMIT=129
-K_SUBSTR=130
-K_SIMPLECYCLEPATH=131
-K_SIMPLECYCLEBOOLEAN=132
-K_CYCLEPATH=133
-K_CYCLEBOOLEAN=134
-K_SHORTESTPATH=135
-K_SHORTESTPATHLEN=136
-K_KHOPREACHABLE=137
-K_KHOPENUMERATE=138
-K_KHOPREACHABLEPATH=139
-K_PPR=140
-K_TRIANGLECOUNTING=141
-K_CLOSENESSCENTRALITY=142
-K_BFSCOUNT=143
-K_PR=144
-K_ALPHA=145
-K_MAXITER=146
-K_TOL=147
-K_SSSP=148
-K_SSSPLEN=149
-K_LABELPROP=150
-K_WCC=151
-K_CLUSTERCOEFF=152
-K_MAXIMUMKPLEX=153
-K_CORETRUSS=154
-K_PFN=155
-K_KHOPCOUNT=156
-K_KHOPNEIGHBOR=157
-K_SHORTESTPATHCOUNT=158
-K_LOUVAIN=159
-K_IC14=160
-K_INCREASE=161
-KK_INSERTDATA=162
-KK_DELETEDATA=163
-KK_DELETEWHERE=164
-KK_ENCODE_FOR_URI=165
-KK_MD5=166
-KK_SHA1=167
-KK_SHA256=168
-KK_SHA384=169
-KK_SHA512=170
-KK_GROUP_CONCAT=171
-IRIREF=172
-PNAME_NS=173
-PNAME_LN=174
-BLANK_NODE_LABEL=175
-VAR1=176
-VAR2=177
-LANGTAG=178
-INTEGER=179
-DECIMAL=180
-DOUBLE=181
-INTEGER_POSITIVE=182
-DECIMAL_POSITIVE=183
-DOUBLE_POSITIVE=184
-INTEGER_NEGATIVE=185
-DECIMAL_NEGATIVE=186
-DOUBLE_NEGATIVE=187
-EXPONENT=188
-STRING_LITERAL1=189
-STRING_LITERAL2=190
-STRING_LITERAL_LONG1=191
-STRING_LITERAL_LONG2=192
-ECHAR=193
-NIL=194
-WS=195
-ANON=196
-PN_CHARS_BASE=197
-PN_CHARS_U=198
-VARNAME=199
-PN_CHARS=200
-PN_PREFIX=201
-PN_LOCAL=202
-PLX=203
-PERCENT=204
-HEX=205
-PN_LOCAL_ESC=206
-COMMENT=207
-'*'=1
-'('=2
-')'=3
-'{'=4
-'}'=5
-';'=6
-'.'=7
-','=8
-'a'=9
-'|'=10
-'/'=11
-'^'=12
-'?'=13
-'+'=14
-'!'=15
-'['=16
-']'=17
-'||'=18
-'&&'=19
-'='=20
-'!='=21
-'<'=22
-'>'=23
-'<='=24
-'>='=25
-'-'=26
-'^^'=27
-'true'=28
-'false'=29
diff --git a/Parser/SPARQL/SPARQLVisitor.cpp b/Parser/SPARQL/SPARQLVisitor.cpp
deleted file mode 100644
index 0456f60b..00000000
--- a/Parser/SPARQL/SPARQLVisitor.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-
-// Generated from ./SPARQL.g4 by ANTLR 4.7.2
-
-
-#include "SPARQLVisitor.h"
-
-
diff --git a/Server/web/logo.png b/Server/web/logo.png
deleted file mode 100644
index 1689bd4d..00000000
Binary files a/Server/web/logo.png and /dev/null differ
diff --git a/api/http/cpp/example/Benchmark b/api/http/cpp/example/Benchmark
deleted file mode 100755
index 3b8a2376..00000000
Binary files a/api/http/cpp/example/Benchmark and /dev/null differ
diff --git a/api/http/cpp/example/GET-example b/api/http/cpp/example/GET-example
deleted file mode 100755
index f88052cf..00000000
Binary files a/api/http/cpp/example/GET-example and /dev/null differ
diff --git a/api/http/cpp/example/POST-example b/api/http/cpp/example/POST-example
deleted file mode 100755
index 259dfb9d..00000000
Binary files a/api/http/cpp/example/POST-example and /dev/null differ
diff --git a/api/http/cpp/example/Transaction-example b/api/http/cpp/example/Transaction-example
deleted file mode 100755
index ade8c25e..00000000
Binary files a/api/http/cpp/example/Transaction-example and /dev/null differ
diff --git a/api/http/cpp/src/.gitignore b/api/http/cpp/src/.gitignore
new file mode 100644
index 00000000..d1638636
--- /dev/null
+++ b/api/http/cpp/src/.gitignore
@@ -0,0 +1 @@
+build/
\ No newline at end of file
diff --git a/api/http/cpp/src/CMakeLists.txt b/api/http/cpp/src/CMakeLists.txt
new file mode 100644
index 00000000..011a49d9
--- /dev/null
+++ b/api/http/cpp/src/CMakeLists.txt
@@ -0,0 +1,19 @@
+cmake_minimum_required(VERSION 3.4)
+
+project(gStoreConnector)
+# try to find curl provided by conan first
+if (NOT CURL_FOUND)
+    find_package(CURL QUIET)
+endif ()
+# if curl is not found via conan, fall back to libcurl from pkg-config
+if (NOT CURL_FOUND)
+    find_package(PkgConfig REQUIRED)
+    pkg_search_module(CURL REQUIRED libcurl)
+endif()
+
+set(CONNECTOR_SOURCES GstoreConnector.cpp)
+
+add_library(gstore_connector OBJECT ${CONNECTOR_SOURCES})
+
+add_library(gstore_connector_static STATIC $<TARGET_OBJECTS:gstore_connector>)
+target_link_libraries(gstore_connector_static ${CURL_LIBRARIES})
\ No newline at end of file
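The OBJECT library only compiles GstoreConnector.cpp; what client code actually links is the static archive libgstore_connector_static.a, and libcurl is not bundled into that archive. A rough sketch of consuming it, assuming a hypothetical client.cpp that includes GstoreConnector.h:

    g++ client.cpp libgstore_connector_static.a -lcurl -o client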
diff --git a/backup.json b/backup.json
deleted file mode 100644
index a86f7145..00000000
--- a/backup.json
+++ /dev/null
@@ -1 +0,0 @@
-{"db_name": "system","backup_timer": "600"}
diff --git a/backups/.gitkeep b/backups/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/backups/logs/logs b/backups/logs/logs
deleted file mode 100644
index 8b137891..00000000
--- a/backups/logs/logs
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/bin/.gitignore b/bin/.gitignore
deleted file mode 100644
index a5baada1..00000000
--- a/bin/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-
diff --git a/conanfile.py b/conanfile.py
new file mode 100644
index 00000000..5759e195
--- /dev/null
+++ b/conanfile.py
@@ -0,0 +1,56 @@
+from conan import ConanFile
+
+class GStore(ConanFile):
+    ### Project Attributes ###
+    # references: https://docs.conan.io/2/reference/conanfile/attributes.html
+    name = "gstore"
+    version = "1.2"
+    license = "BSD-3-Clause"
+    homepage = "https://gstore.cn"
+    url = "https://github.com/pkumod/gStore"
+
+    ### Dependencies ###
+    # available packages: https://conan.io/center/recipes
+    # usage reference: https://github.com/conan-io/conan-center-index/blob/master/recipes/{PACKAGE_NAME}/all
+    requires = (
+        "log4cplus/2.1.0",
+        "zlib/1.3",
+        "indicators/2.3",
+        "rapidjson/1.1.0",
+        "libcurl/8.5.0",
+        "openssl/3.2.0",
+        "minizip/1.2.13",
+        "backward-cpp/1.6",
+    )
+    build_policy = "missing"
+    settings = "os", "compiler", "build_type", "arch"
+    generators = "CMakeDeps", "CMakeToolchain"
+
+    def configure(self):
+        # set the project to use C++17
+        self.settings.compiler.cppstd = "17"
+        # set log4cplus to use char instead of wchar_t
+        self.options["log4cplus"].unicode = False
+
+    def requirements(self):
+        # remove boost from the requirements on loongarch64
+        if self.settings.get_safe("arch") != "loongarch64":
+            self.requires("boost/1.83.0")
+        else:
+            self.output.warning("The official Boost wheel (1.83.0) is yet to be fixed on loongarch. "
+                                "Please install boost from your system package manager.")
+
+    def package_info(self):
+        self.output.info("PackageInfo!: Cppstd version: %s!" % self.settings.compiler.cppstd)
+
+    def layout(self):
+        # basically a trimmed down version of the basic_layout function
+        _build_type = self.settings.get_safe("build_type")
+        if _build_type:
+            self.folders.build = "cmake-build-{}".format(str(_build_type).lower())
+        else:
+            self.folders.build = "cmake-build"
+        self.folders.source = "."
+ self.folders.generators = self.folders.build + self.cpp.build.bindirs = ["bin", "bin_tests"] + self.cpp.build.libdirs = ["lib"] \ No newline at end of file diff --git a/data/.gitignore b/data/.gitignore new file mode 100644 index 00000000..74e5b1d3 --- /dev/null +++ b/data/.gitignore @@ -0,0 +1 @@ +!* \ No newline at end of file diff --git a/scripts/bfs_test/bbug/bbug1.sql b/data/bfs_test/bbug/bbug1.sql similarity index 100% rename from scripts/bfs_test/bbug/bbug1.sql rename to data/bfs_test/bbug/bbug1.sql diff --git a/scripts/bfs_test/bbug/bbug1r.txt b/data/bfs_test/bbug/bbug1r.txt similarity index 100% rename from scripts/bfs_test/bbug/bbug1r.txt rename to data/bfs_test/bbug/bbug1r.txt diff --git a/scripts/bfs_test/bbug/bbug3.sql b/data/bfs_test/bbug/bbug3.sql similarity index 100% rename from scripts/bfs_test/bbug/bbug3.sql rename to data/bfs_test/bbug/bbug3.sql diff --git a/scripts/bfs_test/bbug/bbug3r.txt b/data/bfs_test/bbug/bbug3r.txt similarity index 100% rename from scripts/bfs_test/bbug/bbug3r.txt rename to data/bfs_test/bbug/bbug3r.txt diff --git a/scripts/bfs_test/bbug/bbug4.sql b/data/bfs_test/bbug/bbug4.sql similarity index 100% rename from scripts/bfs_test/bbug/bbug4.sql rename to data/bfs_test/bbug/bbug4.sql diff --git a/scripts/bfs_test/bbug/bbug4r.txt b/data/bfs_test/bbug/bbug4r.txt similarity index 100% rename from scripts/bfs_test/bbug/bbug4r.txt rename to data/bfs_test/bbug/bbug4r.txt diff --git a/scripts/bfs_test/bbug/bbug5.sql b/data/bfs_test/bbug/bbug5.sql similarity index 100% rename from scripts/bfs_test/bbug/bbug5.sql rename to data/bfs_test/bbug/bbug5.sql diff --git a/scripts/bfs_test/bbug/bbug5r.txt b/data/bfs_test/bbug/bbug5r.txt similarity index 100% rename from scripts/bfs_test/bbug/bbug5r.txt rename to data/bfs_test/bbug/bbug5r.txt diff --git a/scripts/bfs_test/lubm/lubm_p0.sql b/data/bfs_test/lubm/lubm_p0.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_p0.sql rename to data/bfs_test/lubm/lubm_p0.sql diff --git a/scripts/bfs_test/lubm/lubm_p0r.txt b/data/bfs_test/lubm/lubm_p0r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_p0r.txt rename to data/bfs_test/lubm/lubm_p0r.txt diff --git a/scripts/bfs_test/lubm/lubm_p2.sql b/data/bfs_test/lubm/lubm_p2.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_p2.sql rename to data/bfs_test/lubm/lubm_p2.sql diff --git a/scripts/bfs_test/lubm/lubm_p2r.txt b/data/bfs_test/lubm/lubm_p2r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_p2r.txt rename to data/bfs_test/lubm/lubm_p2r.txt diff --git a/scripts/bfs_test/lubm/lubm_p3.sql b/data/bfs_test/lubm/lubm_p3.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_p3.sql rename to data/bfs_test/lubm/lubm_p3.sql diff --git a/scripts/bfs_test/lubm/lubm_p3r.txt b/data/bfs_test/lubm/lubm_p3r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_p3r.txt rename to data/bfs_test/lubm/lubm_p3r.txt diff --git a/scripts/bfs_test/lubm/lubm_p4.sql b/data/bfs_test/lubm/lubm_p4.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_p4.sql rename to data/bfs_test/lubm/lubm_p4.sql diff --git a/scripts/bfs_test/lubm/lubm_p4r.txt b/data/bfs_test/lubm/lubm_p4r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_p4r.txt rename to data/bfs_test/lubm/lubm_p4r.txt diff --git a/scripts/bfs_test/lubm/lubm_q0.sql b/data/bfs_test/lubm/lubm_q0.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q0.sql rename to data/bfs_test/lubm/lubm_q0.sql diff --git 
a/scripts/bfs_test/lubm/lubm_q0r.txt b/data/bfs_test/lubm/lubm_q0r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q0r.txt rename to data/bfs_test/lubm/lubm_q0r.txt diff --git a/scripts/bfs_test/lubm/lubm_q1.sql b/data/bfs_test/lubm/lubm_q1.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q1.sql rename to data/bfs_test/lubm/lubm_q1.sql diff --git a/scripts/bfs_test/lubm/lubm_q1r.txt b/data/bfs_test/lubm/lubm_q1r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q1r.txt rename to data/bfs_test/lubm/lubm_q1r.txt diff --git a/scripts/bfs_test/lubm/lubm_q2.sql b/data/bfs_test/lubm/lubm_q2.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q2.sql rename to data/bfs_test/lubm/lubm_q2.sql diff --git a/scripts/bfs_test/lubm/lubm_q2r.txt b/data/bfs_test/lubm/lubm_q2r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q2r.txt rename to data/bfs_test/lubm/lubm_q2r.txt diff --git a/scripts/bfs_test/lubm/lubm_q3.sql b/data/bfs_test/lubm/lubm_q3.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q3.sql rename to data/bfs_test/lubm/lubm_q3.sql diff --git a/scripts/bfs_test/lubm/lubm_q3r.txt b/data/bfs_test/lubm/lubm_q3r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q3r.txt rename to data/bfs_test/lubm/lubm_q3r.txt diff --git a/scripts/bfs_test/lubm/lubm_q4.sql b/data/bfs_test/lubm/lubm_q4.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q4.sql rename to data/bfs_test/lubm/lubm_q4.sql diff --git a/scripts/bfs_test/lubm/lubm_q4r.txt b/data/bfs_test/lubm/lubm_q4r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q4r.txt rename to data/bfs_test/lubm/lubm_q4r.txt diff --git a/scripts/bfs_test/lubm/lubm_q5.sql b/data/bfs_test/lubm/lubm_q5.sql similarity index 100% rename from scripts/bfs_test/lubm/lubm_q5.sql rename to data/bfs_test/lubm/lubm_q5.sql diff --git a/scripts/bfs_test/lubm/lubm_q5r.txt b/data/bfs_test/lubm/lubm_q5r.txt similarity index 100% rename from scripts/bfs_test/lubm/lubm_q5r.txt rename to data/bfs_test/lubm/lubm_q5r.txt diff --git a/scripts/bfs_test/num/num3.sql b/data/bfs_test/num/num3.sql similarity index 100% rename from scripts/bfs_test/num/num3.sql rename to data/bfs_test/num/num3.sql diff --git a/scripts/bfs_test/num/num3r.txt b/data/bfs_test/num/num3r.txt similarity index 100% rename from scripts/bfs_test/num/num3r.txt rename to data/bfs_test/num/num3r.txt diff --git a/scripts/bfs_test/small/small_p0.sql b/data/bfs_test/small/small_p0.sql similarity index 100% rename from scripts/bfs_test/small/small_p0.sql rename to data/bfs_test/small/small_p0.sql diff --git a/scripts/bfs_test/small/small_p0r.txt b/data/bfs_test/small/small_p0r.txt similarity index 100% rename from scripts/bfs_test/small/small_p0r.txt rename to data/bfs_test/small/small_p0r.txt diff --git a/scripts/bfs_test/small/small_p1.sql b/data/bfs_test/small/small_p1.sql similarity index 100% rename from scripts/bfs_test/small/small_p1.sql rename to data/bfs_test/small/small_p1.sql diff --git a/scripts/bfs_test/small/small_p1r.txt b/data/bfs_test/small/small_p1r.txt similarity index 100% rename from scripts/bfs_test/small/small_p1r.txt rename to data/bfs_test/small/small_p1r.txt diff --git a/scripts/bfs_test/small/small_p2.sql b/data/bfs_test/small/small_p2.sql similarity index 100% rename from scripts/bfs_test/small/small_p2.sql rename to data/bfs_test/small/small_p2.sql diff --git a/scripts/bfs_test/small/small_p2r.txt b/data/bfs_test/small/small_p2r.txt similarity index 100% rename from 
scripts/bfs_test/small/small_p2r.txt rename to data/bfs_test/small/small_p2r.txt diff --git a/scripts/bfs_test/small/small_p3.sql b/data/bfs_test/small/small_p3.sql similarity index 100% rename from scripts/bfs_test/small/small_p3.sql rename to data/bfs_test/small/small_p3.sql diff --git a/scripts/bfs_test/small/small_p3r.txt b/data/bfs_test/small/small_p3r.txt similarity index 100% rename from scripts/bfs_test/small/small_p3r.txt rename to data/bfs_test/small/small_p3r.txt diff --git a/scripts/bfs_test/small/small_q0.sql b/data/bfs_test/small/small_q0.sql similarity index 100% rename from scripts/bfs_test/small/small_q0.sql rename to data/bfs_test/small/small_q0.sql diff --git a/scripts/bfs_test/small/small_q0r.txt b/data/bfs_test/small/small_q0r.txt similarity index 100% rename from scripts/bfs_test/small/small_q0r.txt rename to data/bfs_test/small/small_q0r.txt diff --git a/scripts/bfs_test/small/small_q1.sql b/data/bfs_test/small/small_q1.sql similarity index 100% rename from scripts/bfs_test/small/small_q1.sql rename to data/bfs_test/small/small_q1.sql diff --git a/scripts/bfs_test/small/small_q1r.txt b/data/bfs_test/small/small_q1r.txt similarity index 100% rename from scripts/bfs_test/small/small_q1r.txt rename to data/bfs_test/small/small_q1r.txt diff --git a/scripts/bfs_test/small/small_q2.sql b/data/bfs_test/small/small_q2.sql similarity index 100% rename from scripts/bfs_test/small/small_q2.sql rename to data/bfs_test/small/small_q2.sql diff --git a/scripts/bfs_test/small/small_q2r.txt b/data/bfs_test/small/small_q2r.txt similarity index 100% rename from scripts/bfs_test/small/small_q2r.txt rename to data/bfs_test/small/small_q2r.txt diff --git a/scripts/bfs_test/small/small_q3.sql b/data/bfs_test/small/small_q3.sql similarity index 100% rename from scripts/bfs_test/small/small_q3.sql rename to data/bfs_test/small/small_q3.sql diff --git a/scripts/bfs_test/small/small_q3r.txt b/data/bfs_test/small/small_q3r.txt similarity index 100% rename from scripts/bfs_test/small/small_q3r.txt rename to data/bfs_test/small/small_q3r.txt diff --git a/scripts/bfs_test/small/small_s0.sql b/data/bfs_test/small/small_s0.sql similarity index 100% rename from scripts/bfs_test/small/small_s0.sql rename to data/bfs_test/small/small_s0.sql diff --git a/scripts/bfs_test/small/small_s0r.txt b/data/bfs_test/small/small_s0r.txt similarity index 100% rename from scripts/bfs_test/small/small_s0r.txt rename to data/bfs_test/small/small_s0r.txt diff --git a/scripts/bfs_test/small/small_s1.sql b/data/bfs_test/small/small_s1.sql similarity index 100% rename from scripts/bfs_test/small/small_s1.sql rename to data/bfs_test/small/small_s1.sql diff --git a/scripts/bfs_test/small/small_s1r.txt b/data/bfs_test/small/small_s1r.txt similarity index 100% rename from scripts/bfs_test/small/small_s1r.txt rename to data/bfs_test/small/small_s1r.txt diff --git a/scripts/dfs_test/bbug/bbug1.sql b/data/dfs_test/bbug/bbug1.sql similarity index 100% rename from scripts/dfs_test/bbug/bbug1.sql rename to data/dfs_test/bbug/bbug1.sql diff --git a/scripts/dfs_test/bbug/bbug3.sql b/data/dfs_test/bbug/bbug3.sql similarity index 100% rename from scripts/dfs_test/bbug/bbug3.sql rename to data/dfs_test/bbug/bbug3.sql diff --git a/scripts/dfs_test/bbug/bbug4.sql b/data/dfs_test/bbug/bbug4.sql similarity index 100% rename from scripts/dfs_test/bbug/bbug4.sql rename to data/dfs_test/bbug/bbug4.sql diff --git a/scripts/dfs_test/bbug/bbug5.sql b/data/dfs_test/bbug/bbug5.sql similarity index 100% rename from scripts/dfs_test/bbug/bbug5.sql 
rename to data/dfs_test/bbug/bbug5.sql diff --git a/scripts/dfs_test/lubm/lubm_p0.sql b/data/dfs_test/lubm/lubm_p0.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_p0.sql rename to data/dfs_test/lubm/lubm_p0.sql diff --git a/scripts/dfs_test/lubm/lubm_p2.sql b/data/dfs_test/lubm/lubm_p2.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_p2.sql rename to data/dfs_test/lubm/lubm_p2.sql diff --git a/scripts/dfs_test/lubm/lubm_p3.sql b/data/dfs_test/lubm/lubm_p3.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_p3.sql rename to data/dfs_test/lubm/lubm_p3.sql diff --git a/scripts/dfs_test/lubm/lubm_p4.sql b/data/dfs_test/lubm/lubm_p4.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_p4.sql rename to data/dfs_test/lubm/lubm_p4.sql diff --git a/scripts/dfs_test/lubm/lubm_q0.sql b/data/dfs_test/lubm/lubm_q0.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q0.sql rename to data/dfs_test/lubm/lubm_q0.sql diff --git a/scripts/dfs_test/lubm/lubm_q1.sql b/data/dfs_test/lubm/lubm_q1.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q1.sql rename to data/dfs_test/lubm/lubm_q1.sql diff --git a/scripts/dfs_test/lubm/lubm_q2.sql b/data/dfs_test/lubm/lubm_q2.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q2.sql rename to data/dfs_test/lubm/lubm_q2.sql diff --git a/scripts/dfs_test/lubm/lubm_q3.sql b/data/dfs_test/lubm/lubm_q3.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q3.sql rename to data/dfs_test/lubm/lubm_q3.sql diff --git a/scripts/dfs_test/lubm/lubm_q4.sql b/data/dfs_test/lubm/lubm_q4.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q4.sql rename to data/dfs_test/lubm/lubm_q4.sql diff --git a/scripts/dfs_test/lubm/lubm_q5.sql b/data/dfs_test/lubm/lubm_q5.sql similarity index 100% rename from scripts/dfs_test/lubm/lubm_q5.sql rename to data/dfs_test/lubm/lubm_q5.sql diff --git a/scripts/dfs_test/num/num3.sql b/data/dfs_test/num/num3.sql similarity index 100% rename from scripts/dfs_test/num/num3.sql rename to data/dfs_test/num/num3.sql diff --git a/scripts/dfs_test/small/small_p0.sql b/data/dfs_test/small/small_p0.sql similarity index 100% rename from scripts/dfs_test/small/small_p0.sql rename to data/dfs_test/small/small_p0.sql diff --git a/scripts/dfs_test/small/small_p1.sql b/data/dfs_test/small/small_p1.sql similarity index 100% rename from scripts/dfs_test/small/small_p1.sql rename to data/dfs_test/small/small_p1.sql diff --git a/scripts/dfs_test/small/small_p2.sql b/data/dfs_test/small/small_p2.sql similarity index 100% rename from scripts/dfs_test/small/small_p2.sql rename to data/dfs_test/small/small_p2.sql diff --git a/scripts/dfs_test/small/small_p3.sql b/data/dfs_test/small/small_p3.sql similarity index 100% rename from scripts/dfs_test/small/small_p3.sql rename to data/dfs_test/small/small_p3.sql diff --git a/scripts/dfs_test/small/small_q0.sql b/data/dfs_test/small/small_q0.sql similarity index 100% rename from scripts/dfs_test/small/small_q0.sql rename to data/dfs_test/small/small_q0.sql diff --git a/scripts/dfs_test/small/small_q1.sql b/data/dfs_test/small/small_q1.sql similarity index 100% rename from scripts/dfs_test/small/small_q1.sql rename to data/dfs_test/small/small_q1.sql diff --git a/scripts/dfs_test/small/small_q2.sql b/data/dfs_test/small/small_q2.sql similarity index 100% rename from scripts/dfs_test/small/small_q2.sql rename to data/dfs_test/small/small_q2.sql diff --git a/scripts/dfs_test/small/small_q3.sql 
b/data/dfs_test/small/small_q3.sql similarity index 100% rename from scripts/dfs_test/small/small_q3.sql rename to data/dfs_test/small/small_q3.sql diff --git a/scripts/dfs_test/small/small_s0.sql b/data/dfs_test/small/small_s0.sql similarity index 100% rename from scripts/dfs_test/small/small_s0.sql rename to data/dfs_test/small/small_s0.sql diff --git a/scripts/dfs_test/small/small_s1.sql b/data/dfs_test/small/small_s1.sql similarity index 100% rename from scripts/dfs_test/small/small_s1.sql rename to data/dfs_test/small/small_s1.sql diff --git a/scripts/parser_test/parser_d1.ttl b/data/parser_test/parser_d1.ttl similarity index 100% rename from scripts/parser_test/parser_d1.ttl rename to data/parser_test/parser_d1.ttl diff --git a/scripts/parser_test/parser_d10.ttl b/data/parser_test/parser_d10.ttl similarity index 100% rename from scripts/parser_test/parser_d10.ttl rename to data/parser_test/parser_d10.ttl diff --git a/scripts/parser_test/parser_d11.ttl b/data/parser_test/parser_d11.ttl similarity index 100% rename from scripts/parser_test/parser_d11.ttl rename to data/parser_test/parser_d11.ttl diff --git a/scripts/parser_test/parser_d12.ttl b/data/parser_test/parser_d12.ttl similarity index 100% rename from scripts/parser_test/parser_d12.ttl rename to data/parser_test/parser_d12.ttl diff --git a/scripts/parser_test/parser_d13.ttl b/data/parser_test/parser_d13.ttl similarity index 100% rename from scripts/parser_test/parser_d13.ttl rename to data/parser_test/parser_d13.ttl diff --git a/scripts/parser_test/parser_d14.ttl b/data/parser_test/parser_d14.ttl similarity index 100% rename from scripts/parser_test/parser_d14.ttl rename to data/parser_test/parser_d14.ttl diff --git a/scripts/parser_test/parser_d15.ttl b/data/parser_test/parser_d15.ttl similarity index 100% rename from scripts/parser_test/parser_d15.ttl rename to data/parser_test/parser_d15.ttl diff --git a/scripts/parser_test/parser_d16.ttl b/data/parser_test/parser_d16.ttl similarity index 100% rename from scripts/parser_test/parser_d16.ttl rename to data/parser_test/parser_d16.ttl diff --git a/scripts/parser_test/parser_d17.ttl b/data/parser_test/parser_d17.ttl similarity index 100% rename from scripts/parser_test/parser_d17.ttl rename to data/parser_test/parser_d17.ttl diff --git a/scripts/parser_test/parser_d18.ttl b/data/parser_test/parser_d18.ttl similarity index 100% rename from scripts/parser_test/parser_d18.ttl rename to data/parser_test/parser_d18.ttl diff --git a/scripts/parser_test/parser_d19.ttl b/data/parser_test/parser_d19.ttl similarity index 100% rename from scripts/parser_test/parser_d19.ttl rename to data/parser_test/parser_d19.ttl diff --git a/scripts/parser_test/parser_d2.ttl b/data/parser_test/parser_d2.ttl similarity index 100% rename from scripts/parser_test/parser_d2.ttl rename to data/parser_test/parser_d2.ttl diff --git a/scripts/parser_test/parser_d20.ttl b/data/parser_test/parser_d20.ttl similarity index 100% rename from scripts/parser_test/parser_d20.ttl rename to data/parser_test/parser_d20.ttl diff --git a/scripts/parser_test/parser_d21.ttl b/data/parser_test/parser_d21.ttl similarity index 100% rename from scripts/parser_test/parser_d21.ttl rename to data/parser_test/parser_d21.ttl diff --git a/scripts/parser_test/parser_d22.ttl b/data/parser_test/parser_d22.ttl similarity index 100% rename from scripts/parser_test/parser_d22.ttl rename to data/parser_test/parser_d22.ttl diff --git a/scripts/parser_test/parser_d23.ttl b/data/parser_test/parser_d23.ttl similarity index 100% rename from 
scripts/parser_test/parser_d23.ttl rename to data/parser_test/parser_d23.ttl diff --git a/scripts/parser_test/parser_d24.ttl b/data/parser_test/parser_d24.ttl similarity index 100% rename from scripts/parser_test/parser_d24.ttl rename to data/parser_test/parser_d24.ttl diff --git a/scripts/parser_test/parser_d25.ttl b/data/parser_test/parser_d25.ttl similarity index 100% rename from scripts/parser_test/parser_d25.ttl rename to data/parser_test/parser_d25.ttl diff --git a/scripts/parser_test/parser_d26.ttl b/data/parser_test/parser_d26.ttl similarity index 100% rename from scripts/parser_test/parser_d26.ttl rename to data/parser_test/parser_d26.ttl diff --git a/scripts/parser_test/parser_d27.ttl b/data/parser_test/parser_d27.ttl similarity index 100% rename from scripts/parser_test/parser_d27.ttl rename to data/parser_test/parser_d27.ttl diff --git a/scripts/parser_test/parser_d28.ttl b/data/parser_test/parser_d28.ttl similarity index 100% rename from scripts/parser_test/parser_d28.ttl rename to data/parser_test/parser_d28.ttl diff --git a/scripts/parser_test/parser_d29.ttl b/data/parser_test/parser_d29.ttl similarity index 100% rename from scripts/parser_test/parser_d29.ttl rename to data/parser_test/parser_d29.ttl diff --git a/scripts/parser_test/parser_d3.ttl b/data/parser_test/parser_d3.ttl similarity index 100% rename from scripts/parser_test/parser_d3.ttl rename to data/parser_test/parser_d3.ttl diff --git a/scripts/parser_test/parser_d30.ttl b/data/parser_test/parser_d30.ttl similarity index 100% rename from scripts/parser_test/parser_d30.ttl rename to data/parser_test/parser_d30.ttl diff --git a/scripts/parser_test/parser_d31.ttl b/data/parser_test/parser_d31.ttl similarity index 100% rename from scripts/parser_test/parser_d31.ttl rename to data/parser_test/parser_d31.ttl diff --git a/scripts/parser_test/parser_d32.ttl b/data/parser_test/parser_d32.ttl similarity index 100% rename from scripts/parser_test/parser_d32.ttl rename to data/parser_test/parser_d32.ttl diff --git a/scripts/parser_test/parser_d33.ttl b/data/parser_test/parser_d33.ttl similarity index 100% rename from scripts/parser_test/parser_d33.ttl rename to data/parser_test/parser_d33.ttl diff --git a/scripts/parser_test/parser_d34.ttl b/data/parser_test/parser_d34.ttl similarity index 100% rename from scripts/parser_test/parser_d34.ttl rename to data/parser_test/parser_d34.ttl diff --git a/scripts/parser_test/parser_d35.ttl b/data/parser_test/parser_d35.ttl similarity index 100% rename from scripts/parser_test/parser_d35.ttl rename to data/parser_test/parser_d35.ttl diff --git a/scripts/parser_test/parser_d36.ttl b/data/parser_test/parser_d36.ttl similarity index 100% rename from scripts/parser_test/parser_d36.ttl rename to data/parser_test/parser_d36.ttl diff --git a/scripts/parser_test/parser_d37.ttl b/data/parser_test/parser_d37.ttl similarity index 100% rename from scripts/parser_test/parser_d37.ttl rename to data/parser_test/parser_d37.ttl diff --git a/scripts/parser_test/parser_d38.ttl b/data/parser_test/parser_d38.ttl similarity index 100% rename from scripts/parser_test/parser_d38.ttl rename to data/parser_test/parser_d38.ttl diff --git a/scripts/parser_test/parser_d39.ttl b/data/parser_test/parser_d39.ttl similarity index 100% rename from scripts/parser_test/parser_d39.ttl rename to data/parser_test/parser_d39.ttl diff --git a/scripts/parser_test/parser_d4.ttl b/data/parser_test/parser_d4.ttl similarity index 100% rename from scripts/parser_test/parser_d4.ttl rename to data/parser_test/parser_d4.ttl diff --git 
a/scripts/parser_test/parser_d40.ttl b/data/parser_test/parser_d40.ttl similarity index 100% rename from scripts/parser_test/parser_d40.ttl rename to data/parser_test/parser_d40.ttl diff --git a/scripts/parser_test/parser_d41.ttl b/data/parser_test/parser_d41.ttl similarity index 100% rename from scripts/parser_test/parser_d41.ttl rename to data/parser_test/parser_d41.ttl diff --git a/scripts/parser_test/parser_d42.ttl b/data/parser_test/parser_d42.ttl similarity index 100% rename from scripts/parser_test/parser_d42.ttl rename to data/parser_test/parser_d42.ttl diff --git a/scripts/parser_test/parser_d43.ttl b/data/parser_test/parser_d43.ttl similarity index 100% rename from scripts/parser_test/parser_d43.ttl rename to data/parser_test/parser_d43.ttl diff --git a/scripts/parser_test/parser_d44.ttl b/data/parser_test/parser_d44.ttl similarity index 100% rename from scripts/parser_test/parser_d44.ttl rename to data/parser_test/parser_d44.ttl diff --git a/scripts/parser_test/parser_d45.ttl b/data/parser_test/parser_d45.ttl similarity index 100% rename from scripts/parser_test/parser_d45.ttl rename to data/parser_test/parser_d45.ttl diff --git a/scripts/parser_test/parser_d46.ttl b/data/parser_test/parser_d46.ttl similarity index 100% rename from scripts/parser_test/parser_d46.ttl rename to data/parser_test/parser_d46.ttl diff --git a/scripts/parser_test/parser_d47.ttl b/data/parser_test/parser_d47.ttl similarity index 100% rename from scripts/parser_test/parser_d47.ttl rename to data/parser_test/parser_d47.ttl diff --git a/scripts/parser_test/parser_d48.ttl b/data/parser_test/parser_d48.ttl similarity index 100% rename from scripts/parser_test/parser_d48.ttl rename to data/parser_test/parser_d48.ttl diff --git a/scripts/parser_test/parser_d49.ttl b/data/parser_test/parser_d49.ttl similarity index 100% rename from scripts/parser_test/parser_d49.ttl rename to data/parser_test/parser_d49.ttl diff --git a/scripts/parser_test/parser_d5.ttl b/data/parser_test/parser_d5.ttl similarity index 100% rename from scripts/parser_test/parser_d5.ttl rename to data/parser_test/parser_d5.ttl diff --git a/scripts/parser_test/parser_d50.ttl b/data/parser_test/parser_d50.ttl similarity index 100% rename from scripts/parser_test/parser_d50.ttl rename to data/parser_test/parser_d50.ttl diff --git a/scripts/parser_test/parser_d6.ttl b/data/parser_test/parser_d6.ttl similarity index 100% rename from scripts/parser_test/parser_d6.ttl rename to data/parser_test/parser_d6.ttl diff --git a/scripts/parser_test/parser_d7.ttl b/data/parser_test/parser_d7.ttl similarity index 100% rename from scripts/parser_test/parser_d7.ttl rename to data/parser_test/parser_d7.ttl diff --git a/scripts/parser_test/parser_d8.ttl b/data/parser_test/parser_d8.ttl similarity index 100% rename from scripts/parser_test/parser_d8.ttl rename to data/parser_test/parser_d8.ttl diff --git a/scripts/parser_test/parser_d9.ttl b/data/parser_test/parser_d9.ttl similarity index 100% rename from scripts/parser_test/parser_d9.ttl rename to data/parser_test/parser_d9.ttl diff --git a/scripts/parser_test/parser_q1.sql b/data/parser_test/parser_q1.sql similarity index 100% rename from scripts/parser_test/parser_q1.sql rename to data/parser_test/parser_q1.sql diff --git a/scripts/parser_test/parser_q10.sql b/data/parser_test/parser_q10.sql similarity index 100% rename from scripts/parser_test/parser_q10.sql rename to data/parser_test/parser_q10.sql diff --git a/scripts/parser_test/parser_q11.sql b/data/parser_test/parser_q11.sql similarity index 100% rename from 
scripts/parser_test/parser_q11.sql rename to data/parser_test/parser_q11.sql diff --git a/scripts/parser_test/parser_q12.sql b/data/parser_test/parser_q12.sql similarity index 100% rename from scripts/parser_test/parser_q12.sql rename to data/parser_test/parser_q12.sql diff --git a/scripts/parser_test/parser_q13.sql b/data/parser_test/parser_q13.sql similarity index 100% rename from scripts/parser_test/parser_q13.sql rename to data/parser_test/parser_q13.sql diff --git a/scripts/parser_test/parser_q14.sql b/data/parser_test/parser_q14.sql similarity index 100% rename from scripts/parser_test/parser_q14.sql rename to data/parser_test/parser_q14.sql diff --git a/scripts/parser_test/parser_q15.sql b/data/parser_test/parser_q15.sql similarity index 100% rename from scripts/parser_test/parser_q15.sql rename to data/parser_test/parser_q15.sql diff --git a/scripts/parser_test/parser_q16.sql b/data/parser_test/parser_q16.sql similarity index 100% rename from scripts/parser_test/parser_q16.sql rename to data/parser_test/parser_q16.sql diff --git a/scripts/parser_test/parser_q17.sql b/data/parser_test/parser_q17.sql similarity index 100% rename from scripts/parser_test/parser_q17.sql rename to data/parser_test/parser_q17.sql diff --git a/scripts/parser_test/parser_q18.sql b/data/parser_test/parser_q18.sql similarity index 100% rename from scripts/parser_test/parser_q18.sql rename to data/parser_test/parser_q18.sql diff --git a/scripts/parser_test/parser_q19.sql b/data/parser_test/parser_q19.sql similarity index 100% rename from scripts/parser_test/parser_q19.sql rename to data/parser_test/parser_q19.sql diff --git a/scripts/parser_test/parser_q2.sql b/data/parser_test/parser_q2.sql similarity index 100% rename from scripts/parser_test/parser_q2.sql rename to data/parser_test/parser_q2.sql diff --git a/scripts/parser_test/parser_q20.sql b/data/parser_test/parser_q20.sql similarity index 100% rename from scripts/parser_test/parser_q20.sql rename to data/parser_test/parser_q20.sql diff --git a/scripts/parser_test/parser_q21.sql b/data/parser_test/parser_q21.sql similarity index 100% rename from scripts/parser_test/parser_q21.sql rename to data/parser_test/parser_q21.sql diff --git a/scripts/parser_test/parser_q22.sql b/data/parser_test/parser_q22.sql similarity index 100% rename from scripts/parser_test/parser_q22.sql rename to data/parser_test/parser_q22.sql diff --git a/scripts/parser_test/parser_q23.sql b/data/parser_test/parser_q23.sql similarity index 100% rename from scripts/parser_test/parser_q23.sql rename to data/parser_test/parser_q23.sql diff --git a/scripts/parser_test/parser_q24.sql b/data/parser_test/parser_q24.sql similarity index 100% rename from scripts/parser_test/parser_q24.sql rename to data/parser_test/parser_q24.sql diff --git a/scripts/parser_test/parser_q25.sql b/data/parser_test/parser_q25.sql similarity index 100% rename from scripts/parser_test/parser_q25.sql rename to data/parser_test/parser_q25.sql diff --git a/scripts/parser_test/parser_q26.sql b/data/parser_test/parser_q26.sql similarity index 100% rename from scripts/parser_test/parser_q26.sql rename to data/parser_test/parser_q26.sql diff --git a/scripts/parser_test/parser_q27.sql b/data/parser_test/parser_q27.sql similarity index 100% rename from scripts/parser_test/parser_q27.sql rename to data/parser_test/parser_q27.sql diff --git a/scripts/parser_test/parser_q28.sql b/data/parser_test/parser_q28.sql similarity index 100% rename from scripts/parser_test/parser_q28.sql rename to data/parser_test/parser_q28.sql diff 
--git a/scripts/parser_test/parser_q29.sql b/data/parser_test/parser_q29.sql similarity index 100% rename from scripts/parser_test/parser_q29.sql rename to data/parser_test/parser_q29.sql diff --git a/scripts/parser_test/parser_q3.sql b/data/parser_test/parser_q3.sql similarity index 100% rename from scripts/parser_test/parser_q3.sql rename to data/parser_test/parser_q3.sql diff --git a/scripts/parser_test/parser_q30.sql b/data/parser_test/parser_q30.sql similarity index 100% rename from scripts/parser_test/parser_q30.sql rename to data/parser_test/parser_q30.sql diff --git a/scripts/parser_test/parser_q31.sql b/data/parser_test/parser_q31.sql similarity index 100% rename from scripts/parser_test/parser_q31.sql rename to data/parser_test/parser_q31.sql diff --git a/scripts/parser_test/parser_q32.sql b/data/parser_test/parser_q32.sql similarity index 100% rename from scripts/parser_test/parser_q32.sql rename to data/parser_test/parser_q32.sql diff --git a/scripts/parser_test/parser_q33.sql b/data/parser_test/parser_q33.sql similarity index 100% rename from scripts/parser_test/parser_q33.sql rename to data/parser_test/parser_q33.sql diff --git a/scripts/parser_test/parser_q34.sql b/data/parser_test/parser_q34.sql similarity index 100% rename from scripts/parser_test/parser_q34.sql rename to data/parser_test/parser_q34.sql diff --git a/scripts/parser_test/parser_q35.sql b/data/parser_test/parser_q35.sql similarity index 100% rename from scripts/parser_test/parser_q35.sql rename to data/parser_test/parser_q35.sql diff --git a/scripts/parser_test/parser_q36.sql b/data/parser_test/parser_q36.sql similarity index 100% rename from scripts/parser_test/parser_q36.sql rename to data/parser_test/parser_q36.sql diff --git a/scripts/parser_test/parser_q37.sql b/data/parser_test/parser_q37.sql similarity index 100% rename from scripts/parser_test/parser_q37.sql rename to data/parser_test/parser_q37.sql diff --git a/scripts/parser_test/parser_q38.sql b/data/parser_test/parser_q38.sql similarity index 100% rename from scripts/parser_test/parser_q38.sql rename to data/parser_test/parser_q38.sql diff --git a/scripts/parser_test/parser_q39.sql b/data/parser_test/parser_q39.sql similarity index 100% rename from scripts/parser_test/parser_q39.sql rename to data/parser_test/parser_q39.sql diff --git a/scripts/parser_test/parser_q4.sql b/data/parser_test/parser_q4.sql similarity index 100% rename from scripts/parser_test/parser_q4.sql rename to data/parser_test/parser_q4.sql diff --git a/scripts/parser_test/parser_q40.sql b/data/parser_test/parser_q40.sql similarity index 100% rename from scripts/parser_test/parser_q40.sql rename to data/parser_test/parser_q40.sql diff --git a/scripts/parser_test/parser_q41.sql b/data/parser_test/parser_q41.sql similarity index 100% rename from scripts/parser_test/parser_q41.sql rename to data/parser_test/parser_q41.sql diff --git a/scripts/parser_test/parser_q42.sql b/data/parser_test/parser_q42.sql similarity index 100% rename from scripts/parser_test/parser_q42.sql rename to data/parser_test/parser_q42.sql diff --git a/scripts/parser_test/parser_q43.sql b/data/parser_test/parser_q43.sql similarity index 100% rename from scripts/parser_test/parser_q43.sql rename to data/parser_test/parser_q43.sql diff --git a/scripts/parser_test/parser_q44.sql b/data/parser_test/parser_q44.sql similarity index 100% rename from scripts/parser_test/parser_q44.sql rename to data/parser_test/parser_q44.sql diff --git a/scripts/parser_test/parser_q45.sql b/data/parser_test/parser_q45.sql similarity 
index 100% rename from scripts/parser_test/parser_q45.sql rename to data/parser_test/parser_q45.sql diff --git a/scripts/parser_test/parser_q46.sql b/data/parser_test/parser_q46.sql similarity index 100% rename from scripts/parser_test/parser_q46.sql rename to data/parser_test/parser_q46.sql diff --git a/scripts/parser_test/parser_q47.sql b/data/parser_test/parser_q47.sql similarity index 100% rename from scripts/parser_test/parser_q47.sql rename to data/parser_test/parser_q47.sql diff --git a/scripts/parser_test/parser_q48.sql b/data/parser_test/parser_q48.sql similarity index 100% rename from scripts/parser_test/parser_q48.sql rename to data/parser_test/parser_q48.sql diff --git a/scripts/parser_test/parser_q49.sql b/data/parser_test/parser_q49.sql similarity index 100% rename from scripts/parser_test/parser_q49.sql rename to data/parser_test/parser_q49.sql diff --git a/scripts/parser_test/parser_q5.sql b/data/parser_test/parser_q5.sql similarity index 100% rename from scripts/parser_test/parser_q5.sql rename to data/parser_test/parser_q5.sql diff --git a/scripts/parser_test/parser_q50.sql b/data/parser_test/parser_q50.sql similarity index 100% rename from scripts/parser_test/parser_q50.sql rename to data/parser_test/parser_q50.sql diff --git a/scripts/parser_test/parser_q6.sql b/data/parser_test/parser_q6.sql similarity index 100% rename from scripts/parser_test/parser_q6.sql rename to data/parser_test/parser_q6.sql diff --git a/scripts/parser_test/parser_q7.sql b/data/parser_test/parser_q7.sql similarity index 100% rename from scripts/parser_test/parser_q7.sql rename to data/parser_test/parser_q7.sql diff --git a/scripts/parser_test/parser_q8.sql b/data/parser_test/parser_q8.sql similarity index 100% rename from scripts/parser_test/parser_q8.sql rename to data/parser_test/parser_q8.sql diff --git a/scripts/parser_test/parser_q9.sql b/data/parser_test/parser_q9.sql similarity index 100% rename from scripts/parser_test/parser_q9.sql rename to data/parser_test/parser_q9.sql diff --git a/data/parser_test/parser_r1.txt b/data/parser_test/parser_r1.txt new file mode 100644 index 00000000..557c41b0 --- /dev/null +++ b/data/parser_test/parser_r1.txt @@ -0,0 +1,6 @@ +---------------------------------------------------- +| ?name | ?mbox | +---------------------------------------------------- +| "Johnny Lee Outlaw" | | +| "Peter Goodguy" | | +---------------------------------------------------- diff --git a/data/parser_test/parser_r10.txt b/data/parser_test/parser_r10.txt new file mode 100644 index 00000000..20655fc2 --- /dev/null +++ b/data/parser_test/parser_r10.txt @@ -0,0 +1,6 @@ +------------------------------------------------------------------------- +| ?title | ?price | +------------------------------------------------------------------------- +| "SPARQL Tutorial" | | +| "The Semantic Web" | "23"^^ | +------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r11.txt b/data/parser_test/parser_r11.txt new file mode 100644 index 00000000..71b72f14 --- /dev/null +++ b/data/parser_test/parser_r11.txt @@ -0,0 +1,6 @@ +-------------------------------------------------------------------------- +| ?name | ?mbox | ?hpage | +-------------------------------------------------------------------------- +| "Alice" | | | +| "Bob" | | | +-------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r12.txt b/data/parser_test/parser_r12.txt new file mode 100644 index 00000000..1726048a --- /dev/null +++ 
b/data/parser_test/parser_r12.txt @@ -0,0 +1,8 @@ +------------------------------------ +| ?title | +------------------------------------ +| "SPARQL Query Language Tutorial" | +| "SPARQL" | +| "SPARQL Protocol Tutorial" | +| "SPARQL (updated)" | +------------------------------------ diff --git a/data/parser_test/parser_r13.txt b/data/parser_test/parser_r13.txt new file mode 100644 index 00000000..5b38be58 --- /dev/null +++ b/data/parser_test/parser_r13.txt @@ -0,0 +1,8 @@ +----------------------------------------------------------------- +| ?x | ?y | +----------------------------------------------------------------- +| "SPARQL Query Language Tutorial" | | +| "SPARQL" | | +| | "SPARQL Protocol Tutorial" | +| | "SPARQL (updated)" | +----------------------------------------------------------------- diff --git a/data/parser_test/parser_r14.txt b/data/parser_test/parser_r14.txt new file mode 100644 index 00000000..a9cb2578 --- /dev/null +++ b/data/parser_test/parser_r14.txt @@ -0,0 +1,6 @@ +---------------------------------------------- +| ?title | ?author | +---------------------------------------------- +| "SPARQL Query Language Tutorial" | "Alice" | +| "SPARQL Protocol Tutorial" | "Bob" | +---------------------------------------------- diff --git a/data/parser_test/parser_r15.txt b/data/parser_test/parser_r15.txt new file mode 100644 index 00000000..5c1de4c5 --- /dev/null +++ b/data/parser_test/parser_r15.txt @@ -0,0 +1,8 @@ +-------------------------- +| ?s | +-------------------------- +| | +| | +| | +| | +-------------------------- diff --git a/data/parser_test/parser_r16.txt b/data/parser_test/parser_r16.txt new file mode 100644 index 00000000..816ac310 --- /dev/null +++ b/data/parser_test/parser_r16.txt @@ -0,0 +1,5 @@ +---------------------------------------------------------------- +| ?s | ?p | ?o | +---------------------------------------------------------------- +| | | | +---------------------------------------------------------------- diff --git a/data/parser_test/parser_r17.txt b/data/parser_test/parser_r17.txt new file mode 100644 index 00000000..816ac310 --- /dev/null +++ b/data/parser_test/parser_r17.txt @@ -0,0 +1,5 @@ +---------------------------------------------------------------- +| ?s | ?p | ?o | +---------------------------------------------------------------- +| | | | +---------------------------------------------------------------- diff --git a/data/parser_test/parser_r18.txt b/data/parser_test/parser_r18.txt new file mode 100644 index 00000000..334de828 --- /dev/null +++ b/data/parser_test/parser_r18.txt @@ -0,0 +1,5 @@ +----------- +| ?name | +----------- +| "Alice" | +----------- diff --git a/data/parser_test/parser_r19.txt b/data/parser_test/parser_r19.txt new file mode 100644 index 00000000..ddf06d01 --- /dev/null +++ b/data/parser_test/parser_r19.txt @@ -0,0 +1,6 @@ +------------------------------ +| ?nameX | ?nameY | ?nickY | +------------------------------ +| "Alice" | "Bob" | | +| "Alice" | "Clare" | "CT" | +------------------------------ diff --git a/data/parser_test/parser_r2.txt b/data/parser_test/parser_r2.txt new file mode 100644 index 00000000..0c48801e --- /dev/null +++ b/data/parser_test/parser_r2.txt @@ -0,0 +1,4 @@ +------ +| ?v | +------ +------ diff --git a/data/parser_test/parser_r20.txt b/data/parser_test/parser_r20.txt new file mode 100644 index 00000000..ba66c54c --- /dev/null +++ b/data/parser_test/parser_r20.txt @@ -0,0 +1,5 @@ +------------------------------------------------------ +| ?_askResult | 
+------------------------------------------------------ +| "true"^^ | +------------------------------------------------------ diff --git a/data/parser_test/parser_r21.txt b/data/parser_test/parser_r21.txt new file mode 100644 index 00000000..8621993d --- /dev/null +++ b/data/parser_test/parser_r21.txt @@ -0,0 +1,5 @@ +------------------------------------------------------- +| ?_askResult | +------------------------------------------------------- +| "false"^^ | +------------------------------------------------------- diff --git a/data/parser_test/parser_r22.txt b/data/parser_test/parser_r22.txt new file mode 100644 index 00000000..0e2069b4 --- /dev/null +++ b/data/parser_test/parser_r22.txt @@ -0,0 +1,5 @@ +-------------- +| ?givenName | +-------------- +| "Bob" | +-------------- diff --git a/data/parser_test/parser_r23.txt b/data/parser_test/parser_r23.txt new file mode 100644 index 00000000..334de828 --- /dev/null +++ b/data/parser_test/parser_r23.txt @@ -0,0 +1,5 @@ +----------- +| ?name | +----------- +| "Alice" | +----------- diff --git a/data/parser_test/parser_r24.txt b/data/parser_test/parser_r24.txt new file mode 100644 index 00000000..11b13677 --- /dev/null +++ b/data/parser_test/parser_r24.txt @@ -0,0 +1,6 @@ +--------------------- +| ?name1 | ?name2 | +--------------------- +| "Alice" | "Ms A." | +| "Ms A." | "Alice" | +--------------------- diff --git a/data/parser_test/parser_r25.txt b/data/parser_test/parser_r25.txt new file mode 100644 index 00000000..3625fd77 --- /dev/null +++ b/data/parser_test/parser_r25.txt @@ -0,0 +1,5 @@ +----------------------------------------- +| ?name | ?mbox | +----------------------------------------- +| "Alice" | | +----------------------------------------- diff --git a/data/parser_test/parser_r26.txt b/data/parser_test/parser_r26.txt new file mode 100644 index 00000000..0bccffa7 --- /dev/null +++ b/data/parser_test/parser_r26.txt @@ -0,0 +1,5 @@ +------------------------------ +| ?name | ?mbox | +------------------------------ +| "Bob" | "bob@work.example" | +------------------------------ diff --git a/data/parser_test/parser_r27.txt b/data/parser_test/parser_r27.txt new file mode 100644 index 00000000..3625fd77 --- /dev/null +++ b/data/parser_test/parser_r27.txt @@ -0,0 +1,5 @@ +----------------------------------------- +| ?name | ?mbox | +----------------------------------------- +| "Alice" | | +----------------------------------------- diff --git a/data/parser_test/parser_r28.txt b/data/parser_test/parser_r28.txt new file mode 100644 index 00000000..626e653a --- /dev/null +++ b/data/parser_test/parser_r28.txt @@ -0,0 +1,5 @@ +-------------------------------------------- +| ?name | ?mbox | +-------------------------------------------- +| "Roberto"@es | | +-------------------------------------------- diff --git a/data/parser_test/parser_r29.txt b/data/parser_test/parser_r29.txt new file mode 100644 index 00000000..8a5ff3be --- /dev/null +++ b/data/parser_test/parser_r29.txt @@ -0,0 +1,5 @@ +---------------------------------------------- +| ?title | +---------------------------------------------- +| "Cette Série des Années Soixante-dix"@fr | +---------------------------------------------- diff --git a/data/parser_test/parser_r3.txt b/data/parser_test/parser_r3.txt new file mode 100644 index 00000000..8bb17c4e --- /dev/null +++ b/data/parser_test/parser_r3.txt @@ -0,0 +1,5 @@ +----------------------------- +| ?v | +----------------------------- +| | +----------------------------- diff --git a/data/parser_test/parser_r30.txt 
b/data/parser_test/parser_r30.txt new file mode 100644 index 00000000..334de828 --- /dev/null +++ b/data/parser_test/parser_r30.txt @@ -0,0 +1,5 @@ +----------- +| ?name | +----------- +| "Alice" | +----------- diff --git a/data/parser_test/parser_r31.txt b/data/parser_test/parser_r31.txt new file mode 100644 index 00000000..462acf4b --- /dev/null +++ b/data/parser_test/parser_r31.txt @@ -0,0 +1,5 @@ +-------------------------------------------------------------------------------- +| ?y | ?x | +-------------------------------------------------------------------------------- +| | "42"^^ | +-------------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r32.txt b/data/parser_test/parser_r32.txt new file mode 100644 index 00000000..52e72b76 --- /dev/null +++ b/data/parser_test/parser_r32.txt @@ -0,0 +1,7 @@ +----------- +| ?name | +----------- +| "Bob" | +| "Alice" | +| "Carol" | +----------- diff --git a/data/parser_test/parser_r33.txt b/data/parser_test/parser_r33.txt new file mode 100644 index 00000000..023302f0 --- /dev/null +++ b/data/parser_test/parser_r33.txt @@ -0,0 +1,5 @@ +-------------------------------------------------------------------------------------------------------- +| ?y | +-------------------------------------------------------------------------------------------------------- +| "{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"edges\":[{\"fromNode\":2,\"toNode\":3,\"predIRI\":\"<喜欢>\"},{\"fromNode\":3,\"toNode\":1,\"predIRI\":\"<关注>\"},{\"fromNode\":1,\"toNode\":2,\"predIRI\":\"<喜欢>\"}],\"nodes\":[{\"nodeIndex\":1,\"nodeIRI\":\"\"},{\"nodeIndex\":2,\"nodeIRI\":\"\"},{\"nodeIndex\":3,\"nodeIRI\":\"\"}]}]}" | +-------------------------------------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r34.txt b/data/parser_test/parser_r34.txt new file mode 100644 index 00000000..df39fd9b --- /dev/null +++ b/data/parser_test/parser_r34.txt @@ -0,0 +1,5 @@ +---------------------------------------------------------------------------- +| ?y | +---------------------------------------------------------------------------- +| "{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"length\":3}]}" | +---------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r35.txt b/data/parser_test/parser_r35.txt new file mode 100644 index 00000000..8e13836c --- /dev/null +++ b/data/parser_test/parser_r35.txt @@ -0,0 +1,5 @@ +---------------------------------------------------------------------------------- +| ?y | +---------------------------------------------------------------------------------- +| "{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"value\":\"true\"}]}" | +---------------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r36.txt b/data/parser_test/parser_r36.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r36.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r37.txt b/data/parser_test/parser_r37.txt new file mode 100644 index 00000000..54766285 --- /dev/null +++ b/data/parser_test/parser_r37.txt @@ -0,0 +1,7 @@ +----------------------------------------------------------------------- +| ?x | ?y | ?z | +----------------------------------------------------------------------- +| | | "Alice"@en | +| | | "167.5"^^ | +| | | "-5"^^ | +----------------------------------------------------------------------- diff 
--git a/data/parser_test/parser_r38.txt b/data/parser_test/parser_r38.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r38.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r39.txt b/data/parser_test/parser_r39.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r39.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r4.txt b/data/parser_test/parser_r4.txt new file mode 100644 index 00000000..5247989f --- /dev/null +++ b/data/parser_test/parser_r4.txt @@ -0,0 +1,5 @@ +----------------------------- +| ?v | +----------------------------- +| | +----------------------------- diff --git a/data/parser_test/parser_r40.txt b/data/parser_test/parser_r40.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r40.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r41.txt b/data/parser_test/parser_r41.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r41.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r42.txt b/data/parser_test/parser_r42.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r42.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r43.txt b/data/parser_test/parser_r43.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r43.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r44.txt b/data/parser_test/parser_r44.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r44.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r45.txt b/data/parser_test/parser_r45.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r45.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r46.txt b/data/parser_test/parser_r46.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r46.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r47.txt b/data/parser_test/parser_r47.txt new file mode 100644 index 00000000..9ba320dc --- /dev/null +++ b/data/parser_test/parser_r47.txt @@ -0,0 +1,5 @@ +-------- +| ?x | +-------- +| | +-------- diff --git a/data/parser_test/parser_r48.txt b/data/parser_test/parser_r48.txt new file mode 100644 index 00000000..0080d8a3 --- /dev/null +++ b/data/parser_test/parser_r48.txt @@ -0,0 +1,12 @@ +-------- +| ?x | +-------- +| | +| | +| | +| | +| | +| | +| | +| | +-------- diff --git a/data/parser_test/parser_r49.txt b/data/parser_test/parser_r49.txt new file mode 100644 index 00000000..0080d8a3 --- /dev/null +++ b/data/parser_test/parser_r49.txt @@ -0,0 +1,12 @@ +-------- +| ?x | +-------- +| | +| | +| | +| | +| | +| | +| | +| | +-------- diff --git a/data/parser_test/parser_r5.txt b/data/parser_test/parser_r5.txt new file mode 100644 index 00000000..f4db36c7 --- /dev/null +++ b/data/parser_test/parser_r5.txt @@ -0,0 +1,5 @@ +----------------------------- +| ?v | +----------------------------- +| | +----------------------------- diff --git a/data/parser_test/parser_r50.txt 
b/data/parser_test/parser_r50.txt new file mode 100644 index 00000000..044cb29d --- /dev/null +++ b/data/parser_test/parser_r50.txt @@ -0,0 +1,11 @@ +--------------- +| ?x | ?y | +--------------- +| | | +| | | +| | | +| | | +| | | +| | | +| | | +--------------- diff --git a/data/parser_test/parser_r6.txt b/data/parser_test/parser_r6.txt new file mode 100644 index 00000000..9aa0a475 --- /dev/null +++ b/data/parser_test/parser_r6.txt @@ -0,0 +1,5 @@ +--------------------- +| ?title | +--------------------- +| "SPARQL Tutorial" | +--------------------- diff --git a/data/parser_test/parser_r7.txt b/data/parser_test/parser_r7.txt new file mode 100644 index 00000000..64c813d5 --- /dev/null +++ b/data/parser_test/parser_r7.txt @@ -0,0 +1,5 @@ +---------------------- +| ?title | +---------------------- +| "The Semantic Web" | +---------------------- diff --git a/data/parser_test/parser_r8.txt b/data/parser_test/parser_r8.txt new file mode 100644 index 00000000..fac56848 --- /dev/null +++ b/data/parser_test/parser_r8.txt @@ -0,0 +1,5 @@ +------------------------------------------------------------------------- +| ?title | ?price | +------------------------------------------------------------------------- +| "The Semantic Web" | "23"^^ | +------------------------------------------------------------------------- diff --git a/data/parser_test/parser_r9.txt b/data/parser_test/parser_r9.txt new file mode 100644 index 00000000..278dee09 --- /dev/null +++ b/data/parser_test/parser_r9.txt @@ -0,0 +1,7 @@ +----------------------------------------- +| ?name | ?mbox | +----------------------------------------- +| "Bob" | | +| "Alice" | | +| "Alice" | | +----------------------------------------- diff --git a/conf.ini b/defaults/conf.ini similarity index 100% rename from conf.ini rename to defaults/conf.ini diff --git a/init.conf b/defaults/init.conf similarity index 100% rename from init.conf rename to defaults/init.conf diff --git a/ipAllow.config b/defaults/ipAllow.config similarity index 100% rename from ipAllow.config rename to defaults/ipAllow.config diff --git a/ipDeny.config b/defaults/ipDeny.config similarity index 100% rename from ipDeny.config rename to defaults/ipDeny.config diff --git a/slog.properties b/defaults/slog.properties similarity index 100% rename from slog.properties rename to defaults/slog.properties diff --git a/slog.stdout.properties b/defaults/slog.stdout.properties similarity index 100% rename from slog.stdout.properties rename to defaults/slog.stdout.properties diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100644 index 00000000..f668f93c --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +BLUE='\033[0;34m' +RED='\033[0;31m' +NC='\033[0m' + +if [ ! -f /app/init.conf ]; then + echo "${BLUE}[INIT] No init.conf file found. Copying default...${NC}" + cp -r /docker-init/* /app/ + + # Check if GSTORE_ROOT_PASSWORD is set + if [ -z "$GSTORE_ROOT_PASSWORD" ]; then + echo "${RED}[INIT] GSTORE_ROOT_PASSWORD is not set. We strongly recommend setting a strong password.${NC}" + else + echo "${BLUE}[INIT] Setting root password...${NC}" + # Replace the line in the file + sed -i -e "s/^#\\?\\s*root_password=.*/root_password=${GSTORE_ROOT_PASSWORD}/" init.conf + fi + +fi + +if [ ! -d /app/bin ]; then + echo "${BLUE}[INIT] Creating directories...${NC}" + mkdir -p bin lib backups data logs .tmp +fi + +if [ ! 
-d /app/system.db ]; then + echo "${BLUE}[INIT] Creating system.db...${NC}" + /usr/local/bin/ginit --make + + # list all directories in /app/data + for dir in /app/data/*; do + # get the directory name + dir_name=$(basename "$dir") + if [ "$dir_name" != "system" ] && [ -d "data/$dir_name" ] && [ -f "data/$dir_name/$dir_name.nt" ] ; then + # create the database + echo "${BLUE}[INIT] Creating $dir_name...${NC}" + /usr/local/bin/gbuild -db "$dir_name" -f "data/$dir_name/$dir_name.nt" + fi + done +fi + +echo "${BLUE}[INIT] Command: $@${NC}" + +exec "$@" \ No newline at end of file diff --git a/docs/DOCKER_DEPLOY_EN.md b/docs/DOCKER_DEPLOY_EN.md index 2be570ee..c561c42f 100644 --- a/docs/DOCKER_DEPLOY_EN.md +++ b/docs/DOCKER_DEPLOY_EN.md @@ -18,6 +18,28 @@ After having the correct Docker environment and network, use `git clone` to down After the building, using `docker run -p 9000:80 -it gstore` directly to start and enter the container and execute other operations. +### Playground Mode + +The Docker image is built with some sample data. Run the following command to start the container in playground mode: + +```bash +docker run --rm -it gstore sh -c "(/usr/local/bin/ghttp &); bash" +``` + +Press Enter to get a bash prompt. Then you can query the sample data in the container: + +```bash +gquery -db small -q data/small/small_q0.sql +``` + +### Server Mode + +Expose port 9000, persist the data, and run the container in the background: + +```bash +docker run -d -p 9000:9000 -v /path/to/data:/app gstore +``` + ## 0x02. pulling the mirror directly to run Instead of downloading project or building on your own, input `docker pull pkumodlab/gstore:latest` to pull the mirror which has been automatically built well on the docker hub. Then input `docker run -p 9000:80 -it pkumodlab/gstore:latest` to start and enter the container and execute other operations. diff --git a/docs/gStore_help.html b/docs/gStore_help.html index 893dc961..1cbe8bd4 100644 --- a/docs/gStore_help.html +++ b/docs/gStore_help.html @@ -1,4 +1,4 @@ -锘 + diff --git a/docs/gStore_help.md b/docs/gStore_help.md index 909975ed..82c8c272 100644 --- a/docs/gStore_help.md +++ b/docs/gStore_help.md @@ -1,4 +1,4 @@ -锘# Gstore System +# Gstore System gStore, a graph-based RDF triple store, is a joint research project by Peking University, University of Waterloo and Hong Kong University of Science and Technology. The system is developed and maintained by the database group in Institute of Computer Science and Technology, Peking University, China. A detailed description of gStore can be found at our papers [Zou et al., VLDB 11] and [Zou et al., VLDB Journal 14] in the [Publication](#chapter07) section. This HELP document includes system installment, usage, API, use cases and FAQ. gStore is a open-source project in github under the BSD license. You are welcome to use gStore, report bugs or suggestions, or join us to make gStore better. It is also allowed for you to build all kinds of applications based on gStore, while respecting our work. 
diff --git a/lib/.gitignore b/lib/.gitignore deleted file mode 100644 index a5baada1..00000000 --- a/lib/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -* -!.gitignore - diff --git a/logs/.gitignore b/logs/.gitignore deleted file mode 100644 index 397b4a76..00000000 --- a/logs/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.log diff --git a/logs/endpoint/README b/logs/endpoint/README deleted file mode 100644 index e69de29b..00000000 diff --git a/makefile b/makefile deleted file mode 100644 index 3c5f44a1..00000000 --- a/makefile +++ /dev/null @@ -1,808 +0,0 @@ -#help for make -#http://www.cnblogs.com/wang_yb/p/3990952.html -#https://segmentfault.com/a/1190000000349917 -#http://blog.csdn.net/cuiyifang/article/details/7910268 - -#to use gprof to analyse efficience of the program: -#http://blog.chinaunix.net/uid-25194149-id-3215487.html - -#to use gcov and lcov -#Notice that optimization should not be used here -#http://blog.163.com/bobile45@126/blog/static/96061992201382025729313/ -#gcov -a main.cpp -#lcov --directory . --capture --output-file dig.info -#genhtml --output-directory . --frames --show-details dig.info - -#to use doxygen+graphviz+htmlhelp to generate document from source code: -#http://www.doxygen.nl/ -#(also include good comments norm) -#http://blog.csdn.net/u010740725/article/details/51387810 - -#CXX=$(shell which clang 2>/dev/null || which gcc) -#ccache, readline, gcov lcov -#http://blog.csdn.net/u012421852/article/details/52138960 -# -# How to speed up the compilation -# https://blog.csdn.net/a_little_a_day/article/details/78251928 -# use make -j4, if error then use make utilizing only one thread -#use -j8 or higher may cause error -#http://blog.csdn.net/cscrazybing/article/details/50789482 -#http://blog.163.com/liuhonggaono1@126/blog/static/10497901201210254622141/ - -#compile parameters - -# WARN: maybe difficult to install ccache in some systems -#CXX = ccache g++ -CXX = g++ -CC = gcc - -#the optimazition level of gcc/g++ -#http://blog.csdn.net/hit_090420216/article/details/44900215 -#NOTICE: -O2 is recommended, while -O3(add loop-unroll and inline-function) is dangerous -#when developing, not use -O because it will disturb the normal -#routine. use it for test and release. 
-CFLAGS = -c -Wall -O2 -pthread -std=c++11 -Werror=return-type -EXEFLAG = -O2 -pthread -std=c++11 -Werror=return-type -#-coverage for debugging -#CFLAGS = -c -Wall -pthread -O0 -g3 -std=c++11 -gdwarf-2 -#EXEFLAG = -pthread -O0 -g3 -std=c++11 -gdwarf-2 -#-coverage for debugging and with performance -# CFLAGS = -c -Wall -pthread -g3 -std=c++11 -gdwarf-2 -pg -# EXEFLAG = -pthread -g3 -std=c++11 -gdwarf-2 -pg - -#add -lreadline [-ltermcap] if using readline or objs contain readline -# library = -lreadline -L./lib -L/usr/local/lib -lantlr -lgcov -lboost_thread -lboost_filesystem -lboost_system -lboost_regex -lpthread -I/usr/local/include/boost -lcurl -#library = -lreadline -L./lib -L/usr/local/lib -L/usr/lib/ -L./workflow-nossl/_lib -L./workflow-nossl/_include -lantlr4-runtime -lgcov -lboost_thread -lboost_filesystem -lboost_system -lboost_regex -lpthread -I/usr/local/include/boost -lcurl -lworkflow -llog4cplus -#library = -lreadline -L./lib -L/usr/local/lib -L/usr/lib/ -L./tools/workflow-master/_lib -L./tools/workflow-master/_include -lantlr4-runtime -lgcov -lboost_thread -lboost_filesystem -lboost_system -lboost_regex -lpthread -I/usr/local/include/boost -lcurl -llog4cplus -lworkflow -#library = -lreadline -L./lib -L/usr/local/lib -L/usr/lib/ -lantlr4-runtime -lgcov -lboost_thread -lboost_filesystem -lboost_system -lboost_regex -lpthread -I/usr/local/include/boost -lcurl -llog4cplus -Wl,-rpath='/usr/local/lib' -library = -L/usr/lib64 -L./lib -L/usr/local/lib -L/usr/lib -I/usr/local/include/boost -ljemalloc -lreadline -lantlr4-runtime -lgcov -lboost_thread -lboost_system -lboost_regex -lpthread -lcurl -llog4cplus -lz -lminizip -#used for parallelsort -march = -march=native -openmp = -fopenmp ${march} -# library = -ltermcap -lreadline -L./lib -lantlr -lgcov -def64IO = -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE -# load dynamic lib -ldl = -ldl - -FIRST_BUILD ?= %.o -# paths - -objdir = .objs/ - -exedir = bin/ - -testdir = scripts/ - -lib_antlr = lib/libantlr4-runtime.a - -lib_rpc = lib/libworkflow.a - -lib_log = lib/liblog4cplus.a - -api_cpp = api/http/cpp/lib/libgstoreconnector.a - -api_socket = api/socket/cpp/lib/libclient.a - -# objects - -sitreeobj = $(objdir)SITree.o $(objdir)SIStorage.o $(objdir)SINode.o $(objdir)SIIntlNode.o $(objdir)SILeafNode.o $(objdir)SIHeap.o -ivarrayobj = $(objdir)IVArray.o $(objdir)IVEntry.o $(objdir)IVBlockManager.o -isarrayobj = $(objdir)ISArray.o $(objdir)ISEntry.o $(objdir)ISBlockManager.o - -kvstoreobj = $(objdir)KVstore.o $(sitreeobj) $(ivarrayobj) $(isarrayobj) - -utilobj = $(objdir)Slog.o $(objdir)Util.o $(objdir)Bstr.o $(objdir)Stream.o $(objdir)Triple.o $(objdir)VList.o \ - $(objdir)EvalMultitypeValue.o $(objdir)IDTriple.o $(objdir)Version.o $(objdir)Transaction.o $(objdir)Latch.o $(objdir)IPWhiteList.o \ - $(objdir)IPBlackList.o $(objdir)SpinLock.o $(objdir)GraphLock.o $(objdir)WebUrl.o $(objdir)INIParser.o $(objdir)OrderedVector.o \ - $(objdir)CompressFileUtil.o - -topkobj = $(objdir)DynamicTrie.o $(objdir)OrderedList.o $(objdir)Pool.o $(objdir)TopKUtil.o $(objdir)DPBTopKUtil.o $(objdir)TopKSearchPlan.o - -queryobj = $(objdir)SPARQLquery.o $(objdir)BasicQuery.o $(objdir)ResultSet.o $(objdir)IDList.o $(objdir)DFSPlan.o\ - $(objdir)Varset.o $(objdir)QueryTree.o $(objdir)TempResult.o $(objdir)QueryCache.o $(objdir)GeneralEvaluation.o \ - $(objdir)PathQueryHandler.o $(objdir)BGPQuery.o $(objdir)FilterPlan.o - -#signatureobj = $(objdir)SigEntry.o $(objdir)Signature.o - -#vstreeobj = $(objdir)VSTree.o $(objdir)EntryBuffer.o 
$(objdir)LRUCache.o $(objdir)VNode.o - -stringindexobj = $(objdir)StringIndex.o - -parserobj = $(objdir)RDFParser.o $(objdir)SPARQLParser.o \ - $(objdir)SPARQLLexer.o $(objdir)TurtleParser.o $(objdir)QueryParser.o - -serverobj = $(objdir)Operation.o $(objdir)Server.o $(objdir)Socket.o - -grpcobj = $(objdir)grpc_server.o $(objdir)grpc_server_task.o $(objdir)grpc_message.o \ - $(objdir)grpc_router.o $(objdir)grpc_routetable.o $(objdir)grpc_content.o \ - $(objdir)grpc_status_code.o $(objdir)grpc_multipart_parser.o ${objdir}APIUtil.o - -databaseobj = $(objdir)Database.o $(objdir)Join.o \ - $(objdir)CSR.o $(objdir)Txn_manager.o $(objdir)TableOperator.o $(objdir)PlanTree.o \ - $(objdir)PlanGenerator.o $(objdir)Executor.o $(objdir)Optimizer.o - -trieobj = $(objdir)Trie.o $(objdir)TrieNode.o - -objfile = $(kvstoreobj) $(stringindexobj) $(parserobj) $(serverobj) $(databaseobj) \ - $(utilobj) $(topkobj) $(queryobj) $(trieobj) - -inc = -I./tools/antlr4-cpp-runtime-4/runtime/src -inc_rpc = -I./tools/workflow/_include -inc_log = -I./tools/log4cplus/include -inc_zlib= -I./tools/zlib-1.3/include -#auto generate dependencies -# http://blog.csdn.net/gmpy_tiger/article/details/51849474 -# http://blog.csdn.net/jeffrey0000/article/details/12421317 - -#gtest - -TARGET = $(exedir)gexport $(exedir)gbuild $(exedir)gserver $(exedir)gserver_backup_scheduler \ - $(exedir)gquery $(exedir)gadd $(exedir)gsub $(exedir)ghttp $(exedir)gmonitor \ - $(exedir)gshow $(exedir)shutdown $(exedir)ginit $(exedir)gdrop $(exedir)gbackup \ - $(exedir)grestore $(exedir)gpara $(exedir)rollback $(exedir)grpc $(exedir)gconsole -# TestTarget = $(testdir)update_test $(testdir)dataset_test $(testdir)transaction_test \ - $(testdir)run_transaction $(testdir)workload $(testdir)debug_test -# TARGET = $(exedir)gbuild $(exedir)gdrop $(exedir)gquery $(exedir)ginit - -all: $(TARGET) - @echo "Compilation ends successfully!" 
- @bash scripts/init.sh - -#BETTER: use for loop to reduce the lines -#NOTICE: g++ -MM will run error if linking failed, like Database.h/../SparlParser.h/../antlr3.h - -#executables begin - -#NOTICE:not include g*.o in objfile due to multiple definitions of main() - -$(exedir)gexport: $(lib_antlr) $(objdir)gexport.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gexport $(objdir)gexport.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gdrop: $(lib_antlr) $(objdir)gdrop.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gdrop $(objdir)gdrop.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)ginit: $(lib_antlr) $(objdir)ginit.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)ginit $(objdir)ginit.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)shutdown: $(lib_antlr) $(objdir)shutdown.o $(objfile) $(api_cpp) - $(CXX) $(EXEFLAG) -o $(exedir)shutdown $(objdir)shutdown.o $(objfile) $(openmp) -L./api/http/cpp/lib -lgstoreconnector $(library) ${ldl} - -$(exedir)gmonitor: $(lib_antlr) $(objdir)gmonitor.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gmonitor $(objdir)gmonitor.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gshow: $(lib_antlr) $(objdir)gshow.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gshow $(objdir)gshow.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gbuild: $(lib_antlr) $(objdir)gbuild.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gbuild $(objdir)gbuild.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gquery: $(lib_antlr) $(objdir)gquery.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gquery $(objdir)gquery.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gserver: $(lib_antlr) $(objdir)gserver.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gserver $(objdir)gserver.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gserver_backup_scheduler: $(lib_antlr) $(objdir)gserver_backup_scheduler.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gserver_backup_scheduler $(objdir)gserver_backup_scheduler.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)ghttp: $(lib_antlr) $(objdir)ghttp.o ./Server/server_http.hpp ./Server/client_http.hpp ./Server/MultipartParser.hpp $(objfile) ${objdir}APIUtil.o - $(CXX) $(EXEFLAG) -o $(exedir)ghttp $(objdir)ghttp.o $(objfile) ${objdir}APIUtil.o $(library) $(inc) $(openmp) ${ldl} - -#$(exedir)gapiserver: $(lib_antlr) $(lib_workflow) $(objdir)gapiserver.o $(objfile) -# $(CXX) $(EXEFLAG) -o $(exedir)gapiserver $(objdir)gapiserver.o $(objfile) $(library) $(openmp) - -$(exedir)grpc: $(lib_antlr) $(lib_rpc) $(objdir)grpc.o $(grpcobj) $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)grpc $(objdir)grpc.o ${grpcobj} $(objfile) $(library) $(inc) ${inc_rpc} -lworkflow -lssl -lcrypto $(openmp) ${ldl} - -$(exedir)gbackup: $(lib_antlr) $(objdir)gbackup.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gbackup $(objdir)gbackup.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)grestore: $(lib_antlr) $(objdir)grestore.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)grestore $(objdir)grestore.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gpara: $(lib_antlr) $(objdir)gpara.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gpara $(objdir)gpara.o $(objfile) $(library) $(openmp) -L./api/http/cpp/lib -lgstoreconnector $(library) ${ldl} - -$(exedir)rollback: $(lib_antlr) $(objdir)rollback.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)rollback $(objdir)rollback.o $(objfile) $(library) $(openmp) -L./api/http/cpp/lib -lgstoreconnector $(library) ${ldl} - -$(testdir)update_test: $(lib_antlr) $(objdir)update_test.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)update_test 
$(objdir)update_test.o $(objfile) $(library) $(openmp) ${ldl} - -$(testdir)dataset_test: $(lib_antlr) $(objdir)dataset_test.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)dataset_test $(objdir)dataset_test.o $(objfile) $(library) $(openmp) ${ldl} - -$(testdir)transaction_test: $(lib_antlr) $(objdir)transaction_test.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)transaction_test $(objdir)transaction_test.o $(objfile) $(library) $(openmp) ${ldl} - -$(testdir)run_transaction: $(lib_antlr) $(objdir)run_transaction.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)run_transaction $(objdir)run_transaction.o $(objfile) $(library) $(openmp) -L./api/http/cpp/lib -lgstoreconnector $(library) ${ldl} - -$(testdir)workload: $(lib_antlr) $(objdir)workload.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)workload $(objdir)workload.o $(objfile) $(library) $(openmp) ${ldl} - -$(testdir)debug_test: $(lib_antlr) $(objdir)debug_test.o $(objfile) - $(CXX) $(EXEFLAG) -o $(testdir)debug_test $(objdir)debug_test.o $(objfile) $(library) $(openmp) ${ldl} - -$(exedir)gconsole: $(lib_antlr) $(objdir)gconsole.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gconsole $(objdir)gconsole.o $(objfile) $(library) $(openmp) ${ldl} - -#executables end - - -#objects in Main/ begin - -$(objdir)gexport.o: Main/gexport.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gexport.cpp $(inc) $(inc_log) -o $(objdir)gexport.o $(openmp) - -$(objdir)gdrop.o: Main/gdrop.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gdrop.cpp $(inc) $(inc_log) -o $(objdir)gdrop.o $(openmp) - -$(objdir)ginit.o: Main/ginit.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/ginit.cpp $(inc) $(inc_log) -o $(objdir)ginit.o $(openmp) - -$(objdir)shutdown.o: Main/shutdown.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/shutdown.cpp $(inc) $(inc_log) -o $(objdir)shutdown.o $(openmp) - -$(objdir)gmonitor.o: Main/gmonitor.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gmonitor.cpp $(inc) $(inc_log) -o $(objdir)gmonitor.o $(openmp) - -$(objdir)gshow.o: Main/gshow.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gshow.cpp $(inc) $(inc_log) -o $(objdir)gshow.o $(openmp) - -$(objdir)gbuild.o: Main/gbuild.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gbuild.cpp $(inc) $(inc_log) -o $(objdir)gbuild.o $(openmp) - -$(objdir)gquery.o: Main/gquery.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gquery.cpp $(inc) $(inc_log) -o $(objdir)gquery.o $(openmp) #-DREADLINE_ON - #add -DREADLINE_ON if using readline - -$(objdir)gserver.o: Main/gserver.cpp Server/Server.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gserver.cpp $(inc) $(inc_log) -o $(objdir)gserver.o $(openmp) - -$(objdir)gserver_backup_scheduler.o: Main/gserver_backup_scheduler.cpp Server/Server.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gserver_backup_scheduler.cpp $(inc) $(inc_log) -o $(objdir)gserver_backup_scheduler.o $(openmp) - -$(objdir)ghttp.o: Main/ghttp.cpp Server/server_http.hpp Server/client_http.hpp Server/MultipartParser.hpp Database/Database.h Database/Txn_manager.h Util/Util.h Util/IPWhiteList.h Util/IPBlackList.h Util/CompressFileUtil.h $(lib_antlr) Util/INIParser.h Util/WebUrl.h GRPC/APIUtil.h - $(CXX) $(CFLAGS) Main/ghttp.cpp $(inc) $(inc_log) $(inc_zlib) -o $(objdir)ghttp.o $(def64IO) $(openmp) - -#$(objdir)gapiserver.o: Main/gapiserver.cpp Database/Database.h Database/Txn_manager.h Util/Util.h 
Util/Util_New.h Util/IPWhiteList.h Util/IPBlackList.h Util/WebUrl.h $(lib_antlr) $(lib_workflow) -# $(CXX) $(CFLAGS) Main/gapiserver.cpp $(inc) $(inc_workflow) -o $(objdir)gapiserver.o $(openmp) - -$(objdir)grpc.o: Main/grpc.cpp GRPC/grpc_server.h GRPC/grpc_status_code.h GRPC/grpc_operation.h GRPC/APIUtil.h Util/CompressFileUtil.h Database/Database.h Database/Txn_manager.h Util/Util.h $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) Main/grpc.cpp $(inc) $(inc_log) $(inc_rpc) $(inc_zlib) -o $(objdir)grpc.o $(def64IO) $(openmp) - -$(objdir)gbackup.o: Main/gbackup.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gbackup.cpp $(inc) $(inc_log) -o $(objdir)gbackup.o $(openmp) - -$(objdir)grestore.o: Main/grestore.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/grestore.cpp $(inc) $(inc_log) -o $(objdir)grestore.o $(openmp) - -$(objdir)gpara.o: Main/gpara.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gpara.cpp $(inc) $(inc_log) -o $(objdir)gpara.o $(openmp) - -$(objdir)rollback.o: Main/rollback.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/rollback.cpp $(inc) $(inc_log) -o $(objdir)rollback.o $(openmp) - -$(objdir)gconsole.o: Main/gconsole.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) Main/gconsole.cpp $(inc) $(inc_log) -o $(objdir)gconsole.o $(openmp) -#objects in Main/ end - -#objects in scripts/ begin - -$(objdir)update_test.o: $(testdir)update_test.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)update_test.cpp $(inc) -o $(objdir)update_test.o $(openmp) - -$(objdir)dataset_test.o: $(testdir)dataset_test.cpp Database/Database.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)dataset_test.cpp $(inc) -o $(objdir)dataset_test.o $(openmp) - -$(objdir)transaction_test.o: $(testdir)transaction_test.cpp Database/Database.h Database/Txn_manager.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)transaction_test.cpp $(inc) -o $(objdir)transaction_test.o $(openmp) - -$(objdir)run_transaction.o: $(testdir)run_transaction.cpp Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)run_transaction.cpp $(inc) $(inc_log) -o $(objdir)run_transaction.o $(openmp) - -$(objdir)workload.o: $(testdir)workload.cpp Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)workload.cpp $(inc) -o $(objdir)workload.o $(openmp) - -$(objdir)debug_test.o: $(testdir)debug_test.cpp Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) $(testdir)debug_test.cpp $(inc) -o $(objdir)debug_test.o $(openmp) - -#objects in scripts/ end - - -#objects in kvstore/ begin - -#objects in sitree/ begin -$(objdir)SITree.o: KVstore/SITree/SITree.cpp KVstore/SITree/SITree.h $(filter $(FIRST_BUILD),$(objdir)Stream.o) - @echo $(FAST_DEPENDENCY_FLAG) - $(CXX) $(CFLAGS) KVstore/SITree/SITree.cpp $(inc_log) -o $(objdir)SITree.o $(openmp) - -$(objdir)SIStorage.o: KVstore/SITree/storage/SIStorage.cpp KVstore/SITree/storage/SIStorage.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) KVstore/SITree/storage/SIStorage.cpp $(inc_log) -o $(objdir)SIStorage.o $(def64IO) $(openmp) - -$(objdir)SINode.o: KVstore/SITree/node/SINode.cpp KVstore/SITree/node/SINode.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) KVstore/SITree/node/SINode.cpp $(inc_log) -o $(objdir)SINode.o $(openmp) - -$(objdir)SIIntlNode.o: KVstore/SITree/node/SIIntlNode.cpp KVstore/SITree/node/SIIntlNode.h - $(CXX) $(CFLAGS) KVstore/SITree/node/SIIntlNode.cpp $(inc_log) -o $(objdir)SIIntlNode.o $(openmp) - 
-$(objdir)SILeafNode.o: KVstore/SITree/node/SILeafNode.cpp KVstore/SITree/node/SILeafNode.h - $(CXX) $(CFLAGS) KVstore/SITree/node/SILeafNode.cpp $(inc_log) -o $(objdir)SILeafNode.o $(openmp) - -$(objdir)SIHeap.o: KVstore/SITree/heap/SIHeap.cpp KVstore/SITree/heap/SIHeap.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) KVstore/SITree/heap/SIHeap.cpp $(inc_log) -o $(objdir)SIHeap.o $(openmp) -#objects in sitree/ end - -#objects in isarray/ begin -$(objdir)ISArray.o: KVstore/ISArray/ISArray.cpp KVstore/ISArray/ISArray.h $(filter $(FIRST_BUILD),$(objdir)VList.o) - $(CXX) $(CFLAGS) KVstore/ISArray/ISArray.cpp $(inc_log) -o $(objdir)ISArray.o - -$(objdir)ISBlockManager.o: KVstore/ISArray/ISBlockManager.cpp KVstore/ISArray/ISBlockManager.h - $(CXX) $(CFLAGS) KVstore/ISArray/ISBlockManager.cpp $(inc_log) -o $(objdir)ISBlockManager.o - -$(objdir)ISEntry.o: KVstore/ISArray/ISEntry.cpp KVstore/ISArray/ISEntry.h - $(CXX) $(CFLAGS) KVstore/ISArray/ISEntry.cpp $(inc_log) -o $(objdir)ISEntry.o -#objects in isarray/ end - -#objects in ivarray/ begin -$(objdir)IVArray.o: KVstore/IVArray/IVArray.cpp KVstore/IVArray/IVArray.h $(filter $(FIRST_BUILD),$(objdir)VList.o) \ - $(filter $(FIRST_BUILD),$(objdir)Transaction.o) - $(CXX) $(CFLAGS) KVstore/IVArray/IVArray.cpp $(inc_log) -o $(objdir)IVArray.o - -$(objdir)IVBlockManager.o: KVstore/IVArray/IVBlockManager.cpp KVstore/IVArray/IVBlockManager.h - $(CXX) $(CFLAGS) KVstore/IVArray/IVBlockManager.cpp $(inc_log) -o $(objdir)IVBlockManager.o - -$(objdir)IVEntry.o: KVstore/IVArray/IVEntry.cpp KVstore/IVArray/IVEntry.h $(filter $(FIRST_BUILD),$(objdir)Version.o) \ - $(filter $(FIRST_BUILD),$(objdir)GraphLock.o) - $(CXX) $(CFLAGS) KVstore/IVArray/IVEntry.cpp $(inc_log) -o $(objdir)IVEntry.o - -#objects in ivarray/ end - -$(objdir)KVstore.o: KVstore/KVstore.cpp KVstore/KVstore.h KVstore/Tree.h - $(CXX) $(CFLAGS) KVstore/KVstore.cpp $(inc) $(inc_log) -o $(objdir)KVstore.o $(openmp) - -#objects in kvstore/ end - - -#objects in Database/ begin - - -$(objdir)Database.o: Database/Database.cpp Database/Database.h $(filter $(FIRST_BUILD),$(objdir)RDFParser.o) \ - $(filter $(FIRST_BUILD),$(objdir)GeneralEvaluation.o) $(filter $(FIRST_BUILD),$(objdir)StringIndex.o) \ - $(filter $(FIRST_BUILD),$(objdir)Transaction.o) - $(CXX) $(CFLAGS) Database/Database.cpp $(inc) $(inc_log) -o $(objdir)Database.o $(openmp) - -$(objdir)Join.o: Database/Join.cpp Database/Join.h $(filter $(FIRST_BUILD),$(objdir)IDList.o) \ - $(filter $(FIRST_BUILD),$(objdir)KVstore.o) $(filter $(FIRST_BUILD),$(objdir)SPARQLquery.o) $(filter $(FIRST_BUILD),$(objdir)Transaction.o) - $(CXX) $(CFLAGS) Database/Join.cpp $(inc) $(inc_log) -o $(objdir)Join.o $(openmp) - -$(objdir)CSR.o: Database/CSR.cpp Database/CSR.h - $(CXX) $(CFLAGS) Database/CSR.cpp $(inc) -o $(objdir)CSR.o $(openmp) - $(CXX) -std=c++11 -fPIC -shared Database/CSR.cpp -o lib/libgcsr.so - -$(objdir)TableOperator.o: Database/TableOperator.cpp Database/TableOperator.h $(filter $(FIRST_BUILD),$(objdir)BGPQuery.o) - $(CXX) $(CFLAGS) Database/TableOperator.cpp $(inc) $(inc_log) -o $(objdir)TableOperator.o $(openmp) - -#$(objdir)ResultTrigger.o: Database/ResultTrigger.cpp Database/ResultTrigger.h $(objdir)Util.o -# $(CXX) $(CFLAGS) Database/ResultTrigger.cpp $(inc) -o $(objdir)ResultTrigger.o $(openmp) - -$(objdir)PlanTree.o: Database/PlanTree.cpp Database/PlanTree.h $(filter $(FIRST_BUILD),$(objdir)TableOperator.o) - $(CXX) $(CFLAGS) Database/PlanTree.cpp $(inc) $(inc_log) -o $(objdir)PlanTree.o $(openmp) - -$(objdir)PlanGenerator.o: 
Database/PlanGenerator.cpp Database/PlanGenerator.h \ - $(filter $(FIRST_BUILD),$(objdir)IDList.o) $(filter $(FIRST_BUILD),$(objdir)PlanTree.o) \ - $(filter $(FIRST_BUILD),$(objdir)OrderedVector.o) - $(CXX) $(CFLAGS) Database/PlanGenerator.cpp $(inc) $(inc_log) -o $(objdir)PlanGenerator.o $(openmp) - -$(objdir)Executor.o: Database/Executor.cpp Database/Executor.h $(filter $(FIRST_BUILD),$(objdir)IDList.o) \ - $(filter $(FIRST_BUILD),$(objdir)Join.o) $(filter $(FIRST_BUILD),$(objdir)Transaction.o) \ - $(filter $(FIRST_BUILD),$(objdir)TableOperator.o) $(filter $(FIRST_BUILD), $(objdir)DPBTopKUtil.o) - $(CXX) $(CFLAGS) Database/Executor.cpp $(inc) $(inc_log) -o $(objdir)Executor.o $(openmp) ${ldl} - -$(objdir)Optimizer.o: Database/Optimizer.cpp Database/Optimizer.h Database/OptimizerDebug.h \ - $(filter $(FIRST_BUILD), $(objdir)Executor.o) $(filter $(FIRST_BUILD),$(objdir)DFSPlan.o) \ - $(filter $(FIRST_BUILD),$(objdir)PlanGenerator.o) $(filter $(FIRST_BUILD),$(objdir)DPBTopKUtil.o) \ - $(filter $(FIRST_BUILD),$(objdir)FilterPlan.o) - $(CXX) $(CFLAGS) Database/Optimizer.cpp $(inc) $(inc_log) -o $(objdir)Optimizer.o $(openmp) ${ldl} - -$(objdir)Txn_manager.o: Database/Txn_manager.cpp Database/Txn_manager.h $(filter $(FIRST_BUILD),$(objdir)Util.o) \ - $(filter $(FIRST_BUILD),$(objdir)Transaction.o) $(filter $(FIRST_BUILD),$(objdir)Database.o) - $(CXX) $(CFLAGS) Database/Txn_manager.cpp $(inc) $(inc_log) -o $(objdir)Txn_manager.o $(openmp) - -#objects in Database/ end - - -#objects in Query/ begin - -$(objdir)IDList.o: Query/IDList.cpp Query/IDList.h - $(CXX) $(CFLAGS) Query/IDList.cpp $(inc) $(inc_log) -o $(objdir)IDList.o $(openmp) - -$(objdir)SPARQLquery.o: Query/SPARQLquery.cpp Query/SPARQLquery.h $(filter $(FIRST_BUILD),$(objdir)BasicQuery.o) - $(CXX) $(CFLAGS) Query/SPARQLquery.cpp $(inc) $(inc_log) -o $(objdir)SPARQLquery.o $(openmp) - -$(objdir)BasicQuery.o: Query/BasicQuery.cpp Query/BasicQuery.h - $(CXX) $(CFLAGS) Query/BasicQuery.cpp $(inc) $(inc_log) -o $(objdir)BasicQuery.o $(openmp) - -$(objdir)ResultSet.o: Query/ResultSet.cpp Query/ResultSet.h $(filter $(FIRST_BUILD),$(objdir)Stream.o) - $(CXX) $(CFLAGS) Query/ResultSet.cpp $(inc) $(inc_log) -o $(objdir)ResultSet.o $(openmp) - -$(objdir)Varset.o: Query/Varset.cpp Query/Varset.h - $(CXX) $(CFLAGS) Query/Varset.cpp $(inc) $(inc_log) -o $(objdir)Varset.o $(openmp) - -$(objdir)DFSPlan.o: Query/DFSPlan.cpp Query/DFSPlan.h $(filter $(FIRST_BUILD),$(objdir)TableOperator.o) - $(CXX) $(CFLAGS) Query/DFSPlan.cpp $(inc) $(inc_log) -o $(objdir)DFSPlan.o $(openmp) - -$(objdir)QueryTree.o: Query/QueryTree.cpp Query/QueryTree.h $(filter $(FIRST_BUILD),$(objdir)Varset.o) - $(CXX) $(CFLAGS) Query/QueryTree.cpp $(inc) $(inc_log) -o $(objdir)QueryTree.o $(openmp) - -$(objdir)TempResult.o: Query/TempResult.cpp Query/TempResult.h Query/RegexExpression.h \ - $(filter $(FIRST_BUILD),$(objdir)StringIndex.o) $(filter $(FIRST_BUILD),$(objdir)QueryTree.o) \ - $(filter $(FIRST_BUILD),$(objdir)EvalMultitypeValue.o) - $(CXX) $(CFLAGS) Query/TempResult.cpp $(inc) $(inc_log) -o $(objdir)TempResult.o $(openmp) - -$(objdir)QueryCache.o: Query/QueryCache.cpp Query/QueryCache.h $(filter $(FIRST_BUILD),$(objdir)TempResult.o) - $(CXX) $(CFLAGS) Query/QueryCache.cpp $(inc) $(inc_log) -o $(objdir)QueryCache.o $(openmp) - -$(objdir)PathQueryHandler.o: Query/PathQueryHandler.cpp Query/PathQueryHandler.h $(filter $(FIRST_BUILD),$(objdir)CSR.o) - $(CXX) $(CFLAGS) Query/PathQueryHandler.cpp $(inc) -o $(objdir)PathQueryHandler.o $(openmp) ${ldl} - $(CXX) -std=c++11 -fPIC 
-shared Query/PathQueryHandler.cpp -o lib/libgpathqueryhandler.so lib/libgcsr.so - -$(objdir)BGPQuery.o: Query/BGPQuery.cpp Query/BGPQuery.h $(filter $(FIRST_BUILD),$(objdir)Util.o) \ - $(filter $(FIRST_BUILD),$(objdir)Triple.o) $(filter $(FIRST_BUILD),$(objdir)KVstore.o) - $(CXX) $(CFLAGS) Query/BGPQuery.cpp $(inc) $(inc_log) -o $(objdir)BGPQuery.o $(openmp) - -$(objdir)FilterPlan.o: Query/FilterPlan.cpp Query/FilterPlan.h $(filter $(FIRST_BUILD),$(objdir)TableOperator.o) - $(CXX) $(CFLAGS) Query/FilterPlan.cpp $(inc) $(inc_log) -o $(objdir)FilterPlan.o $(openmp) - -#objects in Query/topk/ begin - -$(objdir)Pool.o: Query/topk/DPB/Pool.cpp Query/topk/DPB/Pool.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) Query/topk/DPB/Pool.cpp $(inc) $(inc_log) -o $(objdir)Pool.o $(openmp) - -$(objdir)DynamicTrie.o: Query/topk/DPB/DynamicTrie.cpp Query/topk/DPB/DynamicTrie.h \ - $(filter $(FIRST_BUILD),$(objdir)Util.o) $(filter $(FIRST_BUILD),$(objdir)Pool.o) - $(CXX) $(CFLAGS) Query/topk/DPB/DynamicTrie.cpp $(inc) $(inc_log) -o $(objdir)DynamicTrie.o $(openmp) - -$(objdir)OrderedList.o: Query/topk/DPB/OrderedList.cpp Query/topk/DPB/OrderedList.h \ - $(filter $(FIRST_BUILD),$(objdir)Util.o) $(filter $(FIRST_BUILD),$(objdir)Pool.o) \ - $(filter $(FIRST_BUILD),$(objdir)DynamicTrie.o) - $(CXX) $(CFLAGS) Query/topk/DPB/OrderedList.cpp $(inc) $(inc_log) -o $(objdir)OrderedList.o $(openmp) - -$(objdir)TopKSearchPlan.o: Query/topk/TopKSearchPlan.cpp Query/topk/TopKSearchPlan.h \ - $(filter $(FIRST_BUILD),$(objdir)OrderedList.o) $(filter $(FIRST_BUILD),$(objdir)QueryTree.o) $(filter $(FIRST_BUILD),$(objdir)PlanGenerator.o) - $(CXX) $(CFLAGS) Query/topk/TopKSearchPlan.cpp $(inc) $(inc_log) -o $(objdir)TopKSearchPlan.o $(openmp) - -$(objdir)TopKUtil.o: Query/topk/TopKUtil.cpp Query/topk/TopKUtil.h $(filter $(FIRST_BUILD),$(objdir)TopKSearchPlan.o) - $(CXX) $(CFLAGS) Query/topk/TopKUtil.cpp $(inc) $(inc_log) -o $(objdir)TopKUtil.o $(openmp) - -$(objdir)DPBTopKUtil.o: Query/topk/DPBTopKUtil.cpp Query/topk/DPBTopKUtil.h $(filter $(FIRST_BUILD),$(objdir)TopKUtil.o) - $(CXX) $(CFLAGS) Query/topk/DPBTopKUtil.cpp $(inc) $(inc_log) -o $(objdir)DPBTopKUtil.o $(openmp) - -#objects in Query/topk/ end - - -#no more using $(objdir)Database.o -$(objdir)GeneralEvaluation.o: Query/GeneralEvaluation.cpp Query/GeneralEvaluation.h Query/RegexExpression.h \ - $(filter $(FIRST_BUILD),$(objdir)StringIndex.o) $(filter $(FIRST_BUILD),$(objdir)QueryParser.o) \ - $(filter $(FIRST_BUILD),$(objdir)EvalMultitypeValue.o) $(filter $(FIRST_BUILD),$(objdir)SPARQLquery.o) \ - $(filter $(FIRST_BUILD),$(objdir)QueryCache.o) $(filter $(FIRST_BUILD),$(objdir)ResultSet.o) \ - $(filter $(FIRST_BUILD),$(objdir)PathQueryHandler.o) $(filter $(FIRST_BUILD),$(objdir)Optimizer.o) - $(CXX) $(CFLAGS) Query/GeneralEvaluation.cpp $(inc) $(inc_log) -o $(objdir)GeneralEvaluation.o $(openmp) ${ldl} - -#objects in Query/ end - - -#objects in Signature/ begin - -#$(objdir)SigEntry.o: Signature/SigEntry.cpp Signature/SigEntry.h $(objdir)Signature.o -# $(CXX) $(CFLAGS) Signature/SigEntry.cpp $(inc) -o $(objdir)SigEntry.o $(openmp) -# -#$(objdir)Signature.o: Signature/Signature.cpp Signature/Signature.h -# $(CXX) $(CFLAGS) Signature/Signature.cpp $(inc) -o $(objdir)Signature.o $(openmp) - -#objects in Signature/ end - - -#objects in Util/ begin - -$(objdir)Util.o: Util/Util.cpp Util/Util.h $(objdir)Slog.o - $(CXX) $(CFLAGS) Util/Util.cpp $(inc_log) -o $(objdir)Util.o $(openmp) - -$(objdir)WebUrl.o: Util/WebUrl.cpp Util/WebUrl.h - $(CXX) $(CFLAGS) 
Util/WebUrl.cpp -o $(objdir)WebUrl.o $(openmp) - - - -$(objdir)INIParser.o: Util/INIParser.cpp Util/INIParser.h - $(CXX) $(CFLAGS) Util/INIParser.cpp -o $(objdir)INIParser.o $(openmp) - -$(objdir)Slog.o: Util/Slog.cpp Util/Slog.h $(lib_log) - $(CXX) $(CFLAGS) Util/Slog.cpp $(inc_log) -o $(objdir)Slog.o $(openmp) - -#$(objdir)grpc.srpc.o: GRPC/grpc.srpc.h $(lib_workflow) -# $(CXX) $(CFLAGS) GRPC/grpc.srpc.h -o $(objdir)grpc.srpc.o $(openmp) - -$(objdir)Stream.o: Util/Stream.cpp Util/Stream.h $(filter $(FIRST_BUILD),$(objdir)Util.o) $(filter $(FIRST_BUILD),$(objdir)Bstr.o) - $(CXX) $(CFLAGS) Util/Stream.cpp $(inc_log) -o $(objdir)Stream.o $(def64IO) $(openmp) - -$(objdir)Bstr.o: Util/Bstr.cpp Util/Bstr.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) Util/Bstr.cpp $(inc_log) -o $(objdir)Bstr.o $(openmp) - -$(objdir)Triple.o: Util/Triple.cpp Util/Triple.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) Util/Triple.cpp $(inc_log) -o $(objdir)Triple.o $(openmp) - -$(objdir)VList.o: Util/VList.cpp Util/VList.h - $(CXX) $(CFLAGS) Util/VList.cpp $(inc_log) -o $(objdir)VList.o $(openmp) - -$(objdir)EvalMultitypeValue.o: Util/EvalMultitypeValue.cpp Util/EvalMultitypeValue.h - $(CXX) $(CFLAGS) Util/EvalMultitypeValue.cpp $(inc_log) -o $(objdir)EvalMultitypeValue.o $(openmp) - -$(objdir)Version.o: Util/Version.cpp Util/Version.h - $(CXX) $(CFLAGS) Util/Version.cpp $(inc_log) -o $(objdir)Version.o $(openmp) - -$(objdir)SpinLock.o: Util/SpinLock.h Util/SpinLock.cpp - $(CXX) $(CFLAGS) Util/SpinLock.cpp -o $(objdir)SpinLock.o $(openmp) - -$(objdir)GraphLock.o: Util/GraphLock.h Util/GraphLock.cpp - $(CXX) $(CFLAGS) Util/GraphLock.cpp -o $(objdir)GraphLock.o $(openmp) - -$(objdir)Transaction.o: Util/Transaction.cpp Util/Transaction.h $(filter $(FIRST_BUILD),$(objdir)Util.o) $(filter $(FIRST_BUILD),$(objdir)IDTriple.o) - $(CXX) $(CFLAGS) Util/Transaction.cpp $(inc) $(inc_log) -o $(objdir)Transaction.o $(openmp) - -$(objdir)IDTriple.o: Util/IDTriple.cpp Util/IDTriple.h - $(CXX) $(CFLAGS) Util/IDTriple.cpp $(inc_log) -o $(objdir)IDTriple.o $(openmp) - -$(objdir)Latch.o: Util/Latch.cpp Util/Latch.h - $(CXX) $(CFLAGS) Util/Latch.cpp -o $(objdir)Latch.o $(openmp) - -$(objdir)IPWhiteList.o: Util/IPWhiteList.cpp Util/IPWhiteList.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) Util/IPWhiteList.cpp $(inc_log) -o $(objdir)IPWhiteList.o $(def64IO) $(openmp) - -$(objdir)IPBlackList.o: Util/IPBlackList.cpp Util/IPBlackList.h $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) Util/IPBlackList.cpp $(inc_log) -o $(objdir)IPBlackList.o $(def64IO) $(openmp) - -$(objdir)OrderedVector.o: Util/OrderedVector.cpp Util/OrderedVector.h - $(CXX) $(CFLAGS) Util/OrderedVector.cpp -o $(objdir)OrderedVector.o $(openmp) - -#objects in util/ end - - -#objects in VSTree/ begin - -#$(objdir)VSTree.o: VSTree/VSTree.cpp VSTree/VSTree.h $(objdir)EntryBuffer.o $(objdir)LRUCache.o $(objdir)VNode.o -# $(CXX) $(CFLAGS) VSTree/VSTree.cpp $(inc) -o $(objdir)VSTree.o $(def64IO) $(openmp) -# -#$(objdir)EntryBuffer.o: VSTree/EntryBuffer.cpp VSTree/EntryBuffer.h Signature/SigEntry.h -# $(CXX) $(CFLAGS) VSTree/EntryBuffer.cpp $(inc) -o $(objdir)EntryBuffer.o $(def64IO) $(openmp) -# -#$(objdir)LRUCache.o: VSTree/LRUCache.cpp VSTree/LRUCache.h VSTree/VNode.h -# $(CXX) $(CFLAGS) VSTree/LRUCache.cpp $(inc) -o $(objdir)LRUCache.o $(def64IO) $(openmp) -# -#$(objdir)VNode.o: VSTree/VNode.cpp VSTree/VNode.h -# $(CXX) $(CFLAGS) VSTree/VNode.cpp $(inc) -o $(objdir)VNode.o $(def64IO) $(openmp) - -#objects in VSTree/ 
end - - -#objects in StringIndex/ begin -$(objdir)StringIndex.o: StringIndex/StringIndex.cpp StringIndex/StringIndex.h \ - $(filter $(FIRST_BUILD),$(objdir)KVstore.o) $(filter $(FIRST_BUILD),$(objdir)Util.o) - $(CXX) $(CFLAGS) StringIndex/StringIndex.cpp $(inc) $(inc_log) -o $(objdir)StringIndex.o $(def64IO) $(openmp) -#objects in StringIndex/ end - - -#objects in Parser/ begin - -$(objdir)SPARQLParser.o: Parser/SPARQL/SPARQLParser.cpp Parser/SPARQL/SPARQLParser.h - $(CXX) $(CFLAGS) Parser/SPARQL/SPARQLParser.cpp $(inc) -o $(objdir)SPARQLParser.o $(openmp) - -$(objdir)SPARQLLexer.o: Parser/SPARQL/SPARQLLexer.cpp Parser/SPARQL/SPARQLLexer.h - $(CXX) $(CFLAGS) Parser/SPARQL/SPARQLLexer.cpp $(inc) -o $(objdir)SPARQLLexer.o $(openmp) - -$(objdir)TurtleParser.o: Parser/TurtleParser.cpp Parser/TurtleParser.h Parser/Type.h - $(CXX) $(CFLAGS) Parser/TurtleParser.cpp $(inc) $(inc_log) -o $(objdir)TurtleParser.o $(openmp) - -$(objdir)RDFParser.o: Parser/RDFParser.cpp Parser/RDFParser.h $(filter $(FIRST_BUILD),$(objdir)TurtleParser.o) $(filter $(FIRST_BUILD),$(objdir)Triple.o) - $(CXX) $(CFLAGS) Parser/RDFParser.cpp $(inc) $(inc_log) -o $(objdir)RDFParser.o $(openmp) - -$(objdir)QueryParser.o: Parser/QueryParser.cpp Parser/QueryParser.h $(filter $(FIRST_BUILD),$(objdir)SPARQLParser.o) \ - $(filter $(FIRST_BUILD),$(objdir)SPARQLLexer.o) $(filter $(FIRST_BUILD),$(objdir)QueryTree.o) - $(CXX) $(CFLAGS) Parser/QueryParser.cpp $(inc) $(inc_log) -o $(objdir)QueryParser.o $(openmp) - -#objects in Parser/ end - -#objects in Trie/ begin - -$(objdir)TrieNode.o: Trie/TrieNode.cpp Trie/TrieNode.h - $(CXX) $(CFLAGS) Trie/TrieNode.cpp -o $(objdir)TrieNode.o - -$(objdir)Trie.o: Trie/Trie.cpp Trie/Trie.h $(filter $(FIRST_BUILD),$(objdir)TrieNode.o) $(filter $(FIRST_BUILD),$(objdir)Triple.o) $(filter $(FIRST_BUILD),$(objdir)RDFParser.o) - $(CXX) $(CFLAGS) Trie/Trie.cpp $(inc) $(inc_log) -o $(objdir)Trie.o - -#objects in Server/ begin - -$(objdir)Operation.o: Server/Operation.cpp Server/Operation.h - $(CXX) $(CFLAGS) Server/Operation.cpp $(inc) $(inc_log) -o $(objdir)Operation.o $(openmp) - -$(objdir)Socket.o: Server/Socket.cpp Server/Socket.h - $(CXX) $(CFLAGS) Server/Socket.cpp $(inc) $(inc_log) -o $(objdir)Socket.o $(openmp) - -$(objdir)Server.o: Server/Server.cpp Server/Server.h $(filter $(FIRST_BUILD),$(objdir)Socket.o) \ - $(filter $(FIRST_BUILD),$(objdir)Database.o) $(filter $(FIRST_BUILD),$(objdir)Operation.o) - $(CXX) $(CFLAGS) Server/Server.cpp $(inc) $(inc_log) -o $(objdir)Server.o $(openmp) - -$(objdir)CompressFileUtil.o: Util/CompressFileUtil.cpp Util/CompressFileUtil.h Util/Util.h - $(CXX) $(CFLAGS) Util/CompressFileUtil.cpp $(inc) $(inc_log) $(inc_zlib) -o $(objdir)CompressFileUtil.o $(def64IO) $(openmp) - -# $(objdir)client_http.o: Server/client_http.hpp -# $(CXX) $(CFLAGS) Server/client_http.hpp $(inc) -o $(objdir)client_http.o - -# $(objdir)server_http.o: Server/server_http.hpp -# $(CXX) $(CFLAGS) Server/server_http.hpp $(inc) -o $(objdir)server_http.o - -#objects in Server/ end - -#objects in GRPC/ begin - -$(objdir)APIUtil.o: GRPC/APIUtil.cpp GRPC/APIUtil.h Database/Database.h Database/Txn_manager.h Util/Util.h $(lib_antlr) - $(CXX) $(CFLAGS) GRPC/APIUtil.cpp $(inc) $(inc_log) -o $(objdir)APIUtil.o $(def64IO) $(openmp) - -$(objdir)grpc_status_code.o: GRPC/grpc_status_code.cpp GRPC/grpc_status_code.h $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_status_code.cpp $(inc) $(inc_rpc) -o $(objdir)grpc_status_code.o $(def64IO) $(openmp) - -$(objdir)grpc_multipart_parser.o: 
GRPC/grpc_multipart_parser.cpp GRPC/grpc_multipart_parser.h $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_multipart_parser.cpp $(inc) $(inc_rpc) -o $(objdir)grpc_multipart_parser.o $(def64IO) $(openmp) - -$(objdir)grpc_content.o: GRPC/grpc_content.cpp GRPC/grpc_content.h GRPC/grpc_stringpiece.h $(objdir)grpc_multipart_parser.o $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_content.cpp $(inc) $(inc_rpc) -o $(objdir)grpc_content.o $(def64IO) $(openmp) - -$(objdir)grpc_message.o: GRPC/grpc_message.cpp GRPC/grpc_message.h GRPC/grpc_noncopyable.h $(objdir)grpc_content.o $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_message.cpp $(inc) $(inc_rpc) $(inc_log) -o $(objdir)grpc_message.o $(def64IO) $(openmp) - -$(objdir)grpc_server_task.o: GRPC/grpc_server_task.cpp GRPC/grpc_server_task.h $(objdir)grpc_message.o GRPC/grpc_noncopyable.h $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_server_task.cpp $(inc) $(inc_rpc) $(inc_log) -o $(objdir)grpc_server_task.o $(def64IO) $(openmp) - -$(objdir)grpc_routetable.o: GRPC/grpc_routetable.cpp GRPC/grpc_routetable.h GRPC/grpc_request_handler.h GRPC/grpc_noncopyable.h GRPC/grpc_stringpiece.h $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_routetable.cpp $(inc) $(inc_rpc) $(inc_log) -o $(objdir)grpc_routetable.o $(def64IO) $(openmp) - -$(objdir)grpc_router.o: GRPC/grpc_router.cpp GRPC/grpc_router.h $(objdir)grpc_routetable.o GRPC/grpc_noncopyable.h $(objdir)grpc_server_task.o $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_router.cpp $(inc) $(inc_rpc) $(inc_log) -o $(objdir)grpc_router.o $(def64IO) $(openmp) - -$(objdir)grpc_server.o: GRPC/grpc_server.cpp GRPC/grpc_server.h $(objdir)grpc_message.o $(objdir)grpc_router.o $(lib_antlr) $(lib_rpc) - $(CXX) $(CFLAGS) GRPC/grpc_server.cpp $(inc) $(inc_rpc) $(inc_log) -o $(objdir)grpc_server.o $(def64IO) $(openmp) - -#objects in GRPC/ end - -# your gcc g++ v5.4 path -# in ./bashrc CXX should be gcc, otherwise, make pre2 will error -# see https://blog.csdn.net/weixin_34268610/article/details/89085852 -#pre1:export CXX=/usr/local/gcc-5.4.0/bin/gcc -#pre1:export CXX=/usr/local/gcc-5.4.0/bin/g++ -#pre1: -# cd tools; tar -xvf log4cplus-1.2.0.tar;cd log4cplus-1.2.0;./configure;make;sudo make install; - -pre: - rm -rf tools/rapidjson/ - rm -rf tools/antlr4-cpp-runtime-4/ - rm -rf tools/workflow - rm -rf tools/log4cplus - rm -rf tools/indicators - rm -rf tools/zlib-1.3 - rm -rf lib/libantlr4-runtime.a lib/libworkflow.a lib/liblog4cplus.a - rm -rf lib/libminizip.a; - cd tools; tar -xzvf rapidjson.tar.gz; - cd tools; tar -xzvf antlr4-cpp-runtime-4.tar.gz; - cd tools; tar -xvf indicators.tar; - cd tools; tar -xzvf workflow-0.10.3.tar.gz; - cd tools; tar -xzvf log4cplus-2.0.8.tar.gz; - cd tools; tar -xzvf zlib-1.3.tar.gz; - cd tools/antlr4-cpp-runtime-4/; cmake .; make; cp dist/libantlr4-runtime.a ../../lib/; - cd tools/workflow; make; cp _lib/libworkflow.a ../../lib/; - cd tools/log4cplus; ./configure --enable-static; make; cp .libs/liblog4cplus.a ../../lib/; - cd tools/zlib-1.3; ./configure; make; cp *.h ./include/; cd contrib/minizip; make; cp *.h ../../include/; cp libminizip.a ../../../../lib/; - -$(api_cpp): $(objdir)Socket.o - $(MAKE) -C api/http/cpp/src - -$(api_socket): $(objdir)Socket.o - $(MAKE) -C api/socket/cpp/src - - -.PHONY: clean dist tarball api_example gtest sumlines contribution test - -test: $(TARGET) - @echo "basic build/query/add/sub/drop test......" - @bash scripts/basic_test.sh - @echo "repeatedly insertion/deletion test......" 
- @scripts/update_test > /dev/null - @echo "parser test......" - @bash scripts/parser_test.sh - -clean: - #rm -rf lib/libantlr4-runtime.a - $(MAKE) -C api/socket/cpp/src clean - $(MAKE) -C api/socket/cpp/example clean - $(MAKE) -C api/http/cpp/src clean - $(MAKE) -C api/http/cpp/example clean - $(MAKE) -C api/http/java/src clean - $(MAKE) -C api/http/java/example clean - #$(MAKE) -C KVstore clean - rm -rf $(exedir)g* $(objdir)*.o $(exedir).gserver* $(exedir)shutdown $(exedir)rollback - rm -rf bin/*.class - rm -rf $(testdir)update_test $(testdir)dataset_test $(testdir)transaction_test $(testdir)run_transaction $(testdir)workload $(testdir)debug_test - #rm -rf .project .cproject .settings just for eclipse - rm -rf logs/*.log - rm -rf *.out # gmon.out for gprof with -pg - rm -rf lib/libgcsr.so lib/libgpathqueryhandler.so - - -dist: clean - rm -rf *.nt *.n3 .debug/*.log .tmp/*.dat *.txt *.db - rm -rf lib/libantlr4-runtime.a - rm -rf cscope* .cproject .settings tags - rm -rf *.info - rm -rf backups/*.db - -tarball: - tar -czvf gstore.tar.gz api backups bin lib tools .debug .tmp .objs scripts docs data logs \ - Main Database KVstore Util Query Signature VSTree Parser Server README.md init.conf conf.ini StringIndex COVERAGE \ - Dockerfile LICENSE makefile Trie - -APIexample: $(api_cpp) $(api_socket) - $(MAKE) -C api/http/cpp/example - $(MAKE) -C api/socket/cpp/example - -gtest: $(objdir)gtest.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gtest $(objdir)gtest.o $(objfile) lib/libantlr4-runtime.a $(library) $(openmp) - -$(objdir)gtest.o: scripts/gtest.cpp - $(CXX) $(CFLAGS) scripts/gtest.cpp $(inc) -o $(objdir)gtest.o $(openmp) - -$(exedir)gadd: $(objdir)gadd.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gadd $(objdir)gadd.o $(objfile) lib/libantlr4-runtime.a $(library) $(openmp) ${ldl} - -$(objdir)gadd.o: Main/gadd.cpp - $(CXX) $(CFLAGS) Main/gadd.cpp $(inc) $(inc_log) -o $(objdir)gadd.o $(openmp) - -#$(objdir)HttpConnector: $(objdir)HttpConnector.o $(objfile) - #$(CXX) $(CFLAGS) -o $(exedir)HttpConnector $(objdir)HttpConnector.o $(objfile) lib/libantlr4-runtime.a $(library) $(inc) - -#$(objdir)HttpConnector.o: Main/HttpConnector.cpp - #$(CXX) $(CFLAGS) Main/HttpConnector.cpp $(inc) -o $(objdir)HttpConnector.o $(library) - -$(exedir)gsub: $(objdir)gsub.o $(objfile) - $(CXX) $(EXEFLAG) -o $(exedir)gsub $(objdir)gsub.o $(objfile) lib/libantlr4-runtime.a $(library) $(openmp) ${ldl} - -$(objdir)gsub.o: Main/gsub.cpp - $(CXX) $(CFLAGS) Main/gsub.cpp $(inc) $(inc_log) -o $(objdir)gsub.o $(openmp) - -sumlines: - @bash scripts/sumline.sh - -tag: - ctags -R - -idx: - find `realpath .` -name "*.h" -o -name "*.c" -o -name "*.cpp" > cscope.files - cscope -bkq #-i cscope.files - -cover: - bash scripts/cover.sh - -fulltest: - #NOTICE:compile gstore with -O2 only - #setup new virtuoso and configure it - cp scripts/full_test.sh ~ - cd ~ - bash full_test.sh - -#test the efficience of kvstore, insert/delete/search, use dbpedia170M by default -test-kvstore: - # test/kvstore_test.cpp - echo "TODO" - -# https://segmentfault.com/a/1190000008542123 -contribution: - bash scripts/contribution.sh - diff --git a/package.json b/package.json deleted file mode 100644 index fe9e70c4..00000000 --- a/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "config": { - "ghooks": { - "commit-msg": "validate-commit-msg" - } - }, - - "scripts": { - "changelog-all": "conventional-changelog -p angular -i CHANGELOG.md -w -r 0", - "changelog": "conventional-changelog -p angular -i CHANGELOG.md -w" - } -} diff --git a/scripts/.gitignore 
b/scripts/.gitignore new file mode 100644 index 00000000..411859ca --- /dev/null +++ b/scripts/.gitignore @@ -0,0 +1,3 @@ +* +!*.* +!*/ \ No newline at end of file diff --git a/scripts/contribution.sh b/scripts/contribution.sh deleted file mode 100644 index 1e563c63..00000000 --- a/scripts/contribution.sh +++ /dev/null @@ -1,3 +0,0 @@ -# NOTICE: we adopts the scoring strategy that the lines added and subtracted by one should be accumulated as his contribution -git log --format='%aN' | sort -u | while read name; do echo -en "$name\t"; git log --author="$name" --pretty=tformat: --numstat | awk '{ add += $1; subs += $2; loc += $1 + $2 } END { printf "added lines: %s, removed lines: %s, total lines: %s\n", add, subs, loc }' -; done -#git log --format='%aN' | sort -u | while read name; do echo -en "$name\t"; git log --author="$name" --pretty=tformat: --numstat | awk '{ add += $1; subs += $2; loc += $1 - $2 } END { printf "added lines: %s, removed lines: %s, total lines: %s\n", add, subs, loc }' -; done diff --git a/scripts/init.sh b/scripts/init.sh index 15fa01ff..bec5a753 100644 --- a/scripts/init.sh +++ b/scripts/init.sh @@ -1,13 +1,56 @@ #!/bin/bash -### - # @Author: your name - # @Date: 2021-09-10 09:55:24 - # @LastEditTime: 2021-09-10 09:56:38 - # @LastEditors: Please set LastEditors - # @Description: In User Settings Edit - # @FilePath: /gstore/scripts/init.sh -### -#set -v -#initialize system.db -#"bin/ginit" "-make" >& /dev/null -"bin/ginit" "--make" \ No newline at end of file + +# Exit on error +set -e + +# Get the directory name of the current script +ROOT_DIR=$(dirname "${BASH_SOURCE[0]}") + +BLUE='\033[0;34m' +RED='\033[0;31m' +NC='\033[0m' + +if [ ! -d ./.tmp ]; then + echo -e "${BLUE} [INIT] Creating directories... ${NC}" + mkdir -p bin lib backups logs .tmp .debug +fi + +if [ ! -f ./init.conf ]; then + echo -e "${BLUE} [INIT] No init.conf file found. Copying default... ${NC}" + cp -r "${ROOT_DIR}"/../defaults/* "./" + cp -r "${ROOT_DIR}"/../lib/*.so* "./lib/" + if [ -d /data ] && [ ! -d ./data/system ]; then + echo -e "${BLUE} [INIT] Creating symlink... ${NC}" + ln -s /data ./data + fi + + + # Check if GSTORE_ROOT_PASSWORD is set + if [ -z "$GSTORE_ROOT_PASSWORD" ]; then + echo -e "${RED} [INIT] GSTORE_ROOT_PASSWORD is not set. We strongly recommend setting a strong password. ${NC}" + else + echo -e "${BLUE} [INIT] Setting root password... ${NC}" + # Replace the line in the file + sed -i -e "s/^#\\?\\s*root_password=.*/root_password=${GSTORE_ROOT_PASSWORD}/" init.conf + fi +fi + +if [ ! -d ./system.db ]; then + echo -e "${BLUE} [INIT] Creating system.db... ${NC}" + "${ROOT_DIR}/../bin/ginit" --make + + # list all directories in /app/data + for dir in "${ROOT_DIR}"/../data/*; do + # get the directory name + dir_name=$(basename "$dir") + if [ "$dir_name" != "system" ] && [ -d "data/$dir_name" ] && [ -f "data/$dir_name/$dir_name.nt" ] ; then + # create the database + echo -e "${BLUE} [INIT] Creating $dir_name... 
${NC}" + "${ROOT_DIR}/../bin/gbuild" -db "$dir_name" -f "data/$dir_name/$dir_name.nt" + fi + done +fi + +echo -e "${BLUE} [INIT] Command:" "$@" "${NC}" + +exec "$@" \ No newline at end of file diff --git a/scripts/parser_test/parser_r1.txt b/scripts/parser_test/parser_r1.txt deleted file mode 100644 index c20704e9..00000000 --- a/scripts/parser_test/parser_r1.txt +++ /dev/null @@ -1,3 +0,0 @@ -?name ?mbox -"Johnny Lee Outlaw" -"Peter Goodguy" diff --git a/scripts/parser_test/parser_r10.txt b/scripts/parser_test/parser_r10.txt deleted file mode 100644 index f529b7d1..00000000 --- a/scripts/parser_test/parser_r10.txt +++ /dev/null @@ -1,3 +0,0 @@ -?title ?price -"SPARQL Tutorial" - -"The Semantic Web" "23"^^ diff --git a/scripts/parser_test/parser_r11.txt b/scripts/parser_test/parser_r11.txt deleted file mode 100644 index 0a703f8d..00000000 --- a/scripts/parser_test/parser_r11.txt +++ /dev/null @@ -1,3 +0,0 @@ -?name ?mbox ?hpage -"Alice" - -"Bob" - diff --git a/scripts/parser_test/parser_r12.txt b/scripts/parser_test/parser_r12.txt deleted file mode 100644 index d229d9f3..00000000 --- a/scripts/parser_test/parser_r12.txt +++ /dev/null @@ -1,5 +0,0 @@ -?title -"SPARQL Query Language Tutorial" -"SPARQL" -"SPARQL Protocol Tutorial" -"SPARQL (updated)" diff --git a/scripts/parser_test/parser_r13.txt b/scripts/parser_test/parser_r13.txt deleted file mode 100644 index b5ed5b4f..00000000 --- a/scripts/parser_test/parser_r13.txt +++ /dev/null @@ -1,5 +0,0 @@ -?x ?y -"SPARQL Query Language Tutorial" -"SPARQL" - -- "SPARQL Protocol Tutorial" -- "SPARQL (updated)" diff --git a/scripts/parser_test/parser_r14.txt b/scripts/parser_test/parser_r14.txt deleted file mode 100644 index 6efc4b18..00000000 --- a/scripts/parser_test/parser_r14.txt +++ /dev/null @@ -1,3 +0,0 @@ -?title ?author -"SPARQL Query Language Tutorial" "Alice" -"SPARQL Protocol Tutorial" "Bob" diff --git a/scripts/parser_test/parser_r15.txt b/scripts/parser_test/parser_r15.txt deleted file mode 100644 index 74c7a79c..00000000 --- a/scripts/parser_test/parser_r15.txt +++ /dev/null @@ -1,5 +0,0 @@ -?s - - - - \ No newline at end of file diff --git a/scripts/parser_test/parser_r16.txt b/scripts/parser_test/parser_r16.txt deleted file mode 100644 index d2312ee7..00000000 --- a/scripts/parser_test/parser_r16.txt +++ /dev/null @@ -1,2 +0,0 @@ -?s ?p ?o - diff --git a/scripts/parser_test/parser_r17.txt b/scripts/parser_test/parser_r17.txt deleted file mode 100644 index d2312ee7..00000000 --- a/scripts/parser_test/parser_r17.txt +++ /dev/null @@ -1,2 +0,0 @@ -?s ?p ?o - diff --git a/scripts/parser_test/parser_r18.txt b/scripts/parser_test/parser_r18.txt deleted file mode 100644 index 70a4b494..00000000 --- a/scripts/parser_test/parser_r18.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name -"Alice" diff --git a/scripts/parser_test/parser_r19.txt b/scripts/parser_test/parser_r19.txt deleted file mode 100644 index a610573a..00000000 --- a/scripts/parser_test/parser_r19.txt +++ /dev/null @@ -1,3 +0,0 @@ -?nameX ?nameY ?nickY -"Alice" "Bob" - -"Alice" "Clare" "CT" diff --git a/scripts/parser_test/parser_r2.txt b/scripts/parser_test/parser_r2.txt deleted file mode 100644 index 41e0c70d..00000000 --- a/scripts/parser_test/parser_r2.txt +++ /dev/null @@ -1 +0,0 @@ -[empty result] diff --git a/scripts/parser_test/parser_r20.txt b/scripts/parser_test/parser_r20.txt deleted file mode 100644 index 623b0e7d..00000000 --- a/scripts/parser_test/parser_r20.txt +++ /dev/null @@ -1,2 +0,0 @@ -?_askResult -"true"^^ diff --git a/scripts/parser_test/parser_r21.txt 
b/scripts/parser_test/parser_r21.txt deleted file mode 100644 index 69aa0f28..00000000 --- a/scripts/parser_test/parser_r21.txt +++ /dev/null @@ -1,2 +0,0 @@ -?_askResult -"false"^^ diff --git a/scripts/parser_test/parser_r22.txt b/scripts/parser_test/parser_r22.txt deleted file mode 100644 index 2ba335dc..00000000 --- a/scripts/parser_test/parser_r22.txt +++ /dev/null @@ -1,2 +0,0 @@ -?givenName -"Bob" diff --git a/scripts/parser_test/parser_r23.txt b/scripts/parser_test/parser_r23.txt deleted file mode 100644 index 70a4b494..00000000 --- a/scripts/parser_test/parser_r23.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name -"Alice" diff --git a/scripts/parser_test/parser_r24.txt b/scripts/parser_test/parser_r24.txt deleted file mode 100644 index 34de7418..00000000 --- a/scripts/parser_test/parser_r24.txt +++ /dev/null @@ -1,3 +0,0 @@ -?name1 ?name2 -"Alice" "Ms A." -"Ms A." "Alice" diff --git a/scripts/parser_test/parser_r25.txt b/scripts/parser_test/parser_r25.txt deleted file mode 100644 index dcb86b5b..00000000 --- a/scripts/parser_test/parser_r25.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name ?mbox -"Alice" diff --git a/scripts/parser_test/parser_r26.txt b/scripts/parser_test/parser_r26.txt deleted file mode 100644 index 2e011e4c..00000000 --- a/scripts/parser_test/parser_r26.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name ?mbox -"Bob" "bob@work.example" diff --git a/scripts/parser_test/parser_r27.txt b/scripts/parser_test/parser_r27.txt deleted file mode 100644 index dcb86b5b..00000000 --- a/scripts/parser_test/parser_r27.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name ?mbox -"Alice" diff --git a/scripts/parser_test/parser_r28.txt b/scripts/parser_test/parser_r28.txt deleted file mode 100644 index 09268230..00000000 --- a/scripts/parser_test/parser_r28.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name ?mbox -"Roberto"@es diff --git a/scripts/parser_test/parser_r29.txt b/scripts/parser_test/parser_r29.txt deleted file mode 100644 index b5c5d248..00000000 --- a/scripts/parser_test/parser_r29.txt +++ /dev/null @@ -1,2 +0,0 @@ -?title -"Cette S茅rie des Ann茅es Soixante-dix"@fr diff --git a/scripts/parser_test/parser_r3.txt b/scripts/parser_test/parser_r3.txt deleted file mode 100644 index 8d959600..00000000 --- a/scripts/parser_test/parser_r3.txt +++ /dev/null @@ -1,2 +0,0 @@ -?v - diff --git a/scripts/parser_test/parser_r30.txt b/scripts/parser_test/parser_r30.txt deleted file mode 100644 index 70a4b494..00000000 --- a/scripts/parser_test/parser_r30.txt +++ /dev/null @@ -1,2 +0,0 @@ -?name -"Alice" diff --git a/scripts/parser_test/parser_r31.txt b/scripts/parser_test/parser_r31.txt deleted file mode 100644 index 83445215..00000000 --- a/scripts/parser_test/parser_r31.txt +++ /dev/null @@ -1,2 +0,0 @@ -?y ?x - "42"^^ \ No newline at end of file diff --git a/scripts/parser_test/parser_r32.txt b/scripts/parser_test/parser_r32.txt deleted file mode 100644 index c280fe91..00000000 --- a/scripts/parser_test/parser_r32.txt +++ /dev/null @@ -1,4 +0,0 @@ -?name -"Bob" -"Alice" -"Carol" \ No newline at end of file diff --git a/scripts/parser_test/parser_r33.txt b/scripts/parser_test/parser_r33.txt deleted file mode 100644 index 36dcb5d0..00000000 --- a/scripts/parser_test/parser_r33.txt +++ /dev/null @@ -1,2 +0,0 @@ -?y 
-"{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"edges\":[{\"fromNode\":2,\"toNode\":3,\"predIRI\":\"<鍠滄>\"},{\"fromNode\":3,\"toNode\":1,\"predIRI\":\"<鍏虫敞>\"},{\"fromNode\":1,\"toNode\":2,\"predIRI\":\"<鍠滄>\"}],\"nodes\":[{\"nodeIndex\":1,\"nodeIRI\":\"\"},{\"nodeIndex\":2,\"nodeIRI\":\"\"},{\"nodeIndex\":3,\"nodeIRI\":\"\"}]}]}" diff --git a/scripts/parser_test/parser_r34.txt b/scripts/parser_test/parser_r34.txt deleted file mode 100644 index 80b68cea..00000000 --- a/scripts/parser_test/parser_r34.txt +++ /dev/null @@ -1,2 +0,0 @@ -?y -"{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"length\":3}]}" diff --git a/scripts/parser_test/parser_r35.txt b/scripts/parser_test/parser_r35.txt deleted file mode 100644 index 4f742fad..00000000 --- a/scripts/parser_test/parser_r35.txt +++ /dev/null @@ -1,2 +0,0 @@ -?y -"{\"paths\":[{\"src\":\"\",\"dst\":\"\",\"value\":\"true\"}]}" diff --git a/scripts/parser_test/parser_r36.txt b/scripts/parser_test/parser_r36.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r36.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r37.txt b/scripts/parser_test/parser_r37.txt deleted file mode 100644 index a18d25fa..00000000 --- a/scripts/parser_test/parser_r37.txt +++ /dev/null @@ -1,4 +0,0 @@ -?x ?y ?z - "Alice"@en - "-5"^^ - "167.5"^^ diff --git a/scripts/parser_test/parser_r38.txt b/scripts/parser_test/parser_r38.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r38.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r39.txt b/scripts/parser_test/parser_r39.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r39.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r4.txt b/scripts/parser_test/parser_r4.txt deleted file mode 100644 index 85f40af3..00000000 --- a/scripts/parser_test/parser_r4.txt +++ /dev/null @@ -1,2 +0,0 @@ -?v - diff --git a/scripts/parser_test/parser_r40.txt b/scripts/parser_test/parser_r40.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r40.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r41.txt b/scripts/parser_test/parser_r41.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r41.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r42.txt b/scripts/parser_test/parser_r42.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r42.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r43.txt b/scripts/parser_test/parser_r43.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r43.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r44.txt b/scripts/parser_test/parser_r44.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r44.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r45.txt b/scripts/parser_test/parser_r45.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r45.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r46.txt b/scripts/parser_test/parser_r46.txt deleted file mode 100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r46.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r47.txt b/scripts/parser_test/parser_r47.txt deleted file mode 
100644 index f447b9af..00000000 --- a/scripts/parser_test/parser_r47.txt +++ /dev/null @@ -1,2 +0,0 @@ -?x - diff --git a/scripts/parser_test/parser_r48.txt b/scripts/parser_test/parser_r48.txt deleted file mode 100644 index c690442b..00000000 --- a/scripts/parser_test/parser_r48.txt +++ /dev/null @@ -1,9 +0,0 @@ -?x - - - - - - - - \ No newline at end of file diff --git a/scripts/parser_test/parser_r49.txt b/scripts/parser_test/parser_r49.txt deleted file mode 100644 index c690442b..00000000 --- a/scripts/parser_test/parser_r49.txt +++ /dev/null @@ -1,9 +0,0 @@ -?x - - - - - - - - \ No newline at end of file diff --git a/scripts/parser_test/parser_r5.txt b/scripts/parser_test/parser_r5.txt deleted file mode 100644 index 5d1c18da..00000000 --- a/scripts/parser_test/parser_r5.txt +++ /dev/null @@ -1,2 +0,0 @@ -?v - diff --git a/scripts/parser_test/parser_r50.txt b/scripts/parser_test/parser_r50.txt deleted file mode 100644 index e6a0efe2..00000000 --- a/scripts/parser_test/parser_r50.txt +++ /dev/null @@ -1,8 +0,0 @@ -?x ?y - - - - - - - \ No newline at end of file diff --git a/scripts/parser_test/parser_r6.txt b/scripts/parser_test/parser_r6.txt deleted file mode 100644 index 8ea7c7ab..00000000 --- a/scripts/parser_test/parser_r6.txt +++ /dev/null @@ -1,2 +0,0 @@ -?title -"SPARQL Tutorial" diff --git a/scripts/parser_test/parser_r7.txt b/scripts/parser_test/parser_r7.txt deleted file mode 100644 index 8dccdfda..00000000 --- a/scripts/parser_test/parser_r7.txt +++ /dev/null @@ -1,2 +0,0 @@ -?title -"The Semantic Web" diff --git a/scripts/parser_test/parser_r8.txt b/scripts/parser_test/parser_r8.txt deleted file mode 100644 index 7fd2a354..00000000 --- a/scripts/parser_test/parser_r8.txt +++ /dev/null @@ -1,2 +0,0 @@ -?title ?price -"The Semantic Web" "23"^^ diff --git a/scripts/parser_test/parser_r9.txt b/scripts/parser_test/parser_r9.txt deleted file mode 100644 index 1de1ba9e..00000000 --- a/scripts/parser_test/parser_r9.txt +++ /dev/null @@ -1,4 +0,0 @@ -?name ?mbox -"Alice" -"Alice" -"Bob" - diff --git a/scripts/setup/ubuntu_dev.sh b/scripts/setup/ubuntu_dev.sh new file mode 100644 index 00000000..c51a158c --- /dev/null +++ b/scripts/setup/ubuntu_dev.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +apt-get update +apt-get install -y \ + build-essential \ + cmake \ + ninja-build \ + mold \ + python3-pip \ + pkg-config \ + uuid-dev \ + libjemalloc-dev \ + libreadline-dev \ + libssl-dev \ + lcov + +echo "* - nofile 65535" >> /etc/security/limits.conf +echo "* - noproc 65535" >> /etc/security/limits.conf \ No newline at end of file diff --git a/scripts/setup/ubuntu_run.sh b/scripts/setup/ubuntu_run.sh new file mode 100644 index 00000000..c955361d --- /dev/null +++ b/scripts/setup/ubuntu_run.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +apt-get update +apt-get install -y \ + libgomp1 \ + libssl3 \ + libjemalloc2 \ + libreadline8 \ + libuuid1 \ + && rm -rf /var/lib/apt/lists/* + +echo "* - nofile 65535" >> /etc/security/limits.conf +echo "* - noproc 65535" >> /etc/security/limits.conf \ No newline at end of file diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt new file mode 100644 index 00000000..c5e2c311 --- /dev/null +++ b/src/CMakeLists.txt @@ -0,0 +1,12 @@ +add_subdirectory(Connector) +add_subdirectory(Database) +add_subdirectory(GRPC) +add_subdirectory(KVstore) +add_subdirectory(Main) +add_subdirectory(Parser) +add_subdirectory(Query) +add_subdirectory(Server) +add_subdirectory(Signature) +add_subdirectory(StringIndex) +add_subdirectory(Trie) +add_subdirectory(Util) \ No newline at end of 
file diff --git a/src/Connector b/src/Connector new file mode 120000 index 00000000..7185be3a --- /dev/null +++ b/src/Connector @@ -0,0 +1 @@ +../api/http/cpp/src/ \ No newline at end of file diff --git a/src/Database/CMakeLists.txt b/src/Database/CMakeLists.txt new file mode 100644 index 00000000..d4e1a6c6 --- /dev/null +++ b/src/Database/CMakeLists.txt @@ -0,0 +1,19 @@ +add_library(gstore_database OBJECT + Database.cpp + Join.cpp + CSR.cpp + TableOperator.cpp + PlanTree.cpp + PlanGenerator.cpp + Executor.cpp + Optimizer.cpp + Txn_manager.cpp +) + +add_library(gcsr SHARED + CSR.cpp +) +# set fPIC for gscr +set_property(TARGET gcsr PROPERTY POSITION_INDEPENDENT_CODE ON) +add_dependencies(gcsr prepare) +install(TARGETS gcsr DESTINATION ${GSTORE_LIB_DIR}) \ No newline at end of file diff --git a/Database/CSR.cpp b/src/Database/CSR.cpp similarity index 100% rename from Database/CSR.cpp rename to src/Database/CSR.cpp diff --git a/Database/CSR.h b/src/Database/CSR.h similarity index 100% rename from Database/CSR.h rename to src/Database/CSR.h diff --git a/Database/Database.cpp b/src/Database/Database.cpp similarity index 99% rename from Database/Database.cpp rename to src/Database/Database.cpp index 696d4580..77ee543e 100644 --- a/Database/Database.cpp +++ b/src/Database/Database.cpp @@ -4360,21 +4360,18 @@ bool Database::backup() string sys_cmd; if (Util::dir_exist(backup_path)) { - sys_cmd = "rm -rf " + backup_path; - system(sys_cmd.c_str()); + Util::remove_path(backup_path); } sys_cmd = "cp -r " + this->store_path + ' ' + backup_path; system(sys_cmd.c_str()); - sys_cmd = "rm " + backup_path + '/' + this->update_log; - system(sys_cmd.c_str()); + Util::remove_path(backup_path + '/' + this->update_log); // this->vstree->saveTree(); this->kvstore->flush(); this->clear_update_log(); string update_log_path = this->store_path + '/' + this->update_log_since_backup; - sys_cmd = "rm " + update_log_path; - system(sys_cmd.c_str()); + Util::remove_path(update_log_path); Util::create_file(update_log_path); cout << "Backup completed!" << endl; @@ -4406,12 +4403,10 @@ bool Database::restore() cout << "Failed to restore from original db file, trying to restore from backup file." 
<< endl; cout << "Your old db file will be stored at " << this->store_path << ".bad" << endl; - sys_cmd = "rm -rf " + this->store_path + ".bad"; - system(sys_cmd.c_str()); + Util::remove_path(this->store_path + ".bad"); sys_cmd = "cp -r " + this->store_path + ' ' + this->store_path + ".bad"; system(sys_cmd.c_str()); - sys_cmd = "rm -rf " + this->store_path; - system(sys_cmd.c_str()); + Util::remove_path(this->store_path); sys_cmd = "cp -r " + backup_path + ' ' + this->store_path; system(sys_cmd.c_str()); Util::create_file(this->store_path + '/' + this->update_log); @@ -4549,8 +4544,7 @@ bool Database::restore_update(multiset &_i, multiset &_r) return false; } - string cmd = "rm " + tmp_path; - system(cmd.c_str()); + Util::remove_path(tmp_path); return true; } diff --git a/Database/Database.h b/src/Database/Database.h similarity index 100% rename from Database/Database.h rename to src/Database/Database.h diff --git a/Database/Executor.cpp b/src/Database/Executor.cpp similarity index 100% rename from Database/Executor.cpp rename to src/Database/Executor.cpp diff --git a/Database/Executor.h b/src/Database/Executor.h similarity index 100% rename from Database/Executor.h rename to src/Database/Executor.h diff --git a/Database/Join.cpp b/src/Database/Join.cpp similarity index 100% rename from Database/Join.cpp rename to src/Database/Join.cpp diff --git a/Database/Join.h b/src/Database/Join.h similarity index 100% rename from Database/Join.h rename to src/Database/Join.h diff --git a/Database/Optimizer.cpp b/src/Database/Optimizer.cpp similarity index 99% rename from Database/Optimizer.cpp rename to src/Database/Optimizer.cpp index da79da78..1e06f9af 100644 --- a/Database/Optimizer.cpp +++ b/src/Database/Optimizer.cpp @@ -71,10 +71,10 @@ std::shared_ptr> Optimizer::GenerateResultTempla auto it = dfs_operation.begin(); it++; - while(it!=dfs_operation.end()) + for (; it!=dfs_operation.end(); ++it) { tables.emplace_back( IntermediateResult::OnlyPositionCopy(tables.back())); - if(dfs_operation[0]->op_type_ != StepOperation::StepOpType::Extend) + if((*it)->op_type_ != StepOperation::StepOpType::Extend) continue; if( (*it)->GetRange() == StepOperation::OpRangeType::OneNode) tables.back().AddNewNode( (*it)->GetOneNodePlan()->node_to_join_); @@ -88,7 +88,6 @@ std::shared_ptr> Optimizer::GenerateResultTempla tables.back().AddNewNode(the_only_edge.p_); tables.back().AddNewNode(the_only_edge.o_); } - it++; } return table_template; } @@ -104,7 +103,7 @@ tuple Optimizer:: ExecutionDepthFirst(shared_ptrop_type_ ==StepOperation::StepOpType::Extend) { - auto first_result = InitialTable(bgp_query,id_caches,first_operation, gstore::Executor::NO_LIMIT_OUTPUT); + auto first_result = InitialTable(bgp_query,id_caches,first_operation, query_plan->join_order_->size()==1 ? 
query_info.limit_num_ : gstore::Executor::NO_LIMIT_OUTPUT); first_table = get<1>(first_result); if( query_plan->join_order_->size()==1 || first_table.values_->empty()) return make_tuple(true, first_table); @@ -954,6 +953,7 @@ tuple Optimizer::JoinTable(shared_ptr &bgp_q #ifdef OPTIMIZER_DEBUG_INFO void Optimizer::PrintTable(IntermediateResult& result, shared_ptr &bgp_query){ + cout << "\tresult size = " << result.values_->size() << endl; auto &table = *result.values_; auto result_size = result.values_->size(); PrintDebugInfoLine(g_format("Print Table of size %d ",result_size)); @@ -974,7 +974,7 @@ void Optimizer::PrintTable(IntermediateResult& result, for(auto it = table.begin();it !=table.end() && c<10 ;c++,it++) { cout<<" record["<IsIthEdgeLinkedVarSO(nei_index)) continue; unsigned nei_id = var_descrip->so_edge_nei_[nei_index]; - if (bgpquery->is_var_satellite_by_id(nei_id)) continue; + if (find(plan_var_vec.begin(), plan_var_vec.end(), nei_id) == plan_var_vec.end()) continue; unsigned pre_size = (var_descrip->IsIthEdgePreVar(nei_index) ? max((unsigned)triples_num/limitID_predicate, (unsigned)2) : pre2num[var_descrip->so_edge_pre_id_[nei_index]]); node_score += PlanGenerator::PARAM_PRE / (double)(pre_size+1); @@ -1132,6 +1132,30 @@ unsigned PlanGenerator::HeuristicNextNode(unsigned last_node_id) { return best_next_node_id; } +unsigned PlanGenerator::HeuristicNextNodeFromVec(const set &neighbor_nodes) { + unsigned next_node_id = UINT_MAX; + double next_node_score = 0; + for (auto node : neighbor_nodes) { + double this_node_score = NodeScore(node); + if (this_node_score > next_node_score) { + next_node_id = node; + next_node_score = this_node_score; + } + } + return next_node_id; +} + +void PlanGenerator::RemoveNodeAddNeighbor(unsigned node_id, set &neighbor_nodes) { + auto var_descrip = bgpquery->get_vardescrip_by_id(node_id); + for (unsigned i = 0; i < var_descrip->degree_; ++i) { + if (var_descrip->so_edge_nei_type_[i] == VarDescriptor::EntiType::ConEntiType) continue; + if (find(plan_var_vec.begin(), plan_var_vec.end(), var_descrip->so_edge_nei_[i]) == plan_var_vec.end() && + find(join_nodes.begin(), join_nodes.end(), var_descrip->so_edge_nei_[i]) != join_nodes.end()) + neighbor_nodes.insert(var_descrip->so_edge_nei_[i]); + } + neighbor_nodes.erase(node_id); +} + // 1. Scan all vars, count join nodes num. // 2. Choose first node. // 3. While there are remaining join nodes, add one node. 
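
For context on the hunk that follows: the reworked HeuristicPlan drops the DFS stack and instead keeps a frontier of candidate neighbor nodes, always extending the plan with the highest-scoring candidate (HeuristicNextNodeFromVec) and then enlarging the frontier around the node just placed (RemoveNodeAddNeighbor). The snippet below is only an illustrative sketch of that greedy selection loop, with a plain adjacency list and a placeholder scoring function standing in for bgpquery and NodeScore; it is not code from this patch.

#include <climits>
#include <iostream>
#include <set>
#include <vector>

// Hypothetical stand-ins for the query graph and the cost model.
using Graph = std::vector<std::vector<unsigned>>;            // adjacency list over join variables
double NodeScore(unsigned node) { return 1.0 / (node + 1); } // placeholder for PlanGenerator::NodeScore

// Greedy join-order selection: repeatedly take the best-scoring node from the
// frontier of neighbors of already-placed nodes, then grow the frontier.
std::vector<unsigned> GreedyOrder(const Graph &g, unsigned first_node)
{
    std::vector<unsigned> order;
    std::set<unsigned> frontier{first_node};
    std::set<unsigned> placed;
    while (!frontier.empty()) {
        unsigned best = UINT_MAX;
        double best_score = 0;
        for (unsigned n : frontier) {
            double s = NodeScore(n);
            if (s > best_score) { best = n; best_score = s; }
        }
        order.push_back(best);
        placed.insert(best);
        frontier.erase(best);
        for (unsigned nei : g[best])
            if (!placed.count(nei)) frontier.insert(nei);    // neighbors become candidates
    }
    return order;
}

int main()
{
    Graph g = {{1, 2}, {0, 2}, {0, 1, 3}, {2}};              // tiny example query graph
    for (unsigned v : GreedyOrder(g, 0)) std::cout << v << ' ';
    std::cout << '\n';
    return 0;
}

In the actual patch the frontier only admits join (non-satellite) variables and the scores come from predicate statistics, but the control flow has the same shape.
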
@@ -1143,22 +1167,23 @@ PlanTree *PlanGenerator::HeuristicPlan(bool use_binary_join) { plan_var_vec.emplace_back(first_node_id); plan_var_degree.emplace_back(0); - stack visited_nodes_stack; - visited_nodes_stack.push(first_node_id); + set neighbor_nodes{first_node_id}; + PlanTree *plan = nullptr; + PlanTree *temp_plan = nullptr; + + while (!neighbor_nodes.empty()) { + unsigned this_node_id = HeuristicNextNodeFromVec(neighbor_nodes); + if (plan == nullptr) + plan = new PlanTree(this_node_id, bgpquery); + else { + temp_plan = new PlanTree(plan, bgpquery, this_node_id, true); + swap(temp_plan, plan); + plan_var_vec.emplace_back(this_node_id); + plan_var_degree.emplace_back(1); + } + RemoveNodeAddNeighbor(this_node_id, neighbor_nodes); + } - auto* plan = new PlanTree(first_node_id, bgpquery); - while (!visited_nodes_stack.empty()) { - unsigned last_node_id = visited_nodes_stack.top(); - unsigned next_node_id = HeuristicNextNode(last_node_id); - if (next_node_id == UINT_MAX) { - visited_nodes_stack.pop(); - continue; - } - plan = new PlanTree(plan, bgpquery, next_node_id, true); - visited_nodes_stack.push(next_node_id); - plan_var_vec.emplace_back(next_node_id); - plan_var_degree.emplace_back(1); - } AddSatelliteNode(plan); list this_query_plan{plan}; @@ -1187,9 +1212,6 @@ PlanTree *PlanGenerator::DPPlan(bool use_binary_join) { return best_plan; } -// I was wondering about one thing: when would a predicate variable appear? -// If there is a predicate variable, it seems an s_o_var must be connected to it; otherwise the query would be split apart. -// Is that right? PlanTree *PlanGenerator::GetPlan(bool use_binary_join) { if (bgpquery->get_triple_num() == 1) return GetSpecialOneTriplePlan(); switch (PlanStrategy(use_binary_join)) { @@ -1200,6 +1222,7 @@ PlanTree *PlanGenerator::GetPlan(bool use_binary_join) { default: cout << "Error in PlanGenerator::get_plan, query strategy error!"
<< endl; assert(false); + return nullptr; // never reach here } } diff --git a/Database/PlanGenerator.h b/src/Database/PlanGenerator.h similarity index 97% rename from Database/PlanGenerator.h rename to src/Database/PlanGenerator.h index 2a3f5812..69a6482a 100644 --- a/Database/PlanGenerator.h +++ b/src/Database/PlanGenerator.h @@ -129,6 +129,8 @@ class PlanGenerator { void ConsiderVarScan(BGPQueryStrategy strategy); unsigned HeuristicFirstNode(); unsigned HeuristicNextNode(unsigned last_node_id); + unsigned HeuristicNextNodeFromVec(const std::set &neighbor_nodes); + void RemoveNodeAddNeighbor(unsigned node_id, std::set &neighbor_nodes); PlanTree* HeuristicPlan(bool use_binary_join = true); PlanTree* DPPlan(bool use_binary_join = true); diff --git a/Database/PlanTree.cpp b/src/Database/PlanTree.cpp similarity index 100% rename from Database/PlanTree.cpp rename to src/Database/PlanTree.cpp diff --git a/Database/PlanTree.h b/src/Database/PlanTree.h similarity index 100% rename from Database/PlanTree.h rename to src/Database/PlanTree.h diff --git a/Database/ResultTrigger.cpp b/src/Database/ResultTrigger.cpp similarity index 100% rename from Database/ResultTrigger.cpp rename to src/Database/ResultTrigger.cpp diff --git a/Database/ResultTrigger.h b/src/Database/ResultTrigger.h similarity index 100% rename from Database/ResultTrigger.h rename to src/Database/ResultTrigger.h diff --git a/Database/Strategy.cpp b/src/Database/Strategy.cpp similarity index 100% rename from Database/Strategy.cpp rename to src/Database/Strategy.cpp diff --git a/Database/Strategy.h b/src/Database/Strategy.h similarity index 100% rename from Database/Strategy.h rename to src/Database/Strategy.h diff --git a/Database/TableOperator.cpp b/src/Database/TableOperator.cpp similarity index 100% rename from Database/TableOperator.cpp rename to src/Database/TableOperator.cpp diff --git a/Database/TableOperator.h b/src/Database/TableOperator.h similarity index 100% rename from Database/TableOperator.h rename to src/Database/TableOperator.h diff --git a/Database/Txn_manager.cpp b/src/Database/Txn_manager.cpp similarity index 100% rename from Database/Txn_manager.cpp rename to src/Database/Txn_manager.cpp diff --git a/Database/Txn_manager.h b/src/Database/Txn_manager.h similarity index 100% rename from Database/Txn_manager.h rename to src/Database/Txn_manager.h diff --git a/GRPC/APIUtil.cpp b/src/GRPC/APIUtil.cpp similarity index 77% rename from GRPC/APIUtil.cpp rename to src/GRPC/APIUtil.cpp index 191a1092..f2c4f3df 100644 --- a/GRPC/APIUtil.cpp +++ b/src/GRPC/APIUtil.cpp @@ -17,6 +17,7 @@ APIUtil::APIUtil() pthread_rwlock_init(&already_build_map_lock, NULL); pthread_rwlock_init(&txn_m_lock, NULL); pthread_rwlock_init(&ips_map_lock, NULL); + pthread_rwlock_init(&system_db_lock, NULL); pthread_rwlock_init(&query_log_lock, NULL); pthread_rwlock_init(&access_log_lock, NULL); pthread_rwlock_init(&transactionlog_lock, NULL); @@ -56,9 +57,11 @@ APIUtil::~APIUtil() } if (databases.find(SYSTEM_DB_NAME) != databases.end()) { + pthread_rwlock_wrlock(&system_db_lock); system_database->save(); delete system_database; system_database = NULL; + pthread_rwlock_unlock(&system_db_lock); } pthread_rwlock_unlock(&databases_map_lock); @@ -78,6 +81,7 @@ APIUtil::~APIUtil() pthread_rwlock_destroy(&already_build_map_lock); pthread_rwlock_destroy(&txn_m_lock); pthread_rwlock_destroy(&ips_map_lock); + pthread_rwlock_destroy(&system_db_lock); pthread_rwlock_destroy(&query_log_lock); pthread_rwlock_destroy(&access_log_lock); 
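
The APIUtil.cpp hunks in this part of the diff replace the old try-lock wrapper methods with direct pthread_rwlock_* calls whose return codes are checked and logged, and they route system-database access through the new system_db_lock. The pattern repeated inline throughout is: lock, log the error code on failure, do the work, unlock and log again. Below is a minimal self-contained sketch of that pattern; the helper names are hypothetical and this is not code from the patch.

#include <pthread.h>
#include <iostream>
#include <string>

// Hypothetical helpers mirroring the check-and-log locking pattern used inline below.
static bool wrlock_or_log(pthread_rwlock_t &lock, const std::string &name)
{
    int rc = pthread_rwlock_wrlock(&lock);
    if (rc != 0) {
        std::cerr << name << " write lock error: " << rc << std::endl;
        return false;
    }
    return true;
}

static bool unlock_or_log(pthread_rwlock_t &lock, const std::string &name)
{
    int rc = pthread_rwlock_unlock(&lock);
    if (rc != 0) {
        std::cerr << name << " unlock error: " << rc << std::endl;
        return false;
    }
    return true;
}

int main()
{
    pthread_rwlock_t databases_map_lock;
    pthread_rwlock_init(&databases_map_lock, nullptr);
    if (wrlock_or_log(databases_map_lock, "database_map")) {
        // ... mutate the protected map here ...
        unlock_or_log(databases_map_lock, "database_map");
    }
    pthread_rwlock_destroy(&databases_map_lock);
    return 0;
}
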
pthread_rwlock_destroy(&transactionlog_lock); @@ -90,13 +94,11 @@ APIUtil::~APIUtil() if (Util::file_exist(system_password_path)) { - string cmd = "rm -f " + system_password_path; - system(cmd.c_str()); + Util::remove_path(system_password_path); } if (Util::file_exist(system_port_path)) { - string cmd = "rm -f " + system_port_path; - system(cmd.c_str()); + Util::remove_path(system_port_path); } } @@ -380,115 +382,19 @@ int APIUtil::initialize(const std::string server_type, const std::string port, c } } -bool APIUtil::trywrlock_database_map() -{ - if (pthread_rwlock_trywrlock(&databases_map_lock) == 0) - { - #if defined(DEBUG) - SLOG_DEBUG("trywrlock_database_map success"); - #endif - return true; - } - else - { - #if defined(DEBUG) - SLOG_DEBUG("trywrlock_database_map unsuccess"); - #endif - return false; - } -} - -bool APIUtil::unlock_database_map() -{ - if (pthread_rwlock_unlock(&databases_map_lock) == 0) - { - #if defined(DEBUG) - SLOG_DEBUG("unlock database_map success"); - #endif - return true; - } - else - { - #if defined(DEBUG) - SLOG_DEBUG("unlock database_map unsuccess"); - #endif - return false; - } - -} - -bool APIUtil::trywrlock_already_build_map() -{ - if (pthread_rwlock_trywrlock(&already_build_map_lock) == 0) - { - #if defined(DEBUG) - SLOG_DEBUG("trywrlock_already_build_map success"); - #endif - return true; - } - - else - { - #if defined(DEBUG) - SLOG_DEBUG("trywrlock_already_build_map unsuccess"); - #endif - return false; - } - -} - bool APIUtil::unlock_already_build_map() { - if (pthread_rwlock_unlock(&already_build_map_lock) == 0) + int rwlock_code = pthread_rwlock_unlock(&already_build_map_lock); + if ( rwlock_code == 0) { #if defined(DEBUG) - SLOG_DEBUG("unlock_already_build_map success"); + SLOG_DEBUG("already_build_map unlock ok"); #endif return true; } - else { - #if defined(DEBUG) - SLOG_DEBUG("unlock_already_build_map unsuccess"); - #endif - return false; - } - -} - -bool APIUtil::rw_wrlock_build_map() -{ - if(pthread_rwlock_wrlock(&already_build_map_lock) == 0) - { - #if defined(DEBUG) - SLOG_DEBUG("lock already_build_map success"); - #endif - return true; - } - else - { - #if defined(DEBUG) - SLOG_ERROR("lock already_build_map fail"); - #endif - return false; - } -} - -bool APIUtil::rw_wrlock_database_map() -{ - if(pthread_rwlock_wrlock(&databases_map_lock) == 0) - { - #if defined(DEBUG) - SLOG_DEBUG("lock database_map success"); - #endif - return true; - } - else - { - #if defined(DEBUG) - SLOG_ERROR("lock database_map fail"); - #endif + SLOG_ERROR("already_build_map unlock error: " + to_string(rwlock_code)); return false; } } @@ -675,71 +581,87 @@ int APIUtil::db_copy(string src_path, string dest_path) bool APIUtil::add_database(const std::string &db_name, Database *&db) { - #if defined(DEBUG) - SLOG_DEBUG("try lock database_map"); - #endif - if (!APIUtil::rw_wrlock_database_map()) + int rwlock_code = pthread_rwlock_wrlock(&databases_map_lock); + if (rwlock_code != 0) { - SLOG_ERROR("database_map lock false"); + SLOG_ERROR("database_map write lock error: " + to_string(rwlock_code)); return false; } #if defined(DEBUG) - SLOG_DEBUG("database_map lock true"); + SLOG_DEBUG("database_map write lock ok"); #endif databases.insert(pair(db_name, db)); - if (APIUtil::unlock_database_map()) + rwlock_code = pthread_rwlock_unlock(&databases_map_lock); + if (rwlock_code == 0) { #if defined(DEBUG) - SLOG_DEBUG("database_map unlock true"); + SLOG_DEBUG("database_map unlock ok"); #endif + return true; } else { - #if defined(DEBUG) - SLOG_DEBUG("database_map unlock false"); - 
#endif + SLOG_ERROR("database_map unlock error:" + to_string(rwlock_code)); + return false; } - return true; } -DatabaseInfo* APIUtil::get_databaseinfo(const std::string& db_name) +bool APIUtil::get_databaseinfo(const std::string& db_name, DatabaseInfo*& dbInfo) { - pthread_rwlock_rdlock(&already_build_map_lock); - - DatabaseInfo* dbinfo = NULL; + int rwlock_code = pthread_rwlock_rdlock(&already_build_map_lock); + if (rwlock_code != 0) + { + SLOG_ERROR("already_build_map read lock error: " + to_string(rwlock_code)); + dbInfo = NULL; + return false; + } std::map::iterator iter = already_build.find(db_name); if (iter!=already_build.end()) { - dbinfo = iter->second; + dbInfo = iter->second; } - - pthread_rwlock_unlock(&already_build_map_lock); - - return dbinfo; + else + { + #if defined(DEBUG) + SLOG_WARN("can't find [" + db_name + "] database info from already_build_map"); + #endif + dbInfo = NULL; + } + return unlock_already_build_map(); } bool APIUtil::trywrlock_databaseinfo(DatabaseInfo *dbinfo) { - if (pthread_rwlock_trywrlock(&(dbinfo->db_lock)) == 0) - return true; - else + if (dbinfo == NULL || dbinfo == nullptr) + return false; + int rwlock_code = pthread_rwlock_trywrlock(&(dbinfo->db_lock)); + if (rwlock_code != 0) + { + SLOG_ERROR("try write lock database[" + dbinfo->getName() + "] error: " + to_string(rwlock_code)); return false; + } + else + { + #if defined(DEBUG) + SLOG_DEBUG("try write lock database[" + dbinfo->getName() + "] ok"); + #endif + return true; + } } -bool APIUtil::tryrdlock_databaseinfo(DatabaseInfo* dbinfo) +bool APIUtil::rdlock_databaseinfo(DatabaseInfo* dbinfo) { - if (pthread_rwlock_tryrdlock(&(dbinfo->db_lock)) == 0) + int rwlock_code = pthread_rwlock_rdlock(&(dbinfo->db_lock)); + if (rwlock_code == 0) { #if defined(DEBUG) - SLOG_DEBUG("tryrdlock_databaseinfo success"); + SLOG_DEBUG("read lock database[" + dbinfo->getName() + "] ok"); #endif return true; } else { - #if defined(DEBUG) - SLOG_ERROR("tryrdlock_databaseinfo fail."); - #endif + SLOG_ERROR("read lock database[" + dbinfo->getName() + "] error: " + to_string(rwlock_code)); return false; } } @@ -750,22 +672,21 @@ bool APIUtil::unlock_databaseinfo(DatabaseInfo* dbinfo) if (dbinfo == NULL) { #if defined(DEBUG) - SLOG_ERROR("db_info is null"); + SLOG_WARN("db_info is null"); #endif return false; - } - if (pthread_rwlock_unlock(&(dbinfo->db_lock)) == 0) + } + int rwlock_code = pthread_rwlock_unlock(&(dbinfo->db_lock)); + if (rwlock_code == 0) { #if defined(DEBUG) - SLOG_DEBUG("unlock_databaseinfo success"); + SLOG_DEBUG("database [" + dbinfo->getName() + "] unlock ok"); #endif return true; } else { - #if defined(DEBUG) - SLOG_ERROR("unlock_databaseinfo fail."); - #endif + SLOG_ERROR("database [" + dbinfo->getName() + "] unlock error: " + to_string(rwlock_code)); return false; } } @@ -803,42 +724,41 @@ bool APIUtil::remove_txn_managers(std::string db_name) return true; } -bool APIUtil::find_txn_managers(std::string db_name) +bool APIUtil::db_checkpoint(string db_name) { - #if defined(DEBUG) - SLOG_DEBUG("unload txn_manager:" + to_string(txn_managers.size())); - #endif - pthread_rwlock_rdlock(&txn_m_lock); - if (txn_managers.find(db_name) == txn_managers.end()) + int rwlock_code = pthread_rwlock_wrlock(&txn_m_lock); + if (rwlock_code != 0) { - string error = db_name + " transaction manager not exist!"; - SLOG_ERROR(error); - pthread_rwlock_unlock(&txn_m_lock); + SLOG_ERROR("txn_m write lock error: " + to_string(rwlock_code)); return false; } - pthread_rwlock_unlock(&txn_m_lock); - return true; -} - -bool 
APIUtil::db_checkpoint(string db_name) -{ - pthread_rwlock_wrlock(&txn_m_lock); + #if defined(DEBUG) + SLOG_DEBUG("txn_m write lock ok"); + #endif if (txn_managers.find(db_name) == txn_managers.end()) { - string error = db_name + " txn checkpoint error!"; - SLOG_ERROR(error); + SLOG_WARN(db_name + " checkpoint error: can't find txn manager!"); pthread_rwlock_unlock(&txn_m_lock); return false; } shared_ptr txn_m = txn_managers[db_name]; - // txn_m->abort_all_running(); - // txn_m->Checkpoint(); + txn_m->abort_all_running(); + txn_m->Checkpoint(); txn_managers.erase(db_name); - pthread_rwlock_unlock(&txn_m_lock); - #if defined(DEBUG) - SLOG_DEBUG(db_name + " txn checkpoint success!"); - #endif - return true; + rwlock_code = pthread_rwlock_unlock(&txn_m_lock); + if (rwlock_code == 0) + { + #if defined(DEBUG) + SLOG_DEBUG("txn_m unlock ok"); + SLOG_DEBUG(db_name + " checkpoint success!"); + #endif + return true; + } + else + { + SLOG_ERROR("txn_m unlock error:" + to_string(rwlock_code)); + return false; + } } // bool APIUtil::db_checkpoint_all() @@ -880,7 +800,15 @@ bool APIUtil::db_checkpoint(string db_name) bool APIUtil::delete_from_databases(string db_name) { - pthread_rwlock_wrlock(&databases_map_lock); + int rwlock_code = pthread_rwlock_wrlock(&databases_map_lock); + if (rwlock_code != 0) + { + SLOG_ERROR("database_map write lock error: " + to_string(rwlock_code)); + return false; + } + #if defined(DEBUG) + SLOG_DEBUG("database_map write lock ok"); + #endif Database *db = NULL; std::map::iterator iter = databases.find(db_name); if (iter != databases.end()) @@ -890,75 +818,92 @@ bool APIUtil::delete_from_databases(string db_name) db = NULL; } databases.erase(db_name); - pthread_rwlock_unlock(&databases_map_lock); - return true; + rwlock_code = pthread_rwlock_unlock(&databases_map_lock); + if (rwlock_code == 0) + { + #if defined(DEBUG) + SLOG_DEBUG("database_map unlock ok"); + #endif + return true; + } + else + { + SLOG_ERROR("database_map unlock error:" + to_string(rwlock_code)); + return false; + } } bool APIUtil::delete_from_already_build(string db_name) { - if(APIUtil::trywrlock_already_build_map()){ - already_build.erase(db_name); + int rwlock_code = pthread_rwlock_wrlock(&already_build_map_lock); + if(rwlock_code == 0) + { // remove databse info from system.db - std::string update = "DELETE WHERE {<" + db_name + "> ?y.}"; - update_sys_db(update); - update = "DELETE WHERE {<" + db_name + "> ?y.}"; - update_sys_db(update); - update = "DELETE WHERE {<" + db_name + "> ?y.}"; - update_sys_db(update); - - // clear all privileges - std::map::iterator iter; - for (iter = users.begin(); iter != users.end(); iter++) - { - pthread_rwlock_wrlock(&(iter->second->query_priv_set_lock)); - iter->second->query_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->query_priv_set_lock)); + string update = "DELETE WHERE {<" + + db_name + "> ?y1. <" + + db_name + "> ?y2. <" + + db_name + "> ?y3. }"; + bool update_result = update_sys_db(update); + // remove all privileges of db_name + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. 
}"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + update = "DELETE WHERE {?s <" + db_name + ">. }"; + update_result = update_sys_db(update) || update_result; + if (update_result) + { + refresh_sys_db(); + + // remove from already build map + already_build.erase(db_name); + + // clear all privileges + std::map::iterator iter; + for (iter = users.begin(); iter != users.end(); iter++) + { + pthread_rwlock_wrlock(&(iter->second->query_priv_set_lock)); + iter->second->query_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->query_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->update_priv_set_lock)); - iter->second->update_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->update_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->update_priv_set_lock)); + iter->second->update_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->update_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->load_priv_set_lock)); - iter->second->load_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->load_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->load_priv_set_lock)); + iter->second->load_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->load_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->unload_priv_set_lock)); - iter->second->unload_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->unload_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->unload_priv_set_lock)); + iter->second->unload_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->unload_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->backup_priv_set_lock)); - iter->second->backup_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->backup_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->backup_priv_set_lock)); + iter->second->backup_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->backup_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->restore_priv_set_lock)); - iter->second->restore_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->restore_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->restore_priv_set_lock)); + iter->second->restore_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->restore_priv_set_lock)); - pthread_rwlock_wrlock(&(iter->second->export_priv_set_lock)); - iter->second->export_priv.erase(db_name); - pthread_rwlock_unlock(&(iter->second->export_priv_set_lock)); + pthread_rwlock_wrlock(&(iter->second->export_priv_set_lock)); + iter->second->export_priv.erase(db_name); + pthread_rwlock_unlock(&(iter->second->export_priv_set_lock)); + } } - - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - update = "DELETE where {?x <" + db_name + ">.}"; - update_sys_db(update); - - APIUtil::unlock_already_build_map(); - return true; + return unlock_already_build_map(); } else { + SLOG_ERROR("already_build_map write lock error:" + to_string(rwlock_code)); return false; } } @@ -1045,71 +990,87 @@ txn_id_t APIUtil::check_txn_id(string TID_s) return 
TID; } -Database *APIUtil::get_database(const std::string &db_name) +bool APIUtil::get_database(const std::string &db_name, Database *& db) { - pthread_rwlock_rdlock(&databases_map_lock); - Database *db = NULL; + bool rwlock_code = pthread_rwlock_rdlock(&databases_map_lock); + if (rwlock_code != 0) + { + #if defined(DEBUG) + SLOG_DEBUG("database_map read lock error: " + to_string(rwlock_code)); + #endif + return false; + } + #if defined(DEBUG) + SLOG_DEBUG("database_map read lock ok"); + #endif std::map::iterator iter = databases.find(db_name); if (iter != databases.end()) { db = iter->second; } - pthread_rwlock_unlock(&databases_map_lock); - return db; + else + { + db = NULL; + } + rwlock_code = pthread_rwlock_unlock(&databases_map_lock); + if (rwlock_code == 0) + { + #if defined(DEBUG) + SLOG_DEBUG("database_map unlock ok"); + #endif + return true; + } + else + { + #if defined(DEBUG) + SLOG_DEBUG("database_map unlock error:" + to_string(rwlock_code)); + #endif + return false; + } } bool APIUtil::check_already_load(const std::string &db_name) { - Database *rt = APIUtil::get_database(db_name); - if (rt == NULL) + Database *db; + bool rt = APIUtil::get_database(db_name, db); + if (rt && db != NULL) { - return false; + return true; } else { - return true; + return false; } } bool APIUtil::add_already_build(const std::string &db_name, const std::string &creator, const std::string &build_time) { - pthread_rwlock_wrlock(&already_build_map_lock); + int rwlock_code = pthread_rwlock_wrlock(&already_build_map_lock); + if (rwlock_code != 0) + { + SLOG_ERROR("already_build_map write lock error: " + to_string(rwlock_code)); + return false; + } #if defined(DEBUG) - SLOG_DEBUG("already_build_map_lock acquired."); + SLOG_DEBUG("already_build_map write lock ok."); #endif struct DatabaseInfo* temp_db = new DatabaseInfo(db_name, creator, build_time); already_build.insert(pair(db_name, temp_db)); - pthread_rwlock_unlock(&already_build_map_lock); + unlock_already_build_map(); string update = "INSERT DATA {<" + db_name + "> \"already_built\"." + "<" + db_name + "> <" + creator + "> ." 
+ "<" + db_name + "> \"" + build_time + "\".}"; - update_sys_db(update); + bool update_result = update_sys_db(update); + if (update_result) + refresh_sys_db(); #if defined(DEBUG) SLOG_DEBUG("database add done."); #endif - return true; + return update_result; } -// std::string APIUtil::get_already_build(const std::string &db_name) -// { -// pthread_rwlock_rdlock(&already_build_map_lock); -// std::map::iterator iter = already_build.find(db_name); -// pthread_rwlock_unlock(&already_build_map_lock); -// if (iter == already_build.end()) -// { -// return ""; -// } -// else -// { -// return iter->second->toJSON(); -// } -// } - void APIUtil::get_already_builds(const std::string& username, vector &array) { pthread_rwlock_rdlock(&already_build_map_lock); - // rapidjson::StringBuffer strBuf; - // rapidjson::Writer writer(strBuf); - // writer.StartArray(); std::map::iterator iter; for (iter = already_build.begin(); iter != already_build.end(); iter++) { @@ -1132,11 +1093,8 @@ void APIUtil::get_already_builds(const std::string& username, vectorsetStatus("unloaded"); } array.push_back(db_info); - // writer.String(db_info->toJSON().c_str()); } - // writer.EndArray(); pthread_rwlock_unlock(&already_build_map_lock); - // return strBuf.GetString(); } bool APIUtil::check_already_build(const std::string &db_name) @@ -1160,11 +1118,7 @@ bool APIUtil::trywrlock_database(const std::string &db_name) pthread_rwlock_rdlock(&already_build_map_lock); std::map::iterator iter = already_build.find(db_name); pthread_rwlock_unlock(&already_build_map_lock); - if (pthread_rwlock_trywrlock(&(iter->second->db_lock)) != 0) - { - result = false; - } - else + if (pthread_rwlock_trywrlock(&(iter->second->db_lock)) == 0) { result = true; } @@ -1293,7 +1247,7 @@ bool APIUtil::check_db_count() return already_build.size() < max_database_num; } -bool APIUtil::add_privilege(const std::string& username, const std::string& type, const std::string& db_name) +bool APIUtil::add_privilege(const std::string& username, const vector& types, const std::string& db_name) { if(username == ROOT_USERNAME) { @@ -1303,66 +1257,93 @@ bool APIUtil::add_privilege(const std::string& username, const std::string& type std::map::iterator it = users.find(username); if(it != users.end() && db_name != SYSTEM_DB_NAME) { - if(type == "query") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); - it->second->query_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); - - } - else if(type == "update") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); - it->second->update_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); - - } - else if(type == "load") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); - it->second->load_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); - } - else if(type == "unload") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); - it->second->unload_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); - } - else if(type == "restore") - { - string update = "INSERT DATA {<" + 
username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); - it->second->restore_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); - } - else if(type == "backup") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); - it->second->backup_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); - } - else if(type == "export") - { - string update = "INSERT DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); - it->second->export_priv.insert(db_name); - pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); - } + string update = "INSERT DATA { "; + for (unsigned i = 0; i < types.size(); i++) + { + string type = types[i]; + if(type == "query") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "update") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "load") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "unload") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "restore") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "backup") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + else if(type == "export") + { + update = update + "<" + username + "> <" + db_name + ">. "; + } + } + update = update + "}"; + bool add_result = APIUtil::update_sys_db(update); + if (add_result) + { + refresh_sys_db(); + for (unsigned i = 0; i < types.size(); i++) + { + string type = types[i]; + if(type == "query") + { + pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); + it->second->query_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); + } + else if(type == "update") + { + pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); + it->second->update_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); + } + else if(type == "load") + { + pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); + it->second->load_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); + } + else if(type == "unload") + { + pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); + it->second->unload_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); + } + else if(type == "restore") + { + pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); + it->second->restore_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); + } + else if(type == "backup") + { + pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); + it->second->backup_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); + } + else if(type == "export") + { + pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); + it->second->export_priv.insert(db_name); + pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); + } + } + } pthread_rwlock_unlock(&users_map_lock); - return 1; + return add_result; } else { @@ -1380,70 +1361,69 @@ bool APIUtil::update_sys_db(string query) #if defined(DEBUG) SLOG_DEBUG("update sparql:\n" + query); #endif + pthread_rwlock_wrlock(&system_db_lock); ResultSet _rs; FILE* ofp = stdout; string msg; int ret 
= system_database->query(query, _rs, ofp); if (ret <= -100) //select query - { - if(ret == -100) - { - msg = _rs.to_str(); - } - else //query error - { - msg = "query failed."; - } - - return false; - } - else //update query - { - if(ret >= 0) - { - #if defined(DEBUG) - msg = "update num: " + util.int2string(ret); - SLOG_DEBUG(msg); - #endif - refresh_sys_db(); - //system_database->save(); - //delete system_database; - //system_database=NULL; - return true; - } - else //update error - { - msg = "update failed."; - SLOG_ERROR(msg); - return false; - } - } - + { + if(ret == -100) + { + msg = _rs.to_str(); + } + else //query error + { + msg = "query failed."; + } + #if defined(DEBUG) + SLOG_DEBUG(msg); + #endif + pthread_rwlock_unlock(&system_db_lock); + return false; + } + else //update query + { + if(ret >= 0) + { + #if defined(DEBUG) + msg = "update num: " + util.int2string(ret); + SLOG_DEBUG(msg); + #endif + pthread_rwlock_unlock(&system_db_lock); + return true; + } + else //update error + { + msg = "update failed."; + SLOG_ERROR(msg); + pthread_rwlock_unlock(&system_db_lock); + return false; + } + } } bool APIUtil::refresh_sys_db() { - pthread_rwlock_rdlock(&databases_map_lock); + pthread_rwlock_wrlock(&system_db_lock); system_database->save(); - delete system_database; - system_database = NULL; + APIUtil::delete_from_databases(SYSTEM_DB_NAME); system_database = new Database(SYSTEM_DB_NAME); bool flag = system_database->load(); #if defined(DEBUG) SLOG_DEBUG("system database refresh"); #endif - pthread_rwlock_unlock(&databases_map_lock); + if (flag) + { + APIUtil::add_database(SYSTEM_DB_NAME, system_database); + } + pthread_rwlock_unlock(&system_db_lock); return flag; } std::string APIUtil::query_sys_db(const std::string& sparql) { - string db_name = SYSTEM_DB_NAME; - pthread_rwlock_rdlock(&already_build_map_lock); - std::map::iterator it_already_build = already_build.find(db_name); - pthread_rwlock_unlock(&already_build_map_lock); - - pthread_rwlock_rdlock(&(it_already_build->second->db_lock)); + pthread_rwlock_rdlock(&system_db_lock); ResultSet rs; FILE* output = NULL; @@ -1464,7 +1444,7 @@ std::string APIUtil::query_sys_db(const std::string& sparql) SLOG_DEBUG("search system db returned successfully."); #endif string success = rs.to_JSON(); - pthread_rwlock_unlock(&(it_already_build->second->db_lock)); + pthread_rwlock_unlock(&system_db_lock); return success; } else @@ -1479,7 +1459,7 @@ std::string APIUtil::query_sys_db(const std::string& sparql) // error_code = 403; } - pthread_rwlock_unlock(&(it_already_build->second->db_lock)); + pthread_rwlock_unlock(&system_db_lock); return error; } @@ -1500,12 +1480,13 @@ bool APIUtil::user_add(const string& username, const string& password) { #if defined(DEBUG) SLOG_DEBUG("user ready to add."); - #endif + #endif struct DBUserInfo *temp_user = new DBUserInfo(username, password); users.insert(pair(username, temp_user)); string update = "INSERT DATA {<" + username + "> \"" + password + "\".}"; - update_sys_db(update); - result = true; + result = update_sys_db(update); + if (result) + refresh_sys_db(); } pthread_rwlock_unlock(&users_map_lock); return result; @@ -1518,25 +1499,25 @@ bool APIUtil::user_delete(const string& username) if(users.find(username) != users.end()) { users.erase(username); - string update = "DELETE where {<" + username + "> ?o.}"; - update_sys_db(update); + string update = "DELETE WHERE {<" + username + "> ?o.}" ; + result = update_sys_db(update); // clear privileges - update = "DELETE where {<" + username + "> ?x.}"; - 
update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - - result = true; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + if (result) + refresh_sys_db(); } pthread_rwlock_unlock(&users_map_lock); return result; @@ -1552,10 +1533,13 @@ bool APIUtil::user_pwd_alert(const string& username, const string& password) { iter->second->setPassword(password); string update = "DELETE WHERE {<" + username + "> ?o.}"; - update_sys_db(update); - string update2 = "INSERT DATA {<" + username + "> \"" + password + "\".}"; - update_sys_db(update2); - result = true; + result = update_sys_db(update); + if (result) + { + update = "INSERT DATA {<" + username + "> \"" + password + "\".}"; + result = update_sys_db(update) || result; + refresh_sys_db(); + } } pthread_rwlock_unlock(&users_map_lock); return result; @@ -1569,62 +1553,64 @@ int APIUtil::clear_user_privilege(string username) } pthread_rwlock_rdlock(&users_map_lock); std::map::iterator it = users.find(username); - string update=""; if(it != users.end()) { - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); - it->second->query_priv.clear(); - pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); - it->second->load_priv.clear(); - pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); - it->second->unload_priv.clear(); - pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); - it->second->update_priv.clear(); - pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); - it->second->backup_priv.clear(); - pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); - it->second->restore_priv.clear(); - pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); - - update = "DELETE where {<" + username + "> ?x.}"; - 
update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); - it->second->export_priv.clear(); - pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); - + string update = "DELETE WHERE {<" + username + "> ?o.}"; + bool result = update_sys_db(update); + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + update = "DELETE WHERE {<" + username + "> ?o.}"; + result = update_sys_db(update) || result; + if (result) + { + refresh_sys_db(); + pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); + it->second->query_priv.clear(); + pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); + it->second->load_priv.clear(); + pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); + it->second->unload_priv.clear(); + pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); + it->second->update_priv.clear(); + pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); + it->second->backup_priv.clear(); + pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); + it->second->restore_priv.clear(); + pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); + + pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); + it->second->export_priv.clear(); + pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); + } pthread_rwlock_unlock(&users_map_lock); - return 1; + return result; } else { pthread_rwlock_unlock(&users_map_lock); - return -1; + return 0; } } -bool APIUtil::del_privilege(const std::string& username, const std::string& type, const std::string& db_name) +bool APIUtil::del_privilege(const std::string& username, const vector& types, const std::string& db_name) { if (username == ROOT_USERNAME) { @@ -1632,75 +1618,105 @@ bool APIUtil::del_privilege(const std::string& username, const std::string& type } pthread_rwlock_rdlock(&users_map_lock); std::map::iterator it = users.find(username); - int del_result = 0; - if(it != users.end()) + if(it != users.end() && db_name != SYSTEM_DB_NAME) { - if(type == "query" && it->second->query_priv.find(db_name) != it->second->query_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); - it->second->query_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); - del_result = 1; - } - else if(type == "update" && it->second->update_priv.find(db_name) != it->second->update_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); - it->second->update_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); - del_result = 1; - } - else if(type == "load" && it->second->load_priv.find(db_name) != 
it->second->load_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); - it->second->load_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); - del_result = 1; - } - else if(type == "unload" && it->second->unload_priv.find(db_name) != it->second->unload_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); - it->second->unload_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); - del_result = 1; - } - else if(type == "backup" && it->second->backup_priv.find(db_name) != it->second->backup_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); - it->second->backup_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); - del_result = 1; - } - else if(type == "restore" && it->second->restore_priv.find(db_name) != it->second->restore_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); - it->second->restore_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); - del_result = 1; - } - else if(type == "export" && it->second->export_priv.find(db_name) != it->second->export_priv.end()) - { - string update = "DELETE DATA {<" + username + "> <" + db_name + ">.}"; - update_sys_db(update); - pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); - it->second->export_priv.erase(db_name); - pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); - del_result = 1; - } + string update = ""; + bool del_result = false; + bool refresh_flag = false; + for (unsigned i = 0; i < types.size(); i++) + { + string type = types[i]; + if(type == "query" && it->second->query_priv.find(db_name) != it->second->query_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "update" && it->second->update_priv.find(db_name) != it->second->update_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "load" && it->second->load_priv.find(db_name) != it->second->load_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "unload" && it->second->unload_priv.find(db_name) != it->second->unload_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "backup" && it->second->backup_priv.find(db_name) != it->second->backup_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "restore" && it->second->restore_priv.find(db_name) != it->second->restore_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. }"; + } + else if(type == "export" && it->second->export_priv.find(db_name) != it->second->export_priv.end()) + { + update = "DELETE DATA { <" + username + "> <" + db_name + ">. 
}"; + } else + { + continue; + } + // delete privilege + del_result = update_sys_db(update); + refresh_flag = refresh_flag || del_result; + // remove from privilege set + if (del_result) + { + if(type == "query" && it->second->query_priv.find(db_name) != it->second->query_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); + it->second->query_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); + } + else if(type == "update" && it->second->update_priv.find(db_name) != it->second->update_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->update_priv_set_lock)); + it->second->update_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->update_priv_set_lock)); + } + else if(type == "load" && it->second->load_priv.find(db_name) != it->second->load_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->load_priv_set_lock)); + it->second->load_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->load_priv_set_lock)); + } + else if(type == "unload" && it->second->unload_priv.find(db_name) != it->second->unload_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->unload_priv_set_lock)); + it->second->unload_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->unload_priv_set_lock)); + } + else if(type == "backup" && it->second->backup_priv.find(db_name) != it->second->backup_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->backup_priv_set_lock)); + it->second->backup_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->backup_priv_set_lock)); + } + else if(type == "restore" && it->second->restore_priv.find(db_name) != it->second->restore_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->restore_priv_set_lock)); + it->second->restore_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->restore_priv_set_lock)); + } + else if(type == "export" && it->second->export_priv.find(db_name) != it->second->export_priv.end()) + { + pthread_rwlock_wrlock(&(it->second->export_priv_set_lock)); + it->second->export_priv.erase(db_name); + pthread_rwlock_unlock(&(it->second->export_priv_set_lock)); + } + } + } + if (refresh_flag) + refresh_sys_db(); + pthread_rwlock_unlock(&users_map_lock); + return del_result; } - pthread_rwlock_unlock(&users_map_lock); - return del_result; + else + { + pthread_rwlock_unlock(&users_map_lock); + return 0; + } } bool APIUtil::check_privilege(const std::string& username, const std::string& type, const std::string& db_name) @@ -1812,6 +1828,7 @@ bool APIUtil::init_privilege(const std::string& username, const std::string& db_ bool rt = update_sys_db(update); if(rt) { + refresh_sys_db(); pthread_rwlock_wrlock(&(it->second->query_priv_set_lock)); it->second->query_priv.insert(db_name); pthread_rwlock_unlock(&(it->second->query_priv_set_lock)); @@ -1860,6 +1877,7 @@ bool APIUtil::init_privilege(const std::string& username, const std::string& db_ ss << "INSERT DATA {"; unsigned int total_privilegs = 0; std::map> priv_user_map; + pthread_rwlock_rdlock(&system_db_lock); for (std::string type:privileges) { sparql = "select ?x where {?x <" + src_db_name + ">.}"; @@ -1881,6 +1899,7 @@ bool APIUtil::init_privilege(const std::string& username, const std::string& db_ } } } + pthread_rwlock_unlock(&system_db_lock); ss << "}"; if (total_privilegs > 0) { @@ -1888,6 +1907,7 @@ bool APIUtil::init_privilege(const std::string& username, const std::string& db_ bool rt = update_sys_db(insert_sparql); if(rt) { + refresh_sys_db(); pthread_rwlock_rdlock(&users_map_lock); std::map>::iterator iter_priv; for (iter_priv 
= priv_user_map.begin(); iter_priv != priv_user_map.end(); iter_priv++) @@ -2319,10 +2339,8 @@ int APIUtil::update_transactionlog(std::string TID, std::string state, std::stri } fclose(fp); fclose(fp1); - string cmd = "rm "; - cmd += TRANSACTION_LOG_PATH; - system(cmd.c_str()); - cmd = "mv "; + Util::remove_path(TRANSACTION_LOG_PATH); + string cmd = "mv "; cmd += TRANSACTION_LOG_TEMP_PATH; cmd += ' '; cmd += TRANSACTION_LOG_PATH; @@ -2432,10 +2450,8 @@ void APIUtil::abort_transactionlog(long end_time) } fclose(fp); fclose(fp1); - string cmd = "rm "; - cmd += TRANSACTION_LOG_PATH; - system(cmd.c_str()); - cmd = "mv "; + Util::remove_path(TRANSACTION_LOG_PATH); + string cmd = "mv "; cmd += TRANSACTION_LOG_TEMP_PATH; cmd += ' '; cmd += TRANSACTION_LOG_PATH; @@ -2450,8 +2466,6 @@ std::string APIUtil::fun_cppcheck(std::string username, struct PFNInfo *fun_info string report_path = APIUtil::pfn_file_path + username + "/report.txt"; string check_file_path = APIUtil::pfn_file_path + username + "/" + file_name + "_temp.cpp"; string cppcheck = "cppcheck -j 10 --force suppress=missingIncludeSystem --template=\"[line:{line}]:({severity}) {message}\" --output-file="+report_path+" "+check_file_path; - string report_delete = "rm -f " + report_path; - string cppcheckFile_delete = "rm -f " + check_file_path; string lookAtfile = "cat " + check_file_path; ofstream fout(check_file_path.c_str()); if (fout.is_open()) @@ -2474,8 +2488,8 @@ std::string APIUtil::fun_cppcheck(std::string username, struct PFNInfo *fun_info } } cppcheck_fin.close(); - system(report_delete.c_str()); - system(cppcheckFile_delete.c_str()); + Util::remove_path(report_path); + Util::remove_path(check_file_path); return report_detail; } @@ -2631,10 +2645,9 @@ string APIUtil::fun_build(const std::string &username, const std::string fun_nam util.create_dirs(targetDir); string targetFile = targetDir + "/lib" + file_name + md5str + ".so"; string logFile = APIUtil::pfn_file_path + username + "/error.out"; - string cmd = "rm -f " + targetFile; - system(cmd.c_str()); + Util::remove_path(targetFile); string libaray = "lib/libgpathqueryhandler.so lib/libgcsr.so"; - cmd = "g++ -std=c++11 -fPIC " + sourceFile + " -shared -o " + targetFile + " " + libaray + " 2>" + logFile; + string cmd = "g++ -std=c++11 -fPIC " + sourceFile + " -shared -o " + targetFile + " " + libaray + " 2>" + logFile; int status; status = system(cmd.c_str()); string error_msg = ""; @@ -2644,8 +2657,8 @@ string APIUtil::fun_build(const std::string &username, const std::string fun_nam fun_info->setFunStatus("2"); //delete old so string usingPath = APIUtil::pfn_lib_path + username ; - string rmOldSo = "rm -f "+ usingPath +"/lib" + file_name + "*.so"; - system(rmOldSo.c_str()); + string rmOldSo = usingPath +"/lib" + file_name + "*.so"; + Util::remove_path(targetFile); //mv the new into using Path string mvCmd = "mv " + targetFile + " " + usingPath +"/"; system(mvCmd.c_str()); @@ -2676,8 +2689,7 @@ string APIUtil::fun_build(const std::string &username, const std::string fun_nam // delete delete fun_info; fun_info = NULL; - cmd = "rm -f " + logFile; - system(cmd.c_str()); + Util::remove_path(logFile); // has error_msg if (error_msg.size() > 0) { @@ -2811,10 +2823,8 @@ void APIUtil::fun_write_json_file(const std::string& username, struct PFNInfo *f string sourcePath = APIUtil::pfn_file_path + username + "/" + file_name + ".cpp"; // string backPath = sourcePath + "." 
+ util.getTimeString2(); string libPath = APIUtil::pfn_lib_path + username + "/lib" + file_name + "*.so"; - cmd = "rm -f " + sourcePath; - system(cmd.c_str()); - cmd = "rm -f " + libPath; - system(cmd.c_str()); + Util::remove_path(sourcePath); + Util::remove_path(libPath); } } else @@ -2851,8 +2861,7 @@ void APIUtil::fun_write_json_file(const std::string& username, struct PFNInfo *f if (WIFEXITED(status) && WEXITSTATUS(status) == 0) { // remove old json file - cmd = "rm -f " + back_path; - system(cmd.c_str()); + Util::remove_path(back_path); pthread_rwlock_unlock(&fun_data_lock); #if defined(DEBUG) SLOG_DEBUG(cmd); diff --git a/GRPC/APIUtil.h b/src/GRPC/APIUtil.h similarity index 98% rename from GRPC/APIUtil.h rename to src/GRPC/APIUtil.h index 60234958..567155c8 100644 --- a/GRPC/APIUtil.h +++ b/src/GRPC/APIUtil.h @@ -116,7 +116,7 @@ struct DBUserInfo pthread_rwlock_t unload_priv_set_lock; pthread_rwlock_t backup_priv_set_lock; pthread_rwlock_t restore_priv_set_lock; - pthread_rwlock_t export_priv_set_lock; + pthread_rwlock_t export_priv_set_lock; DBUserInfo() {} DBUserInfo(std::string _username, std::string _password) { @@ -922,6 +922,7 @@ class APIUtil pthread_rwlock_t already_build_map_lock; pthread_rwlock_t txn_m_lock; pthread_rwlock_t ips_map_lock; + pthread_rwlock_t system_db_lock; string system_username = "system"; string system_password; string system_password_path; @@ -949,17 +950,12 @@ class APIUtil APIUtil(); ~APIUtil(); int initialize(const std::string server_type, const std::string port, const std::string db_name, bool load_csr); - bool trywrlock_database_map(); - bool unlock_database_map(); - bool trywrlock_already_build_map(); bool unlock_already_build_map(); - bool rw_wrlock_build_map(); - bool rw_wrlock_database_map(); bool add_database(const std::string& db_name, Database *& db); - Database* get_database(const std::string& db_name); - DatabaseInfo* get_databaseinfo(const std::string& db_name); + bool get_database(const std::string& db_name, Database *& db); + bool get_databaseinfo(const std::string& db_name, DatabaseInfo *& dbInfo); bool trywrlock_databaseinfo(DatabaseInfo* dbinfo); - bool tryrdlock_databaseinfo(DatabaseInfo* dbinfo); + bool rdlock_databaseinfo(DatabaseInfo* dbinfo); bool unlock_databaseinfo(DatabaseInfo* dbinfo); bool check_already_load(const std::string& db_name); shared_ptr get_Txn_ptr(string db_name); @@ -977,8 +973,8 @@ class APIUtil bool check_user_count(); bool check_db_exist(const std::string& db_name); bool check_db_count(); - bool add_privilege(const std::string& username, const std::string& type, const std::string& db_name); - bool del_privilege(const std::string& username, const std::string& type, const std::string& db_name); + bool add_privilege(const std::string& username, const vector& types, const std::string& db_name); + bool del_privilege(const std::string& username, const vector& types, const std::string& db_name); bool check_privilege(const std::string& username, const std::string& type, const std::string& db_name); bool init_privilege(const std::string& username, const std::string& db_name); bool copy_privilege(const std::string& src_db_name, const std::string& dst_db_name); @@ -988,7 +984,6 @@ class APIUtil bool build_db_user_privilege(std::string db_name, std::string username); bool insert_txn_managers(Database* current_database, std::string database); bool remove_txn_managers(std::string db_name); - bool find_txn_managers(std::string db_name); bool db_checkpoint(string db_name); // bool db_checkpoint_all(); bool 
delete_from_databases(string db_name); diff --git a/src/GRPC/CMakeLists.txt b/src/GRPC/CMakeLists.txt new file mode 100644 index 00000000..d365c80f --- /dev/null +++ b/src/GRPC/CMakeLists.txt @@ -0,0 +1,11 @@ +add_library(gstore_grpc OBJECT + APIUtil.cpp + grpc_status_code.cpp + grpc_multipart_parser.cpp + grpc_content.cpp + grpc_message.cpp + grpc_server_task.cpp + grpc_routetable.cpp + grpc_router.cpp + grpc_server.cpp +) \ No newline at end of file diff --git a/GRPC/grpc_content.cpp b/src/GRPC/grpc_content.cpp similarity index 100% rename from GRPC/grpc_content.cpp rename to src/GRPC/grpc_content.cpp diff --git a/GRPC/grpc_content.h b/src/GRPC/grpc_content.h similarity index 100% rename from GRPC/grpc_content.h rename to src/GRPC/grpc_content.h diff --git a/GRPC/grpc_message.cpp b/src/GRPC/grpc_message.cpp similarity index 85% rename from GRPC/grpc_message.cpp rename to src/GRPC/grpc_message.cpp index c15a9269..b7d7b046 100644 --- a/GRPC/grpc_message.cpp +++ b/src/GRPC/grpc_message.cpp @@ -16,6 +16,7 @@ #include "grpc_status_code.h" #include "grpc_stringpiece.h" #include "grpc_server_task.h" +#include "../Util/CompressFileUtil.h" using namespace grpc; using namespace protocol; @@ -289,31 +290,38 @@ GRPCReq &GRPCReq::operator=(GRPCReq&& other) // GRPCResp void GRPCResp::String(const std::string &str) { - auto *compress_data = new std::string; - int ret = this->compress(&str, compress_data); + auto *compress_data = malloc(str.size()); + size_t compress_size = 0; + int ret = this->compress(&str, compress_data, compress_size); if(ret != StatusOK) { this->append_output_body(static_cast(str.c_str()), str.size()); } else { - this->append_output_body_nocopy(compress_data->c_str(), compress_data->size()); + this->append_output_body_nocopy(compress_data, compress_size); } this->resp_code = 0; this->resp_msg = "Success"; - task_of(this)->add_callback([compress_data](GRPCTask *) { delete compress_data; }); + task_of(this)->add_callback([compress_data](GRPCTask *) { free(compress_data); }); } void GRPCResp::String(std::string &&str) { - auto *data = new std::string; - int ret = this->compress(&str, data); - if(ret != StatusOK) + auto *compress_data = malloc(str.size()); + size_t compress_size = 0; + int ret = this->compress(&str, compress_data, compress_size); + if(ret == StatusOK) { - *data = std::move(str); - } - this->append_output_body_nocopy(data->c_str(), data->size()); - task_of(this)->add_callback([data](GRPCTask *) { delete data; }); + this->append_output_body_nocopy(compress_data, compress_size); + } + else + { + this->append_output_body(static_cast(str.c_str()), str.size()); + } + this->resp_code = 0; + this->resp_msg = "Success"; + task_of(this)->add_callback([compress_data](GRPCTask *) { free(compress_data); }); } void GRPCResp::File(const std::string &path) @@ -361,6 +369,40 @@ void GRPCResp::Json(const std::string &str) this->String(str); } +// void GRPCResp::Gzip(const ::Json &json) +// { +// this->headers["Content-Type"] = ContentType::to_str(APPLICATION_JSON); +// this->headers["Content-Encoding"] = "gzip"; +// rapidjson::StringBuffer resBuffer; +// rapidjson::Writer resWriter(resBuffer); +// json.Accept(resWriter); +// if (json.HasMember("StatusCode") && json["StatusCode"].IsInt()) +// { +// this->resp_code = json["StatusCode"].GetInt(); +// } +// if (json.HasMember("StatusMsg") && json["StatusMsg"].IsString()) +// { +// this->resp_msg = json["StatusMsg"].GetString(); +// } +// std::string data = resBuffer.GetString(); +// void* compress_ = malloc(data.size()); +// if (compress_ 
== nullptr) +// { +// std::cout<<"malloc failed Cache Not Enough:"<append_output_body_nocopy(compress_, compress_size); +// task_of(this)->add_callback([compress_](GRPCTask *) { free(compress_); }); +// } + void GRPCResp::set_status(int status_code) { protocol::HttpUtil::set_response_status(this, status_code); @@ -434,21 +476,35 @@ void GRPCResp::add_task(SubTask *task) **server_task << task; } -int GRPCResp::compress(const std::string * const data, std::string *compress_data) +int GRPCResp::compress(const std::string * const data, void *compress_data, size_t &compress_size) { - int status = StatusOK; if (headers.find("Content-Encoding") != headers.end()) { if (headers["Content-Encoding"].find("gzip") != std::string::npos) { - // TODO - // status = Compressor::gzip(data, compress_data); + if (compress_data != nullptr) + { + int rt = CompressUtil::GzipHelper::compress(data, compress_data, compress_size); + if (rt == 0) + { + return StatusOK; + } + else + { + return StatusCompressError; + } + } + else + { + return -1; + } + } + else + { + return StatusCompressNotSupport; } - } else - { - status = StatusNoComrpess; } - return status; + return StatusNoComrpess; } GRPCResp::GRPCResp(GRPCResp&& other) diff --git a/GRPC/grpc_message.h b/src/GRPC/grpc_message.h similarity index 96% rename from GRPC/grpc_message.h rename to src/GRPC/grpc_message.h index 7f5261d1..3be079d1 100644 --- a/GRPC/grpc_message.h +++ b/src/GRPC/grpc_message.h @@ -4,9 +4,9 @@ #include "workflow/HttpMessage.h" #include "workflow/WFTaskFactory.h" -#include "../tools/rapidjson/document.h" -#include "../tools/rapidjson/writer.h" -#include "../tools/rapidjson/stringbuffer.h" +#include "rapidjson/document.h" +#include "rapidjson/writer.h" +#include "rapidjson/stringbuffer.h" #include "grpc_noncopyable.h" #include "grpc_content.h" @@ -183,7 +183,7 @@ class GRPCResp : public protocol::HttpResponse, public Noncopyable void add_task(SubTask *task); private: - int compress(const std::string * const data, std::string *compress_data); + int compress(const std::string * const data, void *compress_data, size_t &compress_size); public: GRPCResp() = default; diff --git a/GRPC/grpc_multipart_parser.cpp b/src/GRPC/grpc_multipart_parser.cpp similarity index 100% rename from GRPC/grpc_multipart_parser.cpp rename to src/GRPC/grpc_multipart_parser.cpp diff --git a/GRPC/grpc_multipart_parser.h b/src/GRPC/grpc_multipart_parser.h similarity index 100% rename from GRPC/grpc_multipart_parser.h rename to src/GRPC/grpc_multipart_parser.h diff --git a/GRPC/grpc_noncopyable.h b/src/GRPC/grpc_noncopyable.h similarity index 100% rename from GRPC/grpc_noncopyable.h rename to src/GRPC/grpc_noncopyable.h diff --git a/GRPC/grpc_operation.h b/src/GRPC/grpc_operation.h similarity index 100% rename from GRPC/grpc_operation.h rename to src/GRPC/grpc_operation.h diff --git a/GRPC/grpc_request_handler.h b/src/GRPC/grpc_request_handler.h similarity index 100% rename from GRPC/grpc_request_handler.h rename to src/GRPC/grpc_request_handler.h diff --git a/GRPC/grpc_router.cpp b/src/GRPC/grpc_router.cpp similarity index 100% rename from GRPC/grpc_router.cpp rename to src/GRPC/grpc_router.cpp diff --git a/GRPC/grpc_router.h b/src/GRPC/grpc_router.h similarity index 100% rename from GRPC/grpc_router.h rename to src/GRPC/grpc_router.h diff --git a/GRPC/grpc_routetable.cpp b/src/GRPC/grpc_routetable.cpp similarity index 100% rename from GRPC/grpc_routetable.cpp rename to src/GRPC/grpc_routetable.cpp diff --git a/GRPC/grpc_routetable.h b/src/GRPC/grpc_routetable.h similarity 
index 100% rename from GRPC/grpc_routetable.h rename to src/GRPC/grpc_routetable.h diff --git a/GRPC/grpc_server.cpp b/src/GRPC/grpc_server.cpp similarity index 100% rename from GRPC/grpc_server.cpp rename to src/GRPC/grpc_server.cpp diff --git a/GRPC/grpc_server.h b/src/GRPC/grpc_server.h similarity index 100% rename from GRPC/grpc_server.h rename to src/GRPC/grpc_server.h diff --git a/GRPC/grpc_server_task.cpp b/src/GRPC/grpc_server_task.cpp similarity index 100% rename from GRPC/grpc_server_task.cpp rename to src/GRPC/grpc_server_task.cpp diff --git a/GRPC/grpc_server_task.h b/src/GRPC/grpc_server_task.h similarity index 100% rename from GRPC/grpc_server_task.h rename to src/GRPC/grpc_server_task.h diff --git a/GRPC/grpc_status_code.cpp b/src/GRPC/grpc_status_code.cpp similarity index 100% rename from GRPC/grpc_status_code.cpp rename to src/GRPC/grpc_status_code.cpp diff --git a/GRPC/grpc_status_code.h b/src/GRPC/grpc_status_code.h similarity index 100% rename from GRPC/grpc_status_code.h rename to src/GRPC/grpc_status_code.h diff --git a/GRPC/grpc_stringpiece.h b/src/GRPC/grpc_stringpiece.h similarity index 100% rename from GRPC/grpc_stringpiece.h rename to src/GRPC/grpc_stringpiece.h diff --git a/src/KVstore/CMakeLists.txt b/src/KVstore/CMakeLists.txt new file mode 100644 index 00000000..5336a858 --- /dev/null +++ b/src/KVstore/CMakeLists.txt @@ -0,0 +1,36 @@ +add_library(gstore_isarray OBJECT + ISArray/ISArray.cpp + ISArray/ISBlockManager.cpp + ISArray/ISEntry.cpp +) + +add_library(gstore_ivarray OBJECT + IVArray/IVArray.cpp + IVArray/IVBlockManager.cpp + IVArray/IVEntry.cpp +) + +add_library(gstore_sitree OBJECT + SITree/SITree.cpp + SITree/storage/SIStorage.cpp + SITree/heap/SIHeap.cpp + SITree/node/SIIntlNode.cpp + SITree/node/SILeafNode.cpp + SITree/node/SINode.cpp +) + +add_library(gstore_kvstore OBJECT + KVstore.cpp + SITree/SITree.cpp + SITree/storage/SIStorage.cpp + SITree/heap/SIHeap.cpp + SITree/node/SIIntlNode.cpp + SITree/node/SILeafNode.cpp + SITree/node/SINode.cpp + ISArray/ISArray.cpp + ISArray/ISBlockManager.cpp + ISArray/ISEntry.cpp + IVArray/IVArray.cpp + IVArray/IVBlockManager.cpp + IVArray/IVEntry.cpp +) \ No newline at end of file diff --git a/KVstore/ISArray/ISArray.cpp b/src/KVstore/ISArray/ISArray.cpp similarity index 100% rename from KVstore/ISArray/ISArray.cpp rename to src/KVstore/ISArray/ISArray.cpp diff --git a/KVstore/ISArray/ISArray.h b/src/KVstore/ISArray/ISArray.h similarity index 100% rename from KVstore/ISArray/ISArray.h rename to src/KVstore/ISArray/ISArray.h diff --git a/KVstore/ISArray/ISBlockManager.cpp b/src/KVstore/ISArray/ISBlockManager.cpp similarity index 100% rename from KVstore/ISArray/ISBlockManager.cpp rename to src/KVstore/ISArray/ISBlockManager.cpp diff --git a/KVstore/ISArray/ISBlockManager.h b/src/KVstore/ISArray/ISBlockManager.h similarity index 100% rename from KVstore/ISArray/ISBlockManager.h rename to src/KVstore/ISArray/ISBlockManager.h diff --git a/KVstore/ISArray/ISEntry.cpp b/src/KVstore/ISArray/ISEntry.cpp similarity index 100% rename from KVstore/ISArray/ISEntry.cpp rename to src/KVstore/ISArray/ISEntry.cpp diff --git a/KVstore/ISArray/ISEntry.h b/src/KVstore/ISArray/ISEntry.h similarity index 100% rename from KVstore/ISArray/ISEntry.h rename to src/KVstore/ISArray/ISEntry.h diff --git a/KVstore/ISTree/ISTree.cpp b/src/KVstore/ISTree/ISTree.cpp similarity index 100% rename from KVstore/ISTree/ISTree.cpp rename to src/KVstore/ISTree/ISTree.cpp diff --git a/KVstore/ISTree/ISTree.h b/src/KVstore/ISTree/ISTree.h similarity 
index 100% rename from KVstore/ISTree/ISTree.h rename to src/KVstore/ISTree/ISTree.h diff --git a/KVstore/ISTree/heap/ISHeap.cpp b/src/KVstore/ISTree/heap/ISHeap.cpp similarity index 100% rename from KVstore/ISTree/heap/ISHeap.cpp rename to src/KVstore/ISTree/heap/ISHeap.cpp diff --git a/KVstore/ISTree/heap/ISHeap.h b/src/KVstore/ISTree/heap/ISHeap.h similarity index 100% rename from KVstore/ISTree/heap/ISHeap.h rename to src/KVstore/ISTree/heap/ISHeap.h diff --git a/KVstore/ISTree/node/ISIntlNode.cpp b/src/KVstore/ISTree/node/ISIntlNode.cpp similarity index 100% rename from KVstore/ISTree/node/ISIntlNode.cpp rename to src/KVstore/ISTree/node/ISIntlNode.cpp diff --git a/KVstore/ISTree/node/ISIntlNode.h b/src/KVstore/ISTree/node/ISIntlNode.h similarity index 100% rename from KVstore/ISTree/node/ISIntlNode.h rename to src/KVstore/ISTree/node/ISIntlNode.h diff --git a/KVstore/ISTree/node/ISLeafNode.cpp b/src/KVstore/ISTree/node/ISLeafNode.cpp similarity index 100% rename from KVstore/ISTree/node/ISLeafNode.cpp rename to src/KVstore/ISTree/node/ISLeafNode.cpp diff --git a/KVstore/ISTree/node/ISLeafNode.h b/src/KVstore/ISTree/node/ISLeafNode.h similarity index 100% rename from KVstore/ISTree/node/ISLeafNode.h rename to src/KVstore/ISTree/node/ISLeafNode.h diff --git a/KVstore/ISTree/node/ISNode.cpp b/src/KVstore/ISTree/node/ISNode.cpp similarity index 100% rename from KVstore/ISTree/node/ISNode.cpp rename to src/KVstore/ISTree/node/ISNode.cpp diff --git a/KVstore/ISTree/node/ISNode.h b/src/KVstore/ISTree/node/ISNode.h similarity index 100% rename from KVstore/ISTree/node/ISNode.h rename to src/KVstore/ISTree/node/ISNode.h diff --git a/KVstore/ISTree/storage/ISStorage.cpp b/src/KVstore/ISTree/storage/ISStorage.cpp similarity index 100% rename from KVstore/ISTree/storage/ISStorage.cpp rename to src/KVstore/ISTree/storage/ISStorage.cpp diff --git a/KVstore/ISTree/storage/ISStorage.h b/src/KVstore/ISTree/storage/ISStorage.h similarity index 100% rename from KVstore/ISTree/storage/ISStorage.h rename to src/KVstore/ISTree/storage/ISStorage.h diff --git a/KVstore/IVArray/IVArray.cpp b/src/KVstore/IVArray/IVArray.cpp similarity index 100% rename from KVstore/IVArray/IVArray.cpp rename to src/KVstore/IVArray/IVArray.cpp diff --git a/KVstore/IVArray/IVArray.h b/src/KVstore/IVArray/IVArray.h similarity index 100% rename from KVstore/IVArray/IVArray.h rename to src/KVstore/IVArray/IVArray.h diff --git a/KVstore/IVArray/IVBlockManager.cpp b/src/KVstore/IVArray/IVBlockManager.cpp similarity index 100% rename from KVstore/IVArray/IVBlockManager.cpp rename to src/KVstore/IVArray/IVBlockManager.cpp diff --git a/KVstore/IVArray/IVBlockManager.h b/src/KVstore/IVArray/IVBlockManager.h similarity index 100% rename from KVstore/IVArray/IVBlockManager.h rename to src/KVstore/IVArray/IVBlockManager.h diff --git a/KVstore/IVArray/IVCacheManager.cpp b/src/KVstore/IVArray/IVCacheManager.cpp similarity index 100% rename from KVstore/IVArray/IVCacheManager.cpp rename to src/KVstore/IVArray/IVCacheManager.cpp diff --git a/KVstore/IVArray/IVCacheManager.h b/src/KVstore/IVArray/IVCacheManager.h similarity index 100% rename from KVstore/IVArray/IVCacheManager.h rename to src/KVstore/IVArray/IVCacheManager.h diff --git a/KVstore/IVArray/IVEntry.cpp b/src/KVstore/IVArray/IVEntry.cpp similarity index 100% rename from KVstore/IVArray/IVEntry.cpp rename to src/KVstore/IVArray/IVEntry.cpp diff --git a/KVstore/IVArray/IVEntry.h b/src/KVstore/IVArray/IVEntry.h similarity index 100% rename from KVstore/IVArray/IVEntry.h rename to 
src/KVstore/IVArray/IVEntry.h diff --git a/KVstore/IVTree/IVTree.cpp b/src/KVstore/IVTree/IVTree.cpp similarity index 100% rename from KVstore/IVTree/IVTree.cpp rename to src/KVstore/IVTree/IVTree.cpp diff --git a/KVstore/IVTree/IVTree.h b/src/KVstore/IVTree/IVTree.h similarity index 100% rename from KVstore/IVTree/IVTree.h rename to src/KVstore/IVTree/IVTree.h diff --git a/KVstore/IVTree/heap/IVHeap.cpp b/src/KVstore/IVTree/heap/IVHeap.cpp similarity index 100% rename from KVstore/IVTree/heap/IVHeap.cpp rename to src/KVstore/IVTree/heap/IVHeap.cpp diff --git a/KVstore/IVTree/heap/IVHeap.h b/src/KVstore/IVTree/heap/IVHeap.h similarity index 100% rename from KVstore/IVTree/heap/IVHeap.h rename to src/KVstore/IVTree/heap/IVHeap.h diff --git a/KVstore/IVTree/node/IVIntlNode.cpp b/src/KVstore/IVTree/node/IVIntlNode.cpp similarity index 100% rename from KVstore/IVTree/node/IVIntlNode.cpp rename to src/KVstore/IVTree/node/IVIntlNode.cpp diff --git a/KVstore/IVTree/node/IVIntlNode.h b/src/KVstore/IVTree/node/IVIntlNode.h similarity index 100% rename from KVstore/IVTree/node/IVIntlNode.h rename to src/KVstore/IVTree/node/IVIntlNode.h diff --git a/KVstore/IVTree/node/IVLeafNode.cpp b/src/KVstore/IVTree/node/IVLeafNode.cpp similarity index 100% rename from KVstore/IVTree/node/IVLeafNode.cpp rename to src/KVstore/IVTree/node/IVLeafNode.cpp diff --git a/KVstore/IVTree/node/IVLeafNode.h b/src/KVstore/IVTree/node/IVLeafNode.h similarity index 100% rename from KVstore/IVTree/node/IVLeafNode.h rename to src/KVstore/IVTree/node/IVLeafNode.h diff --git a/KVstore/IVTree/node/IVNode.cpp b/src/KVstore/IVTree/node/IVNode.cpp similarity index 100% rename from KVstore/IVTree/node/IVNode.cpp rename to src/KVstore/IVTree/node/IVNode.cpp diff --git a/KVstore/IVTree/node/IVNode.h b/src/KVstore/IVTree/node/IVNode.h similarity index 100% rename from KVstore/IVTree/node/IVNode.h rename to src/KVstore/IVTree/node/IVNode.h diff --git a/KVstore/IVTree/storage/IVStorage.cpp b/src/KVstore/IVTree/storage/IVStorage.cpp similarity index 100% rename from KVstore/IVTree/storage/IVStorage.cpp rename to src/KVstore/IVTree/storage/IVStorage.cpp diff --git a/KVstore/IVTree/storage/IVStorage.h b/src/KVstore/IVTree/storage/IVStorage.h similarity index 100% rename from KVstore/IVTree/storage/IVStorage.h rename to src/KVstore/IVTree/storage/IVStorage.h diff --git a/KVstore/KVstore.cpp b/src/KVstore/KVstore.cpp similarity index 100% rename from KVstore/KVstore.cpp rename to src/KVstore/KVstore.cpp diff --git a/KVstore/KVstore.h b/src/KVstore/KVstore.h similarity index 100% rename from KVstore/KVstore.h rename to src/KVstore/KVstore.h diff --git a/KVstore/SITree/SITree.cpp b/src/KVstore/SITree/SITree.cpp similarity index 100% rename from KVstore/SITree/SITree.cpp rename to src/KVstore/SITree/SITree.cpp diff --git a/KVstore/SITree/SITree.h b/src/KVstore/SITree/SITree.h similarity index 100% rename from KVstore/SITree/SITree.h rename to src/KVstore/SITree/SITree.h diff --git a/KVstore/SITree/heap/SIHeap.cpp b/src/KVstore/SITree/heap/SIHeap.cpp similarity index 100% rename from KVstore/SITree/heap/SIHeap.cpp rename to src/KVstore/SITree/heap/SIHeap.cpp diff --git a/KVstore/SITree/heap/SIHeap.h b/src/KVstore/SITree/heap/SIHeap.h similarity index 100% rename from KVstore/SITree/heap/SIHeap.h rename to src/KVstore/SITree/heap/SIHeap.h diff --git a/KVstore/SITree/node/SIIntlNode.cpp b/src/KVstore/SITree/node/SIIntlNode.cpp similarity index 100% rename from KVstore/SITree/node/SIIntlNode.cpp rename to src/KVstore/SITree/node/SIIntlNode.cpp diff 
--git a/KVstore/SITree/node/SIIntlNode.h b/src/KVstore/SITree/node/SIIntlNode.h similarity index 100% rename from KVstore/SITree/node/SIIntlNode.h rename to src/KVstore/SITree/node/SIIntlNode.h diff --git a/KVstore/SITree/node/SILeafNode.cpp b/src/KVstore/SITree/node/SILeafNode.cpp similarity index 100% rename from KVstore/SITree/node/SILeafNode.cpp rename to src/KVstore/SITree/node/SILeafNode.cpp diff --git a/KVstore/SITree/node/SILeafNode.h b/src/KVstore/SITree/node/SILeafNode.h similarity index 100% rename from KVstore/SITree/node/SILeafNode.h rename to src/KVstore/SITree/node/SILeafNode.h diff --git a/KVstore/SITree/node/SINode.cpp b/src/KVstore/SITree/node/SINode.cpp similarity index 100% rename from KVstore/SITree/node/SINode.cpp rename to src/KVstore/SITree/node/SINode.cpp diff --git a/KVstore/SITree/node/SINode.h b/src/KVstore/SITree/node/SINode.h similarity index 100% rename from KVstore/SITree/node/SINode.h rename to src/KVstore/SITree/node/SINode.h diff --git a/KVstore/SITree/storage/SIStorage.cpp b/src/KVstore/SITree/storage/SIStorage.cpp similarity index 100% rename from KVstore/SITree/storage/SIStorage.cpp rename to src/KVstore/SITree/storage/SIStorage.cpp diff --git a/KVstore/SITree/storage/SIStorage.h b/src/KVstore/SITree/storage/SIStorage.h similarity index 100% rename from KVstore/SITree/storage/SIStorage.h rename to src/KVstore/SITree/storage/SIStorage.h diff --git a/KVstore/Tree.h b/src/KVstore/Tree.h similarity index 100% rename from KVstore/Tree.h rename to src/KVstore/Tree.h diff --git a/src/Main/CMakeLists.txt b/src/Main/CMakeLists.txt new file mode 100644 index 00000000..b645da78 --- /dev/null +++ b/src/Main/CMakeLists.txt @@ -0,0 +1,128 @@ +set(LIB_EXE + # Conan Deps + Boost::system + Boost::regex + Boost::thread + minizip::minizip + OpenSSL::SSL + OpenSSL::Crypto + indicators::indicators + rapidjson + log4cplus::log4cplus + CURL::libcurl + # System Deps + OpenMP::OpenMP_CXX + ${LIB_JEMALLOC} + ${LIB_READLINE} + Threads::Threads + # Unmanaged Deps + antlr4-runtime + workflow + Backward::Backward +) + +# set definition if has debug info +if (CMAKE_BUILD_TYPE MATCHES "Deb") + LIST(APPEND LIB_EXE + libdwarf::libdwarf + libelf::libelf + ) +endif () + +set(OBJ_EXE + $ + $ + $ + $ + $ + $ + $ + $ + $ +) + +add_executable(gadd gadd.cpp ${OBJ_EXE}) +target_link_libraries(gadd ${LIB_EXE}) + +add_executable(gsub gsub.cpp ${OBJ_EXE}) +target_link_libraries(gsub ${LIB_EXE}) + +add_executable(gexport gexport.cpp ${OBJ_EXE}) +target_link_libraries(gexport ${LIB_EXE}) + +add_executable(gdrop gdrop.cpp ${OBJ_EXE}) +target_link_libraries(gdrop ${LIB_EXE}) + +add_executable(ginit ginit.cpp ${OBJ_EXE}) +target_link_libraries(ginit ${LIB_EXE}) + +add_executable(shutdown shutdown.cpp $ $) +target_link_libraries(shutdown ${LIB_EXE}) + +add_executable(gmonitor gmonitor.cpp ${OBJ_EXE}) +target_link_libraries(gmonitor ${LIB_EXE}) + +add_executable(gshow gshow.cpp ${OBJ_EXE}) +target_link_libraries(gshow ${LIB_EXE}) + +add_executable(gbuild gbuild.cpp ${OBJ_EXE}) +target_link_libraries(gbuild ${LIB_EXE}) + +add_executable(gquery gquery.cpp ${OBJ_EXE}) +# seems gquery does need readline +target_compile_definitions(gquery PRIVATE READLINE_ON) +target_link_libraries(gquery ${LIB_EXE}) + +add_executable(gserver gserver.cpp ${OBJ_EXE}) +target_link_libraries(gserver ${LIB_EXE}) + +add_executable(gserver_backup_scheduler gserver_backup_scheduler.cpp ${OBJ_EXE}) +target_link_libraries(gserver_backup_scheduler ${LIB_EXE}) + +add_executable(ghttp ghttp.cpp ${OBJ_EXE} $ $) 
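# A minimal sketch of the OBJECT-library pattern used by these targets, with hypothetical
# names (OBJ_EXAMPLE, gexample); the bare "$" tokens in OBJ_EXE and in the add_executable()
# calls here appear to stand for $<TARGET_OBJECTS:...> generator expressions. Each module is
# compiled once as an OBJECT library (for example gstore_grpc and gstore_kvstore, defined
# earlier in this patch) and its objects are linked into every binary together with the
# common LIB_EXE dependency list; the exact object libraries behind OBJ_EXE are an assumption.
set(OBJ_EXAMPLE
    $<TARGET_OBJECTS:gstore_kvstore>
    $<TARGET_OBJECTS:gstore_grpc>
)
add_executable(gexample gexample.cpp ${OBJ_EXAMPLE})   # hypothetical binary for illustration
target_link_libraries(gexample ${LIB_EXE})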
+target_link_libraries(ghttp ${LIB_EXE}) + +add_executable(grpc grpc.cpp ${OBJ_EXE} $) +target_link_libraries(grpc ${LIB_EXE} workflow OpenSSL::SSL OpenSSL::Crypto) + +add_executable(gbackup gbackup.cpp ${OBJ_EXE}) +target_link_libraries(gbackup ${LIB_EXE}) + +add_executable(grestore grestore.cpp ${OBJ_EXE}) +target_link_libraries(grestore ${LIB_EXE}) + +add_executable(gpara gpara.cpp ${OBJ_EXE} $) +target_link_libraries(gpara ${LIB_EXE}) + +add_executable(rollback rollback.cpp ${OBJ_EXE} $) +target_link_libraries(rollback ${LIB_EXE}) + +add_executable(gconsole gconsole.cpp ${OBJ_EXE}) +target_link_libraries(gconsole ${LIB_EXE}) + +SET(gstore_binaries + gadd + gsub + gexport + gdrop + ginit + shutdown + gmonitor + gshow + gbuild + gquery + gserver + gserver_backup_scheduler + ghttp + grpc + gbackup + grestore + gpara + rollback + gconsole +) + +foreach (gstore_binary ${gstore_binaries}) + add_dependencies(${gstore_binary} prepare) + install(TARGETS ${gstore_binary} DESTINATION ${GSTORE_EXE_DIR}) +endforeach () \ No newline at end of file diff --git a/Main/gadd.cpp b/src/Main/gadd.cpp similarity index 97% rename from Main/gadd.cpp rename to src/Main/gadd.cpp index c9796add..01b5ca46 100644 --- a/Main/gadd.cpp +++ b/src/Main/gadd.cpp @@ -107,8 +107,7 @@ int main(int argc, char *argv[]) mkdir(unz_dir_path.c_str(), 0775); if (unzip.unCompress() != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); cout<<"zip file uncompress faild "< #include #include -#include "workflow/HttpMessage.h" -#include "workflow/HttpUtil.h" -#include "workflow/WFServer.h" -#include "workflow/WFHttpServer.h" -#include "workflow/WFFacilities.h" -#include "../Util/WebUrl.h" #include +#include + +#include "../Util/WebUrl.h" + #include "../Util/Util_New.h" #include "../Database/Database.h" #include "../Database/Txn_manager.h" #include "../Util/Util.h" -#include "../tools/rapidjson/document.h" -#include "../tools/rapidjson/prettywriter.h" -#include "../tools/rapidjson/writer.h" -#include "../tools/rapidjson/stringbuffer.h" -#include +// #include "../tools/rapidjson/document.h" +// #include "../tools/rapidjson/prettywriter.h" +// #include "../tools/rapidjson/writer.h" +// #include "../tools/rapidjson/stringbuffer.h" + #include "../Util/IPWhiteList.h" #include "../Util/IPBlackList.h" //#include "../../Util/IPWhiteList.h" //#include "../../Util/IPBlackList.h" +// 3rd party dependencies managed by Conan +#include "rapidjson/document.h" +#include "rapidjson/prettywriter.h" +#include "rapidjson/writer.h" +#include "rapidjson/stringbuffer.h" +#include "workflow/HttpMessage.h" +#include "workflow/HttpUtil.h" +#include "workflow/WFServer.h" +#include "workflow/WFHttpServer.h" +#include "workflow/WFFacilities.h" + #define THREAD_NUM 30 #define MAX_DATABASE_NUM 100 #define MAX_USER_NUM 1000 @@ -498,13 +507,13 @@ string getArgValue(int argc, char* argv[], string argname, string default_value) } /// -/// 启动API Server 启动命令 apiserver -p 9999 -ipallow y -ipdeny y -db lubm -advanced y -/// 其中参数如下: -/// -p(必须):端口 -/// -db(可选):默认为system -/// -ipallow(可选):ip白名单 -/// -ipdeny(可选):ip黑名单 -/// -advanced(可选):是否启用高级功能(load CSR) +/// Start the API Server. Startup command: apiserver -p 9999 -ipallow y -ipdeny y -db lubm -advanced y +/// The parameters are as follows: +/// -p (required): port +/// -db (optional): defaults to system +/// -ipallow (optional): IP whitelist +/// -ipdeny (optional): IP blacklist +/// -advanced (optional): whether to enable advanced features (load CSR) /// /// /// diff --git a/Main/gbackup.cpp b/src/Main/gbackup.cpp 
similarity index 100% rename from Main/gbackup.cpp rename to src/Main/gbackup.cpp diff --git a/Main/gbuild.cpp b/src/Main/gbuild.cpp similarity index 96% rename from Main/gbuild.cpp rename to src/Main/gbuild.cpp index c94d6550..051fc305 100644 --- a/Main/gbuild.cpp +++ b/src/Main/gbuild.cpp @@ -126,8 +126,7 @@ main(int argc, char * argv[]) CompressUtil::UnCompressZip unzip(_rdf, unz_dir_path); if (unzip.unCompress() != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); cout<<"zip file uncompress faild "< silence_sysdb_query(const string &query, vector &_rs) } // remove tmpout file //TODO: check this return value - system("rm -rf bin/.gconsole_tmp_out"); + // system("rm -rf bin/.gconsole_tmp_out"); + Util::remove_path("bin/.gconsole_tmp_out"); return move(retv); } @@ -1191,7 +1192,8 @@ int silence_sysdb_query(const string &query, ResultSet &_rs) } // remove tmpout file //TODO: check this return value - system("rm -rf bin/.gconsole_tmp_out"); + // system("rm -rf bin/.gconsole_tmp_out"); + Util::remove_path("bin/.gconsole_tmp_out"); if ((ret <= -100 && ret != -100) || (ret > -100 && ret < 0)) // select query failed or update query failed { @@ -1994,8 +1996,7 @@ int create_handler(const vector &args) mkdir(unz_dir_path.c_str(), 0775); if (unzip.unCompress() != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); cout<<"zip file uncompress faild "< &args) if (args.size() == 1) { // rm bin/.tmp_nt.nt - system("rm -rf bin/.tmp_nt.nt"); + // system("rm -rf bin/.tmp_nt.nt"); + Util::remove_path("bin/.tmp_nt.nt"); } if (flag == 0) { cout << "Database create failed. " << endl; - system(string("rm -rf " + db_home + "/" + db_name + db_suffix + " " + unz_dir_path).c_str()); + // system(string("rm -rf " + db_home + "/" + db_name + db_suffix + " " + unz_dir_path).c_str()); + Util::remove_path(db_home + "/" + db_name + db_suffix + " " + unz_dir_path); return -1; } @@ -2030,7 +2033,8 @@ int create_handler(const vector &args) if (silence_sysdb_query(record_newdb_sparql, rs)) { cout << "Newly created db record added failed! Database create failed.\nWarn: Please check contents of system" + db_suffix + "." << endl; - system(string("rm -rf " + db_home + "/" + db_name + db_suffix + " " + unz_dir_path).c_str()); + // system(string("rm -rf " + db_home + "/" + db_name + db_suffix + " " + unz_dir_path).c_str()); + Util::remove_path(db_home + "/" + db_name + db_suffix + " " + unz_dir_path); return -1; } } @@ -2057,8 +2061,7 @@ int create_handler(const vector &args) cout<< "See parse error log file for details " << error_log << endl; } _db.save(); - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); } cout << "Database " << db_name << "created successfully. " << endl; @@ -2120,8 +2123,8 @@ int drop_handler(const vector &args) cout << "Warn: Drop info about database " << db_name << " failed! Please check system db. \nNote that " << db_name << db_suffix << " and backlog are removed." << endl; } - string cmd = "rm -r " + db_home + "/" + db_name + db_suffix; - system(cmd.c_str()); + string cmd = db_home + "/" + db_name + db_suffix; + Util::remove_path(cmd); Util::delete_backuplog(db_name); cout << "Database " << db_name << " dropped successfully." 
<< endl; diff --git a/Main/gdrop.cpp b/src/Main/gdrop.cpp similarity index 98% rename from Main/gdrop.cpp rename to src/Main/gdrop.cpp index 78e576a3..8eb1937f 100644 --- a/Main/gdrop.cpp +++ b/src/Main/gdrop.cpp @@ -106,8 +106,7 @@ int main(int argc, char * argv[]) msg = "update failed."; } cout<<"Delete the database info from system database successfully!"< _len_suffix && db_name.substr(len - _len_suffix, _len_suffix) == _db_suffix) - { - cout<<"The database name can not end with " + _db_suffix + "! Input \"bin/gexport -h\" for help." << endl; - return 0; - } - filepath= Util::getArgValue(argc, argv, "f", "file"); - std::string zip_path; - if (filepath.empty()) - { - filepath = db_name + "_" + Util::get_timestamp() + ".nt"; - zip_path = db_name + "_" + Util::get_timestamp() + ".zip"; - } - else - { - if (filepath[filepath.length() - 1] != '/') - filepath = filepath + "/"; - if (!Util::dir_exist(filepath)) - Util::create_dirs(filepath); - zip_path = filepath + db_name + "_" + Util::get_timestamp() + ".zip"; - filepath = filepath + db_name + "_" + Util::get_timestamp() + ".nt"; - } - cout << "gexport..." << endl; - - Database system_db("system"); - system_db.load(); - - string sparql = "ASK WHERE{<" + db_name + "> \"already_built\".}"; - ResultSet ask_rs; - FILE* ask_ofp = stdout; - // todo: check this return value - system_db.query(sparql, ask_rs, ask_ofp); - // int ret = system_db.query(sparql, ask_rs, ask_ofp); - if (ask_rs.answer[0][0] == "\"false\"^^") - { - cout<<"The database does not exist."< 3 && db_name.substr(len - 3, 3) == ".db") - { - cout << "The database name can not end with .db" << endl; - return 0; - } - filepath = db_name + ".nt"; - } - else if (argc == 3) - { - db_name = argv[1]; - int len = db_name.length(); - if (db_name.length() > 3 && db_name.substr(len - 3, 3) == ".db") - { - cout << "The database name can not end with .db" << endl; - return 0; - } - filepath = argv[2]; - if(filepath[filepath.length()-1] != '/') - filepath = filepath + "/"; - if(!boost::filesystem::exists(filepath)) - boost::filesystem::create_directories(filepath); - filepath = filepath + db_name + ".nt"; - } - - cout << "gexport..." << endl; - - Database system_db("system"); - system_db.load(); - - string sparql = "ASK WHERE{<" + db_name + "> \"already_built\".}"; - ResultSet ask_rs; - FILE* ask_ofp = stdout; - int ret = system_db.query(sparql, ask_rs, ask_ofp); - if (ask_rs.answer[0][0] == "\"false\"^^") - { - cout << "The database does not exist." << endl; - return 0; - } - - cout << "start exporting the database......" << endl; - Database _db(db_name); - _db.load(); - cout << "finish loading" << endl; - - FILE* ofp = fopen(filepath.c_str(), "w"); - _db.export_db(ofp); - fflush(ofp); - fclose(ofp); - ofp = NULL; - cout << "finish exporting the database." 
<< endl; - - return 0;*/ -} +/*============================================================================= +# Filename: gexport.cpp +# Author: suxunbin,liwenjie +# Last Modified: 2021-8-15 23:15:16 +# Description: export a database to get .nt file +=============================================================================*/ + +#include "../Database/Database.h" +#include "../Util/Util.h" +#include "../Util/CompressFileUtil.h" +//#include "../Util/Slog.h" +using namespace std; + +int +main(int argc, char * argv[]) +{ + Util util; + //Log.init("slog.properties"); + string _db_home = util.getConfigureValue("db_home"); + string _db_suffix = util.getConfigureValue("db_suffix"); + size_t _len_suffix = _db_suffix.length(); + string db_name; + string filepath; + if (argc < 2) + { + /*cout << "please input the complete command:\t" << endl; + cout << "\t bin/gadd -h" << endl;*/ + cout<<"Invalid arguments! Input \"bin/gexport -h\" for help."< _len_suffix && db_name.substr(len - _len_suffix, _len_suffix) == _db_suffix) + { + cout<<"The database name can not end with " + _db_suffix + "! Input \"bin/gexport -h\" for help." << endl; + return 0; + } + filepath= Util::getArgValue(argc, argv, "f", "file"); + std::string zip_path; + if (filepath.empty()) + { + filepath = db_name + "_" + Util::get_timestamp() + ".nt"; + zip_path = db_name + "_" + Util::get_timestamp() + ".zip"; + } + else + { + if (filepath[filepath.length() - 1] != '/') + filepath = filepath + "/"; + if (!Util::dir_exist(filepath)) + Util::create_dirs(filepath); + zip_path = filepath + db_name + "_" + Util::get_timestamp() + ".zip"; + filepath = filepath + db_name + "_" + Util::get_timestamp() + ".nt"; + } + cout << "gexport..." << endl; + + Database system_db("system"); + system_db.load(); + + string sparql = "ASK WHERE{<" + db_name + "> \"already_built\".}"; + ResultSet ask_rs; + FILE* ask_ofp = stdout; + // todo: check this return value + system_db.query(sparql, ask_rs, ask_ofp); + // int ret = system_db.query(sparql, ask_rs, ask_ofp); + if (ask_rs.answer[0][0] == "\"false\"^^") + { + cout<<"The database does not exist."< 3 && db_name.substr(len - 3, 3) == ".db") + { + cout << "The database name can not end with .db" << endl; + return 0; + } + filepath = db_name + ".nt"; + } + else if (argc == 3) + { + db_name = argv[1]; + int len = db_name.length(); + if (db_name.length() > 3 && db_name.substr(len - 3, 3) == ".db") + { + cout << "The database name can not end with .db" << endl; + return 0; + } + filepath = argv[2]; + if(filepath[filepath.length()-1] != '/') + filepath = filepath + "/"; + if(!boost::filesystem::exists(filepath)) + boost::filesystem::create_directories(filepath); + filepath = filepath + db_name + ".nt"; + } + + cout << "gexport..." << endl; + + Database system_db("system"); + system_db.load(); + + string sparql = "ASK WHERE{<" + db_name + "> \"already_built\".}"; + ResultSet ask_rs; + FILE* ask_ofp = stdout; + int ret = system_db.query(sparql, ask_rs, ask_ofp); + if (ask_rs.answer[0][0] == "\"false\"^^") + { + cout << "The database does not exist." << endl; + return 0; + } + + cout << "start exporting the database......" << endl; + Database _db(db_name); + _db.load(); + cout << "finish loading" << endl; + + FILE* ofp = fopen(filepath.c_str(), "w"); + _db.export_db(ofp); + fflush(ofp); + fclose(ofp); + ofp = NULL; + cout << "finish exporting the database." 
<< endl; + + return 0;*/ +} diff --git a/Main/ghttp.cpp b/src/Main/ghttp.cpp similarity index 93% rename from Main/ghttp.cpp rename to src/Main/ghttp.cpp index 4173a35e..feb71885 100644 --- a/Main/ghttp.cpp +++ b/src/Main/ghttp.cpp @@ -64,7 +64,7 @@ void shutdown_handler(const HttpServer &server, const shared_ptr &response, const shared_ptr &request); -void download_handler(const HttpServer &server, const shared_ptr &response, const shared_ptr &request); +void download_handler(const HttpServer &server, const shared_ptr &response, const shared_ptr &request, string request_type); void signalHandler(int signum); @@ -80,7 +80,7 @@ void build_thread_new(const shared_ptr &request, const shar void load_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name, string remote_ip, string port, bool load_csr); -void monitor_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name); +void monitor_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name, string disk); void unload_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name); @@ -606,8 +606,7 @@ int main(int argc, char *argv[]) string system_port_path = _db_home + "/system" + _db_suffix + "/port.txt"; if (Util::file_exist(system_port_path)) { - string cmd = "rm -f " + system_port_path; - system(cmd.c_str()); + Util::remove_path(system_port_path); } SLOG_WARN("Stopped abnormally, restarting server..."); latch.lockExclusive(); @@ -678,7 +677,12 @@ int initialize(unsigned short port, std::string db_name, bool load_src) server.resource["/file/download"]["POST"] = [&server](shared_ptr response, shared_ptr request) { - download_handler(server, response, request); + download_handler(server, response, request, "POST"); + }; + + server.resource["/file/download"]["GET"] = [&server](shared_ptr response, shared_ptr request) + { + download_handler(server, response, request, "GET"); }; server.resource["/file/download"]["OPTIONS"] = [&server](shared_ptr response, shared_ptr request) @@ -810,7 +814,7 @@ void build_thread_new(const shared_ptr &request, const shar // return; // } if (!db_path.empty()) - { + { if (db_path == apiUtil->get_system_path()) { string error = "You have no rights to access system files."; @@ -830,11 +834,10 @@ void build_thread_new(const shared_ptr &request, const shar sendResponseMsg(1003, result, operation, request, response); return; } - // check if database named [db_name] is already built if (apiUtil->check_db_exist(db_name)) { - string error = "database already built."; + string error = "Database already built."; sendResponseMsg(1004, error, operation, request, response); return; } @@ -872,8 +875,7 @@ void build_thread_new(const shared_ptr &request, const shar code = upfile.unCompress(); if (code != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); string error = "uncompress is failed error."; sendResponseMsg(code, error, operation, request, response); return; @@ -881,170 +883,96 @@ void build_thread_new(const shared_ptr &request, const shar db_path = upfile.getMaxFilePath(); upfile.getFileList(zip_files, db_path); } - - Socket socket; string _db_path = _db_home + "/" + db_name + _db_suffix; string dataset = db_path; string database = db_name; SLOG_DEBUG("Import dataset to build database..."); SLOG_DEBUG("DB_store: " + database + "\tRDF_data: " + dataset); Database *current_database = new Database(database); - bool flag = true; - if (!port.empty()) - { - 
socket.create(); - socket.connect(remote_ip, Util::string2int(port)); - if (dataset.empty()) - flag = current_database->build(dataset, socket); - else - flag = current_database->BuildEmptyDB(); - string msg = "Build database done."; - string resJson = CreateJson(0, msg, 0); - socket.send(resJson); - socket.close(); - } - else if (!dataset.empty()) - { + bool flag = false; + if (!dataset.empty()) flag = current_database->build(dataset); - } else - { flag = current_database->BuildEmptyDB(); - } delete current_database; current_database = NULL; - if (!flag) + if (flag) { - string error = "Import RDF file to database failed."; - string cmd = "rm -r " + _db_path; - system(cmd.c_str()); - sendResponseMsg(1005, error, operation, request, response); - if (!unz_dir_path.empty()) + // if zip file then excuse batchInsert + if (is_zip && zip_files.size() > 0) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); - } - return; - } - - ofstream f; - f.open(_db_path + "/success.txt"); - f.close(); - - // by default, one can query or load or unload the database that is built by itself, so add the database name to the privilege set of the user - if (apiUtil->init_privilege(username, db_name) == 0) - { - string error = "init privilege failed."; - sendResponseMsg(1006, error, operation, request, response); - if (!unz_dir_path.empty()) - { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); - } - return; - } - SLOG_DEBUG("init privilege succeed after build."); - - // add database information to system.db - if (apiUtil->build_db_user_privilege(db_name, username)) - { - string success = "Import RDF file to database done."; - string error_log = _db_path + "/parse_error.log"; - // exclude Info line - size_t parse_error_num = Util::count_lines(error_log); - if (parse_error_num > 0) - parse_error_num = parse_error_num - 1; - rapidjson::Document doc; - doc.SetObject(); - Document::AllocatorType &allocator = doc.GetAllocator(); - doc.AddMember("StatusCode", 0, allocator); - doc.AddMember("StatusMsg", StringRef(success.c_str()), allocator); - doc.AddMember("failed_num", parse_error_num, allocator); - if (parse_error_num > 0) - { - SLOG_ERROR("RDF parse error num " + to_string(parse_error_num)); - SLOG_ERROR("See log file for details " + error_log); - } - if (is_zip) - { - auto error_responce = [operation,request,response,db_name,parse_error_num](const std::string& error) - { - rapidjson::Document doc; - doc.SetObject(); - Document::AllocatorType &allocator = doc.GetAllocator(); - doc.AddMember("StatusCode", 0, allocator); - doc.AddMember("StatusMsg", StringRef(error.c_str()), allocator); - doc.AddMember("failed_num", parse_error_num, allocator); - Util::add_backuplog(db_name); - sendResponseMsg(doc, operation, request, response); - }; - if (!apiUtil->trywrlock_database(db_name)) - { - std::string error = "The operation can not been excuted due to loss of lock."; - error_responce(error); - } - else + current_database = new Database(db_name); + bool rt = current_database->load(false); + if (!rt) { - Database *cur_database = new Database(db_name); - bool rt = cur_database->load(true); - if (!rt) - { - std::string error = "The database load faild."; - error_responce(error); - apiUtil->unlock_database(db_name); - } - else + result = "Import RDF file to database failed: load error."; + Util::remove_path(_db_path); + if (!unz_dir_path.empty()) { - apiUtil->add_database(db_name, cur_database); - if (apiUtil->insert_txn_managers(cur_database, db_name) == false) - { - SLOG_WARN("when load 
insert_txn_managers fail."); - } - unsigned success_num = 0; - unsigned parse_insert_error_num = 0; - unsigned total_num = Util::count_lines(error_log); - for (std::string rdf_zip : zip_files) - { - SLOG_DEBUG("begin insert data from " + rdf_zip); - success_num += cur_database->batch_insert(rdf_zip, false, nullptr); - } - parse_insert_error_num = Util::count_lines(error_log)-total_num-zip_files.size(); - cur_database->save(); - apiUtil->db_checkpoint(db_name); - apiUtil->delete_from_databases(db_name); - apiUtil->unlock_database(db_name); - - rapidjson::Document doc; - doc.SetObject(); - Document::AllocatorType &allocator = doc.GetAllocator(); - doc.AddMember("StatusCode", 0, allocator); - doc.AddMember("StatusMsg", StringRef(success.c_str()), allocator); - doc.AddMember("failed_num", parse_error_num, allocator); - doc.AddMember("success_num", success_num, allocator); - doc.AddMember("failed_insert_num", parse_insert_error_num, allocator); - Util::add_backuplog(db_name); - sendResponseMsg(doc, operation, request, response); + Util::remove_path(unz_dir_path); } + sendResponseMsg(1005, result, operation, request, response); + return; } - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + for (std::string rdf_zip : zip_files) + { + current_database->batch_insert(rdf_zip, false, nullptr); + } + current_database->save(); + current_database->unload(); + delete current_database; + current_database = NULL; } - else + // init database info and privilege + if (apiUtil->build_db_user_privilege(db_name, username) + && apiUtil->init_privilege(username, db_name)) { - rapidjson::Document doc; - doc.SetObject(); - Document::AllocatorType &allocator = doc.GetAllocator(); - doc.AddMember("StatusCode", 0, allocator); - doc.AddMember("StatusMsg", StringRef(success.c_str()), allocator); - doc.AddMember("failed_num", parse_error_num, allocator); + // add success.txt + ofstream f; + f.open(_db_path + "/success.txt"); + f.close(); + // add backup.log Util::add_backuplog(db_name); - sendResponseMsg(doc, operation, request, response); + // build response result + result = "Import RDF file to database done."; + string error_log = _db_path + "/parse_error.log"; + size_t parse_error_num = Util::count_lines(error_log); + // exclude Info line + if (parse_error_num > 0) + parse_error_num = parse_error_num - 1; + if (zip_files.size() > 0) + parse_error_num = parse_error_num - zip_files.size(); + rapidjson::Document resp_data; + resp_data.SetObject(); + rapidjson::Document::AllocatorType &allocator = resp_data.GetAllocator(); + resp_data.AddMember("StatusCode", 0, allocator); + resp_data.AddMember("StatusMsg", StringRef(result.c_str()), allocator); + resp_data.AddMember("failed_num", parse_error_num, allocator); + if (parse_error_num > 0) + { + SLOG_ERROR("RDF parse error num " + to_string(parse_error_num)); + SLOG_ERROR("See log file for details " + error_log); + } + // remove unzip dir + if (!unz_dir_path.empty()) + { + Util::remove_path(unz_dir_path); + } + Util::add_backuplog(db_name); + sendResponseMsg(resp_data, operation, request, response); + return; } } else { - string error = "add database information to system failed."; - sendResponseMsg(1006, error, operation, request, response); + result = "Import RDF file to database failed."; + rmdir(_db_path.c_str()); + Util::remove_path(_db_path); + if (!unz_dir_path.empty()) + { + Util::remove_path(unz_dir_path); + } + sendResponseMsg(1005, result, operation, request, response); } } catch (const std::exception &e) @@ -1066,11 +994,11 @@ void 
sendResponseMsg(int code, string msg, std::string operation, const shared_p string resJson = CreateJson(code, msg, 0); if (code == 0) { - SLOG_DEBUG("response result:\n" + resJson); + SLOG_DEBUG("response result:" + resJson); } else { - SLOG_ERROR("response result:\n" + resJson); + SLOG_ERROR("response result:" + resJson); } string remote_ip = getRemoteIp(request); apiUtil->write_access_log(operation, remote_ip, code, msg); @@ -1105,7 +1033,7 @@ void sendResponseMsg(rapidjson::Document &doc, std::string operation, const shar doc.Accept(resWriter); string json_str = resBuffer.GetString(); - SLOG_DEBUG("response result:\n" + json_str); + SLOG_DEBUG("response result: " + json_str); *response << "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\nContent-Length: " << json_str.length() << "\r\n\r\n" << json_str; } @@ -1150,7 +1078,8 @@ void load_thread_new(const shared_ptr &request, const share return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { if (!apiUtil->trywrlock_database(db_name)) @@ -1237,7 +1166,7 @@ void load_thread_new(const shared_ptr &request, const share * @param {string} db_name * @return {*} */ -void monitor_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name) +void monitor_thread_new(const shared_ptr &request, const shared_ptr &response, string db_name, string disk) { string operation = "monitor"; try @@ -1256,15 +1185,9 @@ void monitor_thread_new(const shared_ptr &request, const sh sendResponseMsg(1004, error, operation, request, response); return; } - // Database *current_database = apiUtil->get_database(db_name); - // if (current_database == NULL) - // { - // error = "Database not load yet."; - // sendResponseMsg(1004, error, operation, request, response); - // return; - // } - DatabaseInfo *database_info = apiUtil->get_databaseinfo(db_name); - if (apiUtil->tryrdlock_databaseinfo(database_info) == false) + struct DatabaseInfo *database_info; + apiUtil->get_databaseinfo(db_name, database_info); + if (apiUtil->rdlock_databaseinfo(database_info) == false) { string error = "Unable to monitor due to loss of lock"; sendResponseMsg(1007, error, operation, request, response); @@ -1273,9 +1196,13 @@ void monitor_thread_new(const shared_ptr &request, const sh string creator = database_info->getCreator(); string time = database_info->getTime(); apiUtil->unlock_databaseinfo(database_info); - Database* current_database = new Database(db_name); - current_database->loadDBInfoFile(); - current_database->loadStatisticsInfoFile(); + Database* current_database; + apiUtil->get_database(db_name, current_database); + if (current_database == NULL) { + current_database = new Database(db_name); + current_database->loadDBInfoFile(); + current_database->loadStatisticsInfoFile(); + } unordered_map umap = current_database->getStatisticsInfo(); rapidjson::Document doc; doc.SetObject(); @@ -1302,14 +1229,16 @@ void monitor_thread_new(const shared_ptr &request, const sh doc.AddMember("subjectNum", current_database->getSubNum(), allocator); doc.AddMember("predicateNum", current_database->getPreNum(), allocator); doc.AddMember("connectionNum", apiUtil->get_connection_num(), allocator); - string db_path = _db_home + "/" + db_name + _db_suffix; - db_path = Util::getExactPath(db_path.c_str()); - string real_path = Util::getExactPath(db_path.c_str()); unsigned diskUsed = 0; - if (!real_path.empty()) { - long long unsigned count_size_byte = 
Util::count_dir_size(real_path.c_str()); - // byte to MB - diskUsed = count_size_byte>>20; + if (disk != "0") { + string db_path = _db_home + "/" + db_name + _db_suffix; + db_path = Util::getExactPath(db_path.c_str()); + string real_path = Util::getExactPath(db_path.c_str()); + if (!real_path.empty()) { + long long unsigned count_size_byte = Util::count_dir_size(real_path.c_str()); + // byte to MB + diskUsed = count_size_byte>>20; + } } doc.AddMember("diskUsed", diskUsed, allocator); doc.AddMember("subjectList", subjectList, allocator); @@ -1352,7 +1281,8 @@ void unload_thread_new(const shared_ptr &request, const sha sendResponseMsg(1004, error, operation, request, response); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -1361,19 +1291,9 @@ void unload_thread_new(const shared_ptr &request, const sha } else { - if (apiUtil->find_txn_managers(db_name) == false) - { - error = "transaction manager can not find the database"; - - apiUtil->unlock_database_map(); - apiUtil->unlock_databaseinfo(db_info); - sendResponseMsg(1008, error, operation, request, response); - return; - } apiUtil->db_checkpoint(db_name); apiUtil->delete_from_databases(db_name); apiUtil->unlock_databaseinfo(db_info); - string success = "Database unloaded."; sendResponseMsg(0, success, operation, request, response); } @@ -1412,7 +1332,8 @@ void drop_thread_new(const shared_ptr &request, const share sendResponseMsg(1004, error, operation, request, response); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -1447,14 +1368,18 @@ void drop_thread_new(const shared_ptr &request, const share return; } SLOG_DEBUG("remove " + db_name + " from the already build database list success."); - string cmd; string db_path = _db_home + "/" + db_name + _db_suffix; if (is_backup == "false") - cmd = "rm -r " + db_path; + { + Util::remove_path(db_path); + SLOG_DEBUG("delete the file: " + db_path); + } else - cmd = "mv " + db_path + " " + _db_home + "/" + db_name + ".bak"; - SLOG_DEBUG("delete the file: " + cmd); - system(cmd.c_str()); + { + string cmd = "mv " + db_path + " " + _db_home + "/" + db_name + ".bak"; + SLOG_DEBUG("delete the file: " + cmd); + system(cmd.c_str()); + } Util::delete_backuplog(db_name); string success = "Database " + db_name + " dropped."; sendResponseMsg(0, success, operation, request, response); @@ -1731,75 +1656,96 @@ void userPrivilegeManage_thread_new(const shared_ptr &reque } Util::split(privilege, ",", privileges); + vector privilegeTypes; for (unsigned i = 0; i < privileges.size(); i++) { string temp_privilege_int = privileges[i]; - string temp_privilege = ""; if (temp_privilege_int.empty()) { continue; } if (temp_privilege_int == "1") { - temp_privilege = "query"; + privilegeTypes.push_back("query"); } else if (temp_privilege_int == "2") { - temp_privilege = "load"; + privilegeTypes.push_back("load"); } else if (temp_privilege_int == "3") { - temp_privilege = "unload"; + privilegeTypes.push_back("unload"); } else if (temp_privilege_int == "4") { - temp_privilege = "update"; + privilegeTypes.push_back("update"); } else if (temp_privilege_int == "5") 
{ - temp_privilege = "backup"; + privilegeTypes.push_back("backup"); } else if (temp_privilege_int == "6") { - temp_privilege = "restore"; + privilegeTypes.push_back("restore"); } else if (temp_privilege_int == "7") { - temp_privilege = "export"; - } - if (temp_privilege.empty() == false) + privilegeTypes.push_back("export"); + } + else + { + SLOG_DEBUG("The privilege " + temp_privilege_int + " undefined."); + continue; + } + } + if (privilegeTypes.size() > 0) + { + string privilegeNames=""; + for (size_t i = 0; i < privilegeTypes.size(); i++) { - if (type == "1") + if (i > 0) { - if (apiUtil->add_privilege(username, temp_privilege, db_name) == 0) - { - result = result + "add privilege " + temp_privilege + " failed. \r\n"; - } - else - { - result = result + "add privilege " + temp_privilege + " successfully. \r\n"; - } + privilegeNames = privilegeNames + ","; + } + privilegeNames = privilegeNames + privilegeTypes[i]; + } + if (type == "1") + { + if (apiUtil->add_privilege(username, privilegeTypes, db_name) == 0) + { + result = result + "add privilege " + privilegeNames + " failed."; + sendResponseMsg(1005, result, operation, request, response); } - else if (type == "2") + else { - if (apiUtil->del_privilege(username, temp_privilege, db_name) == 0) - { - result = result + "delete privilege " + temp_privilege + " failed. \r\n"; - } - else - { - result = result + "delete privilege " + temp_privilege + " successfully. \r\n"; - } + result = result + "add privilege " + privilegeNames + " successfully."; + sendResponseMsg(0, result, operation, request, response); + } + } + else if (type == "2") + { + if (apiUtil->del_privilege(username, privilegeTypes, db_name) == 0) + { + result = result + "delete privilege " + privilegeNames + " failed."; + sendResponseMsg(1005, result, operation, request, response); } else { - result = "the operation type is not support."; - sendResponseMsg(1003, result, operation, request, response); - return; + result = result + "delete privilege " + privilegeNames + " successfully."; + sendResponseMsg(0, result, operation, request, response); } } - } - sendResponseMsg(0, result, operation, request, response); + else + { + result = "the operation type is not support."; + sendResponseMsg(1003, result, operation, request, response); + } + } + else + { + result = "not match any valid privilege, valid values between 1 and 7."; + sendResponseMsg(1003, result, operation, request, response); + } } } catch (const std::exception &e) @@ -1873,14 +1819,16 @@ void backup_thread_new(const shared_ptr &request, const sha sendResponseMsg(1004, error, operation, request, response); return; } - Database* current_db = apiUtil->get_database(db_name); + Database* current_db; + apiUtil->get_database(db_name, current_db); if (current_db == NULL) { error = "Database not load yet."; sendResponseMsg(1004, error, operation, request, response); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -2054,8 +2002,8 @@ void restore_thread_new(const shared_ptr &request, const sh } } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); - + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { string error = "Unable to restore due to loss of lock"; @@ -2079,11 +2027,10 @@ void 
restore_thread_new(const shared_ptr &request, const sh { // remove old folder string db_path = _db_home + "/" + db_name + _db_suffix; - string sys_cmd = "rm -rf " + db_path; - std::system(sys_cmd.c_str()); + Util::remove_path(db_path); // mv backup folder to database folder string folder_name = Util::get_folder_name(path, db_name); - sys_cmd = "mv " + _db_home + "/" + folder_name + " " + db_path; + string sys_cmd = "mv " + _db_home + "/" + folder_name + " " + db_path; std::system(sys_cmd.c_str()); apiUtil->unlock_databaseinfo(db_info); @@ -2145,7 +2092,7 @@ void query_thread_new(const shared_ptr &request, const shar throw runtime_error("Database not build yet."); } // check database load status - current_database = apiUtil->get_database(db_name); + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { throw runtime_error("Database not load yet."); @@ -2289,15 +2236,52 @@ void query_thread_new(const shared_ptr &request, const shar rapidjson::Writer resWriter(resBuffer); resDoc.Accept(resWriter); string resJson = resBuffer.GetString(); - + auto content = request->header.find("Accept-Encoding"); + if (content != request->header.end()) + { + std::string accept_encoding = content->second; + SLOG_DEBUG("Accept-Encoding: " + accept_encoding); + if (accept_encoding.find("gzip") != std::string::npos) + { + char* compress_ = (char*)malloc(resJson.size()); + if (compress_ != nullptr) + { + size_t compress_size = 0; + int status = CompressUtil::GzipHelper::compress(&resJson, compress_, compress_size); + if (status == 0) + { + *response << "HTTP/1.1 200 OK" + << "\r\nContent-Type: application/json" + << "\r\nContent-Length: " << compress_size + << "\r\nCache-Control: no-cache" + << "\r\nPragma: no-cache" + << "\r\nExpires: 0" + << "\r\nContent-Encoding: gzip" + << "\r\n\r\n"; + char buffer; + for (size_t i = 0; i < compress_size; ++i) + { + buffer = compress_[i]; + *response << buffer; + } + free(compress_); + return; + } + else + { + free(compress_); + } + } + } + } *response << "HTTP/1.1 200 OK" - << "\r\nContent-Type: application/json" - << "\r\nContent-Length: " << resJson.length() - << "\r\nCache-Control: no-cache" - << "\r\nPragma: no-cache" - << "\r\nExpires: 0" - << "\r\n\r\n" - << resJson; + << "\r\nContent-Type: application/json" + << "\r\nContent-Length: " << resJson.length() + << "\r\nCache-Control: no-cache" + << "\r\nPragma: no-cache" + << "\r\nExpires: 0" + << "\r\n\r\n" + << resJson; } } else if (format == "file") @@ -2371,7 +2355,7 @@ void query_thread_new(const shared_ptr &request, const shar PrettyWriter resWriter(resBuffer); resDoc.Accept(resWriter); string resJson = resBuffer.GetString(); - *response << "HTTP/1.1 200 OK" + *response << "HTTP/1.1 200 OK" << "\r\nContent-Type: application/json" << "\r\nContent-Length: " << resJson.length() << "\r\nCache-Control: no-cache" @@ -2383,7 +2367,7 @@ void query_thread_new(const shared_ptr &request, const shar } else if (format == "sparql-results+json") { - *response << "HTTP/1.1 200 OK" + *response << "HTTP/1.1 200 OK" << "\r\nContent-Type: application/sparql-results+json" << "\r\nContent-Length: " << success.length() << "\r\nCache-Control: no-cache" @@ -2459,7 +2443,8 @@ void export_thread_new(const shared_ptr &request, const sha return; } // check if database named [db_name] is already load - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { string error = "Database not load yet."; @@ 
-2502,14 +2487,12 @@ void export_thread_new(const shared_ptr &request, const sha { error = "export compress fail."; sendResponseMsg(1005, error, operation, request, response); - std::string cmd = "rm -f " + db_path + " " + zip_path; - system(cmd.c_str()); + Util::remove_path(db_path + " " + zip_path); return; } resDoc.AddMember("filepath", StringRef(zip_path.c_str()), allocator); sendResponseMsg(resDoc, operation, request, response); - std::string cmd = "rm -f " + db_path; - system(cmd.c_str()); + Util::remove_path(db_path); } } catch (const std::exception &e) @@ -2804,7 +2787,8 @@ void commit_thread_new(const shared_ptr &request, const sha sendResponseMsg(1004, error, operation, request, response); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -3019,7 +3003,8 @@ void checkpoint_thread_new(const shared_ptr &request, const sendResponseMsg(1004, error, operation, request, response); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -3187,15 +3172,15 @@ void batchInsert_thread_new(const shared_ptr &request, cons code = upfile.unCompress(); if (code != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); string error = "uncompress is failed error."; sendResponseMsg(code, error, operation, request, response); return; } upfile.getFileList(zip_files, ""); } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (apiUtil->trywrlock_database(db_name) == false) { error = "The operation can not been excuted due to loss of lock."; @@ -3256,8 +3241,7 @@ void batchInsert_thread_new(const shared_ptr &request, cons } if (!unz_dir_path.empty()) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); } } catch (const std::exception &e) @@ -3312,7 +3296,8 @@ void batchRemove_thread_new(const shared_ptr &request, cons sendResponseMsg(1004, error, operation, request, response); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (apiUtil->trywrlock_database(db_name) == false) { error = "The operation can not been excuted due to loss of lock."; @@ -3537,7 +3522,26 @@ void request_thread(const shared_ptr &response, // monitor database else if (operation == "monitor") { - monitor_thread_new(request, response, db_name); + string disk = "1"; + try + { + if (request_type == "GET") + { + disk = WebUrl::CutParam(url, "disk"); + } + else if (request_type == "POST") + { + if (document.HasMember("disk") && document["disk"].IsString()) + { + disk = document["disk"].GetString(); + } + } + } + catch (...) 
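// Illustrative sketch: the query handlers above now honour "Accept-Encoding: gzip" by
// compressing the JSON payload with CompressUtil::GzipHelper::compress(), whose source is
// not part of this patch. The zlib-based function below (gzip_compress is an illustrative
// name, not the project's API) shows roughly what such a helper has to do; it returns 0 on
// success, matching how the call site checks the status.
#include <zlib.h>
#include <cstring>
#include <string>

static int gzip_compress(const std::string &in, char *out, size_t &out_size)
{
    z_stream strm;
    std::memset(&strm, 0, sizeof(strm));            // zalloc/zfree/opaque = Z_NULL
    // windowBits = 15 + 16 asks zlib to emit a gzip header and trailer
    if (deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED, 15 + 16, 8,
                     Z_DEFAULT_STRATEGY) != Z_OK)
        return -1;
    strm.next_in = (Bytef *)in.data();
    strm.avail_in = (uInt)in.size();
    strm.next_out = (Bytef *)out;
    strm.avail_out = (uInt)in.size();               // the caller allocates in.size() bytes
    int ret = deflate(&strm, Z_FINISH);             // single-shot compression
    out_size = in.size() - strm.avail_out;
    deflateEnd(&strm);
    // Z_STREAM_END means all output fitted into the buffer; anything else (for example
    // incompressible data that would grow) is treated as "send the response uncompressed",
    // which is the fallback the handler above takes when compress() reports failure.
    return ret == Z_STREAM_END ? 0 : -1;
}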
+ { + + } + monitor_thread_new(request, response, db_name, disk); } // unload database else if (operation == "unload") @@ -4447,7 +4451,7 @@ void shutdown_handler(const HttpServer &server, const shared_ptrregister_callback([](std::ios_base::event __e, ios_base& __b, int __i){ SLOG_DEBUG("Server stopped successfully."); @@ -4477,7 +4481,7 @@ void upload_handler(const HttpServer &server, const shared_ptrpath; ss += "\nhttp_version: " + request->http_version; ss += "\nrequest_time: " + Util::get_date_time(); @@ -4621,7 +4625,7 @@ void upload_handler(const HttpServer &server, const shared_ptr &response, const shared_ptr &request) +void download_handler(const HttpServer &server, const shared_ptr &response, const shared_ptr &request, string request_type) { string operation = "downloadfile"; string thread_id = Util::getThreadID(); @@ -4636,43 +4640,75 @@ void download_handler(const HttpServer &server, const shared_ptrcontent.string(); - string ss; - ss += "\n------------------------ ghttp-api ------------------------"; - ss += "\nthread_id: " + thread_id; - ss += "\nremote_ip: " + remote_ip; - ss += "\noperation: " + operation; - ss += "\nmethod: POST"; - ss += "\nrequest_path: " + request->path; - ss += "\nhttp_version: " + request->http_version; - ss += "\nrequest_time: " + Util::get_date_time(); - ss += "\nrequest_body: \n" + strParams; - ss += "\n----------------------------------------------------------"; - SLOG_DEBUG(ss); - - std::map form_data = parse_post_body(strParams); - username = ""; - password = ""; - if (form_data.find("username") != form_data.end()) - { - username = UrlDecode(form_data.at("username")); - } - if (form_data.find("password") != form_data.end()) - { - password = UrlDecode(form_data.at("password")); - } - if (form_data.find("encryption") != form_data.end()) - { - encryption = UrlDecode(form_data.at("encryption")); - } - else + string url; + if (request_type == "GET") { - encryption = "0"; + url = request->path; + url = UrlDecode(url); + string ss; + ss += "\n------------------------ ghttp-api ------------------------"; + ss += "\nthread_id: " + thread_id; + ss += "\nremote_ip: " + remote_ip; + ss += "\noperation: " + operation; + ss += "\nmethod: GET"; + ss += "\nrequest_path: " + request->path; + ss += "\nhttp_version: " + request->http_version; + ss += "\nrequest_time: " + Util::get_date_time(); + ss += "\n----------------------------------------------------------"; + SLOG_DEBUG(ss); + + username = WebUrl::CutParam(url, "username"); + password = WebUrl::CutParam(url, "password"); + encryption = WebUrl::CutParam(url, "encryption"); + filepath = WebUrl::CutParam(url, "filepath"); + if (encryption.empty()) { + encryption = "0"; + } + + username = UrlDecode(username); + password = UrlDecode(password); + filepath = UrlDecode(filepath); } - if (form_data.find("filepath") != form_data.end()) + else if (request_type == "POST") { - filepath = UrlDecode(form_data.at("filepath")); + auto strParams = request->content.string(); + string ss; + ss += "\n------------------------ ghttp-api ------------------------"; + ss += "\nthread_id: " + thread_id; + ss += "\nremote_ip: " + remote_ip; + ss += "\noperation: " + operation; + ss += "\nmethod: POST"; + ss += "\nrequest_path: " + request->path; + ss += "\nhttp_version: " + request->http_version; + ss += "\nrequest_time: " + Util::get_date_time(); + ss += "\nrequest_body: \n" + strParams; + ss += "\n----------------------------------------------------------"; + SLOG_DEBUG(ss); + std::map form_data = parse_post_body(strParams); + 
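// Illustrative sketch: the new GET branch of download_handler pulls its parameters out of
// the query string with WebUrl::CutParam() before UrlDecode(); that helper is not defined
// in this patch, so cut_param_sketch below only approximates the behaviour the branch
// relies on (first match of "?key=" or "&key=", value up to the next '&').
#include <string>

static std::string cut_param_sketch(const std::string &url, const std::string &key)
{
    for (char sep : {'?', '&'})
    {
        std::string token;
        token += sep;
        token += key;
        token += '=';
        std::size_t pos = url.find(token);
        if (pos == std::string::npos)
            continue;                                    // try the other separator
        std::size_t begin = pos + token.size();
        std::size_t end = url.find('&', begin);
        return url.substr(begin, end == std::string::npos ? std::string::npos
                                                          : end - begin);
    }
    return "";                                           // parameter absent -> empty string
}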
username = ""; + password = ""; + if (form_data.find("username") != form_data.end()) + { + username = UrlDecode(form_data.at("username")); + } + if (form_data.find("password") != form_data.end()) + { + password = UrlDecode(form_data.at("password")); + } + if (form_data.find("encryption") != form_data.end()) + { + encryption = UrlDecode(form_data.at("encryption")); + } + else + { + encryption = "0"; + } + if (form_data.find("filepath") != form_data.end()) + { + filepath = UrlDecode(form_data.at("filepath")); + } } + string error=""; error = apiUtil->check_param_value("username", username); if (error.empty() == false) @@ -4856,8 +4892,8 @@ std::string fileName(const std::string &filepath) std::string CreateJson(int StatusCode, string StatusMsg, bool body, string ResponseBody) { - StringBuffer s; - PrettyWriter writer(s); + rapidjson::StringBuffer s; + rapidjson::Writer writer(s); writer.StartObject(); writer.Key("StatusCode"); writer.Uint(StatusCode); @@ -5336,8 +5372,8 @@ void rename_thread_new(const shared_ptr &request, const sha return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); - + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "Unable to rename due to loss of lock"; diff --git a/Main/ginit.cpp b/src/Main/ginit.cpp similarity index 99% rename from Main/ginit.cpp rename to src/Main/ginit.cpp index 28954d19..cfcfe3f7 100644 --- a/Main/ginit.cpp +++ b/src/Main/ginit.cpp @@ -36,9 +36,7 @@ int main(int argc, char *argv[]) cout << "begin rebuild the system database ...." << endl; if (Util::dir_exist(_db_path)) { - string cmd; - cmd = "rm -r " + _db_path; - system(cmd.c_str()); + Util::remove_path(_db_path); } Database *_db = new Database(_db_name); bool flag = _db->build(_rdf); diff --git a/Main/gmonitor.cpp b/src/Main/gmonitor.cpp similarity index 100% rename from Main/gmonitor.cpp rename to src/Main/gmonitor.cpp diff --git a/Main/gpara.cpp b/src/Main/gpara.cpp similarity index 100% rename from Main/gpara.cpp rename to src/Main/gpara.cpp diff --git a/Main/gquery.cpp b/src/Main/gquery.cpp similarity index 100% rename from Main/gquery.cpp rename to src/Main/gquery.cpp diff --git a/Main/grestore.cpp b/src/Main/grestore.cpp similarity index 98% rename from Main/grestore.cpp rename to src/Main/grestore.cpp index 8f44ba79..78c14e15 100644 --- a/Main/grestore.cpp +++ b/src/Main/grestore.cpp @@ -165,11 +165,10 @@ main(int argc, char * argv[]) } db_path = _db_home + "/" + db_name + _db_suffix; - string sys_cmd = "rm -rf " + db_path; - system(sys_cmd.c_str()); + Util::remove_path(db_path); string folder_name = Util::get_folder_name(backup_path, db_name); - sys_cmd = "mv " + _db_home + "/" + folder_name + ' ' + db_path; + string sys_cmd = "mv " + _db_home + "/" + folder_name + ' ' + db_path; system(sys_cmd.c_str()); cout << "Time: " << Util::get_date_time() << endl; diff --git a/Main/grpc.cpp b/src/Main/grpc.cpp similarity index 93% rename from Main/grpc.cpp rename to src/Main/grpc.cpp index 72e8e706..23e01ef5 100644 --- a/Main/grpc.cpp +++ b/src/Main/grpc.cpp @@ -272,8 +272,7 @@ int main(int argc, char *argv[]) string system_port_path = _db_home + "/system" + _db_suffix + "/port.txt"; if (Util::file_exist(system_port_path)) { - string cmd = "rm -f " + system_port_path; - system(cmd.c_str()); + Util::remove_path(system_port_path); } SLOG_WARN("Stopped abnormally, restarting server..."); latch.lockExclusive(); @@ -407,7 +406,7 @@ void shutdown(const GRPCReq *request, GRPCResp 
*response) if (request->contentType() == APPLICATION_JSON) //for application/json { Json &json = request->json(); - json_data.CopyFrom(json, allocator, true); + json_data.CopyFrom(json, allocator); } else if (request->contentType() == APPLICATION_URLENCODED) //for applicaiton/x-www-form-urlencoded { @@ -744,7 +743,7 @@ void api(const GRPCReq *request, GRPCResp *response) if (request->contentType() == APPLICATION_JSON) //for application/json { Json &json = request->json(); - json_data.CopyFrom(json, allocator, true); + json_data.CopyFrom(json, allocator); } else if (request->contentType() == APPLICATION_URLENCODED) //for applicaiton/x-www-form-urlencoded { @@ -1235,7 +1234,8 @@ void load_task(const GRPCReq *request, GRPCResp *response, Json &json_data) return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { if (!apiUtil->trywrlock_database(db_name)) @@ -1335,7 +1335,8 @@ void unload_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -1344,15 +1345,6 @@ void unload_task(const GRPCReq *request, GRPCResp *response, Json &json_data) } else { - if (apiUtil->find_txn_managers(db_name) == false) - { - error = "transaction manager can not find the database"; - - apiUtil->unlock_database_map(); - apiUtil->unlock_databaseinfo(db_info); - response->Error(StatusTranscationManageFailed, error); - return; - } apiUtil->db_checkpoint(db_name); apiUtil->delete_from_databases(db_name); apiUtil->unlock_databaseinfo(db_info); @@ -1380,6 +1372,7 @@ void monitor_task(const GRPCReq *request, GRPCResp *response, Json &json_data) try { std::string db_name = jsonParam(json_data, "db_name"); + std::string disk = jsonParam(json_data, "disk"); // check the param value is legal or not. 
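// Illustrative sketch: both monitor handlers now take a "disk" flag (read just above) so
// that the per-database directory walk only runs when the caller asks for it, since
// Util::count_dir_size() presumably has to stat every file under the database directory.
// The function below (dir_size_mb is an illustrative name) shows an equivalent walk with
// std::filesystem and the same ">> 20" byte-to-MB conversion used by the handlers.
#include <filesystem>
#include <string>
#include <system_error>

static unsigned dir_size_mb(const std::string &db_path)
{
    namespace fs = std::filesystem;
    unsigned long long bytes = 0;
    std::error_code ec;
    for (fs::recursive_directory_iterator it(db_path, ec), end; !ec && it != end;
         it.increment(ec))
    {
        std::error_code entry_ec;                  // unreadable entries are simply skipped
        if (it->is_regular_file(entry_ec) && !entry_ec)
            bytes += it->file_size(entry_ec);
    }
    return static_cast<unsigned>(bytes >> 20);     // bytes -> MB
}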
string error = apiUtil->check_param_value("db_name", db_name); @@ -1394,15 +1387,9 @@ void monitor_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - // Database *current_database = apiUtil->get_database(db_name); - // if (current_database == NULL) - // { - // error = "Database not load yet."; - // response->Error(StatusOperationConditionsAreNotSatisfied, error); - // return; - // } - DatabaseInfo *database_info = apiUtil->get_databaseinfo(db_name); - if (apiUtil->tryrdlock_databaseinfo(database_info) == false) + DatabaseInfo *database_info; + apiUtil->get_databaseinfo(db_name, database_info); + if (apiUtil->rdlock_databaseinfo(database_info) == false) { string error = "Unable to monitor due to loss of lock"; response->Error(StatusLossOfLock, error); @@ -1411,9 +1398,13 @@ void monitor_task(const GRPCReq *request, GRPCResp *response, Json &json_data) std::string creator = database_info->getCreator(); std::string time = database_info->getTime(); apiUtil->unlock_databaseinfo(database_info); - Database* current_database = new Database(db_name); - current_database->loadDBInfoFile(); - current_database->loadStatisticsInfoFile(); + Database* current_database; + apiUtil->get_database(db_name, current_database); + if (current_database == NULL) { + current_database = new Database(db_name); + current_database->loadDBInfoFile(); + current_database->loadStatisticsInfoFile(); + } unordered_map umap = current_database->getStatisticsInfo(); Json resp_data; resp_data.SetObject(); @@ -1440,13 +1431,16 @@ void monitor_task(const GRPCReq *request, GRPCResp *response, Json &json_data) resp_data.AddMember("subjectNum", current_database->getSubNum(), allocator); resp_data.AddMember("predicateNum", current_database->getPreNum(), allocator); resp_data.AddMember("connectionNum", apiUtil->get_connection_num(), allocator); - string db_path = _db_home + "/" + db_name + _db_suffix; - string real_path = Util::getExactPath(db_path.c_str()); unsigned diskUsed = 0; - if (!real_path.empty()) { - long long unsigned count_size_byte = Util::count_dir_size(real_path.c_str()); - // byte to MB - diskUsed = count_size_byte>>20; + if (disk != "0") + { + string db_path = _db_home + "/" + db_name + _db_suffix; + string real_path = Util::getExactPath(db_path.c_str()); + if (!real_path.empty()) { + long long unsigned count_size_byte = Util::count_dir_size(real_path.c_str()); + // byte to MB + diskUsed = count_size_byte>>20; + } } resp_data.AddMember("diskUsed", diskUsed, allocator); resp_data.AddMember("subjectList", subjectList, allocator); @@ -1472,48 +1466,48 @@ void build_task(const GRPCReq *request, GRPCResp *response, Json &json_data) try { std::string db_path = jsonParam(json_data, "db_path"); - std::string error = ""; - // error = apiUtil->check_param_value("db_path", db_path); - // if (error.empty() == false) + std::string result = ""; + // result = apiUtil->check_param_value("db_path", db_path); + // if (result.empty() == false) // { - // response->Error(StatusParamIsIllegal, error); + // response->Error(StatusParamIsIllegal, result); // return; // } if (!db_path.empty()) { if (db_path == apiUtil->get_system_path()) { - error = "You have no rights to access system files."; - response->Error(StatusCheckPrivilegeFailed, error); + result = "You have no rights to access system files."; + response->Error(StatusCheckPrivilegeFailed, result); return; } if (Util::file_exist(db_path) == false) { - error = "RDF file not exist."; - 
response->Error(StatusParamIsIllegal, error); + result = "RDF file not exist."; + response->Error(StatusParamIsIllegal, result); return; } } std::string db_name = jsonParam(json_data, "db_name"); - error = apiUtil->check_param_value("db_name", db_name); - if (error.empty() == false) + result = apiUtil->check_param_value("db_name", db_name); + if (result.empty() == false) { - response->Error(StatusParamIsIllegal, error); + response->Error(StatusParamIsIllegal, result); return; } // check if database named [db_name] is already built if (apiUtil->check_db_exist(db_name)) { - error = "database already built."; - response->Error(StatusOperationConditionsAreNotSatisfied, error); + result = "database already built."; + response->Error(StatusOperationConditionsAreNotSatisfied, result); return; } // check databse number if (apiUtil->check_db_count() == false) { - string error = "The total number of databases more than max_databse_num."; - response->Error(StatusOperationConditionsAreNotSatisfied, error); + result = "The total number of databases more than max_databse_num."; + response->Error(StatusOperationConditionsAreNotSatisfied, result); return; } std::vector zip_files; @@ -1542,10 +1536,9 @@ void build_task(const GRPCReq *request, GRPCResp *response, Json &json_data) code = upfile.unCompress(); if (code != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); - string error = "uncompress is failed error."; - response->Error(code, error); + Util::remove_path(unz_dir_path); + result = "uncompress is failed error."; + response->Error(code, result); return; } db_path = upfile.getMaxFilePath(); @@ -1561,142 +1554,86 @@ void build_task(const GRPCReq *request, GRPCResp *response, Json &json_data) bool flag = true; if (!dataset.empty()) flag = current_database->build(dataset); - else + else flag = current_database->BuildEmptyDB(); delete current_database; current_database = NULL; - if (!flag) + if (flag) { - error = "Import RDF file to database failed."; - std::string cmd = "rm -r " + _db_path; - system(cmd.c_str()); - response->Error(StatusOperationFailed, error); - if (!unz_dir_path.empty()) + // if zip file then excuse batchInsert + if (is_zip && zip_files.size() > 0) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + current_database = new Database(db_name); + bool rt = current_database->load(false); + if (!rt) + { + result = "Import RDF file to database failed: load error."; + Util::remove_path(_db_path); + if (!unz_dir_path.empty()) + { + Util::remove_path(unz_dir_path); + } + response->Error(StatusOperationFailed, result); + return; + } + for (std::string rdf_zip : zip_files) + { + current_database->batch_insert(rdf_zip, false, nullptr); + } + current_database->save(); + current_database->unload(); + delete current_database; + current_database = NULL; } - return; } - - ofstream f; - f.open(_db_path + "/success.txt"); - f.close(); - - // by default, one can query or load or unload the database that is built by itself, so add the database name to the privilege set of the user + // init database info and privilege std::string username = jsonParam(json_data, "username"); - if (apiUtil->init_privilege(username, db_name) == 0) - { - error = "init privilege failed."; - response->Error(StatusAddPrivilegeFaied, error); - if (!unz_dir_path.empty()) - { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); - } - return; - } - SLOG_DEBUG("init privilege succeed after build."); - - // add database information to system.db - if 
(apiUtil->build_db_user_privilege(db_name, username)) - { - string success = "Import RDF file to database done."; + if (apiUtil->build_db_user_privilege(db_name, username) + && apiUtil->init_privilege(username, db_name)) + { + ofstream f; + f.open(_db_path + "/success.txt"); + f.close(); + // add backup.log + Util::add_backuplog(db_name); + // build response result + result = "Import RDF file to database done."; string error_log = _db_path + "/parse_error.log"; size_t parse_error_num = Util::count_lines(error_log); + // exclude Info line if (parse_error_num > 0) parse_error_num = parse_error_num - 1; + if (zip_files.size() > 0) + parse_error_num = parse_error_num - zip_files.size(); rapidjson::Document resp_data; resp_data.SetObject(); rapidjson::Document::AllocatorType &allocator = resp_data.GetAllocator(); resp_data.AddMember("StatusCode", 0, allocator); - resp_data.AddMember("StatusMsg", StringRef(success.c_str()), allocator); + resp_data.AddMember("StatusMsg", StringRef(result.c_str()), allocator); resp_data.AddMember("failed_num", parse_error_num, allocator); if (parse_error_num > 0) { SLOG_ERROR("RDF parse error num " + to_string(parse_error_num)); SLOG_ERROR("See log file for details " + error_log); } - if (is_zip) - { - auto error_responce = [response,db_name,parse_error_num](const std::string& error) - { - rapidjson::Document resp_data; - resp_data.SetObject(); - rapidjson::Document::AllocatorType &allocator = resp_data.GetAllocator(); - resp_data.AddMember("StatusCode", 0, allocator); - resp_data.AddMember("StatusMsg", StringRef(error.c_str()), allocator); - resp_data.AddMember("failed_num", parse_error_num, allocator); - response->Json(resp_data); - Util::add_backuplog(db_name); - }; - if (!apiUtil->trywrlock_database(db_name)) - { - std::string error = "The operation can not been excuted due to loss of lock."; - error_responce(error); - } - else - { - Database *cur_database = new Database(db_name); - bool rt = cur_database->load(true); - if (!rt) - { - std::string error = "The database load faild."; - error_responce(error); - apiUtil->unlock_database(db_name); - } - else - { - apiUtil->add_database(db_name, cur_database); - if (apiUtil->insert_txn_managers(cur_database, db_name) == false) - { - SLOG_WARN("when load insert_txn_managers fail."); - } - unsigned success_num = 0; - unsigned parse_insert_error_num = 0; - unsigned total_num = Util::count_lines(error_log); - for (std::string rdf_zip : zip_files) - { - SLOG_DEBUG("begin insert data from " + rdf_zip); - success_num += cur_database->batch_insert(rdf_zip, false, nullptr); - } - parse_insert_error_num = Util::count_lines(error_log)-total_num-zip_files.size(); - cur_database->save(); - apiUtil->db_checkpoint(db_name); - apiUtil->delete_from_databases(db_name); - apiUtil->unlock_database(db_name); - - rapidjson::Document resp_data; - resp_data.SetObject(); - rapidjson::Document::AllocatorType &allocator = resp_data.GetAllocator(); - resp_data.AddMember("StatusCode", 0, allocator); - resp_data.AddMember("StatusMsg", StringRef(success.c_str()), allocator); - resp_data.AddMember("failed_num", parse_error_num, allocator); - resp_data.AddMember("success_num", success_num, allocator); - resp_data.AddMember("failed_insert_num", parse_insert_error_num, allocator); - response->Json(resp_data); - Util::add_backuplog(db_name); - } - } - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); - } - else + // remove unzip dir + if (!unz_dir_path.empty()) { - rapidjson::Document resp_data; - resp_data.SetObject(); - 
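// Illustrative sketch: the build handlers above (build_thread_new and build_task) assemble
// their replies through the same rapidjson pattern; the stand-alone helper below
// (make_status_json is an illustrative name) shows that pattern end to end, including
// serialisation to the string that is finally written to the response.
#include <cstdint>
#include <string>
#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"

static std::string make_status_json(int code, const std::string &msg, std::size_t failed_num)
{
    rapidjson::Document doc;
    doc.SetObject();
    rapidjson::Document::AllocatorType &allocator = doc.GetAllocator();
    doc.AddMember("StatusCode", code, allocator);
    // Copy the message into the document. StringRef(), as used in the handlers, only stores
    // a pointer, so the referenced string must outlive serialisation; copying avoids that
    // lifetime constraint in this generic helper.
    doc.AddMember("StatusMsg", rapidjson::Value(msg.c_str(), allocator), allocator);
    doc.AddMember("failed_num", static_cast<uint64_t>(failed_num), allocator);

    rapidjson::StringBuffer buffer;
    rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
    doc.Accept(writer);
    return buffer.GetString();
}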
rapidjson::Document::AllocatorType &allocator = resp_data.GetAllocator(); - resp_data.AddMember("StatusCode", 0, allocator); - resp_data.AddMember("StatusMsg", StringRef(success.c_str()), allocator); - resp_data.AddMember("failed_num", parse_error_num, allocator); - response->Json(resp_data); - Util::add_backuplog(db_name); + Util::remove_path(unz_dir_path); } + Util::add_backuplog(db_name); + response->Json(resp_data); } else { - error = "add database information to system failed."; - response->Error(StatusOperationFailed, error); + result = "Import RDF file to database failed."; + rmdir(_db_path.c_str()); + Util::remove_path(_db_path); + if (!unz_dir_path.empty()) + { + Util::remove_path(unz_dir_path); + } + response->Json(result); } } catch (const std::exception &e) @@ -1733,7 +1670,8 @@ void drop_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -1768,15 +1706,18 @@ void drop_task(const GRPCReq *request, GRPCResp *response, Json &json_data) return; } SLOG_DEBUG("remove " + db_name + " from the already build database list success."); - - std::string cmd; string db_path = _db_home + "/" + db_name + _db_suffix; if (is_backup == "false") - cmd = "rm -r " + db_path; + { + Util::remove_path(db_path); + SLOG_DEBUG("remove_path"+db_path); + } else - cmd = "mv " + db_path + " " + _db_home + "/" + db_name + ".bak"; - SLOG_DEBUG(cmd); - system(cmd.c_str()); + { + std::string cmd = "mv " + db_path + " " + _db_home + "/" + db_name + ".bak"; + SLOG_DEBUG(cmd); + system(cmd.c_str()); + } Util::delete_backuplog(db_name); string success = "Database " + db_name + " dropped."; response->Success(success); @@ -1816,14 +1757,16 @@ void backup_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - Database* current_db = apiUtil->get_database(db_name); + Database* current_db; + apiUtil->get_database(db_name, current_db); if (current_db == NULL) { error = "Database not load yet."; response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "the operation can not been excuted due to loss of lock."; @@ -1997,8 +1940,8 @@ try return; } } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); - + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "Unable to restore due to loss of lock"; @@ -2021,11 +1964,10 @@ try { // remove old folder string db_path = _db_home + "/" + db_name + _db_suffix; - string sys_cmd = "rm -rf " + db_path; - std::system(sys_cmd.c_str()); + Util::remove_path(db_path); // mv backup folder to database folder string folder_name = Util::get_folder_name(path, db_name); - sys_cmd = "mv " + _db_home + "/" + folder_name + " " + db_path; + string sys_cmd = "mv " + _db_home + "/" + folder_name + " " + db_path; std::system(sys_cmd.c_str()); apiUtil->unlock_databaseinfo(db_info); @@ -2087,7 +2029,7 @@ void query_task(const 
GRPCReq *request, GRPCResp *response, Json &json_data) return; } // check database load status - current_database = apiUtil->get_database(db_name); + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -2225,9 +2167,17 @@ void query_task(const GRPCReq *request, GRPCResp *response, Json &json_data) resp_data.AddMember("ThreadId", StringRef(thread_id.c_str()), allocator); resp_data.AddMember("QueryTime", StringRef(query_time_s.c_str()), allocator); - response->set_header_pair("Cache-Control", "no-cache"); + response->set_header_pair("Cache-Control", "no-cache"); response->set_header_pair("Pragma", "no-cache"); response->set_header_pair("Expires", "0"); + SLOG_DEBUG("Accept-Encoding:" + request->header("Accept-Encoding")); + if (request->hasHeader("Accept-Encoding")) { + std::string accept_encoding = request->header("Accept-Encoding"); + if (accept_encoding.find("gzip") != std::string::npos) + { + response->headers["Content-Encoding"] = "gzip"; + } + } response->Json(resp_data); } } @@ -2359,7 +2309,8 @@ void export_task(const GRPCReq *request, GRPCResp *response, Json &json_data) return; } // check if database named [db_name] is already load - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { string error = "Database not load yet."; @@ -2404,14 +2355,12 @@ void export_task(const GRPCReq *request, GRPCResp *response, Json &json_data) { error = "export compress fail."; response->Error(StatusCompressError, error); - std::string cmd = "rm -f " + db_path + " " + zip_path; - system(cmd.c_str()); + Util::remove_path(db_path + " " + zip_path); return; } resp_data.AddMember("filepath", StringRef(zip_path.c_str()), allocator); response->Json(resp_data); - std::string cmd = "rm -f " + db_path; - system(cmd.c_str()); + Util::remove_path(db_path); } } catch (const std::exception &e) @@ -2655,7 +2604,8 @@ void commit_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -2752,7 +2702,8 @@ void rollback_task(const GRPCReq *request, GRPCResp *response, Json &json_data) response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -2815,7 +2766,8 @@ void checkpoint_task(const GRPCReq *request, GRPCResp *response, Json &json_data response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (current_database == NULL) { error = "Database not load yet."; @@ -2938,8 +2890,7 @@ void batch_insert_task(const GRPCReq *request, GRPCResp *response, Json &json_da code = upfile.unCompress(); if (code != CompressUtil::UnZipOK) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); string error = "uncompress is failed error."; response->Error(code, error); return; @@ -2947,7 +2898,8 @@ void 
batch_insert_task(const GRPCReq *request, GRPCResp *response, Json &json_da upfile.getFileList(zip_files, ""); } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (apiUtil->trywrlock_database(db_name) == false) { error = "The operation can not been excuted due to loss of lock."; @@ -3009,8 +2961,7 @@ void batch_insert_task(const GRPCReq *request, GRPCResp *response, Json &json_da } if (!unz_dir_path.empty()) { - std::string cmd = "rm -r " + unz_dir_path; - system(cmd.c_str()); + Util::remove_path(unz_dir_path); } } catch (const std::exception &e) @@ -3064,7 +3015,8 @@ void batch_remove_task(const GRPCReq *request, GRPCResp *response, Json &json_da response->Error(StatusOperationConditionsAreNotSatisfied, error); return; } - Database *current_database = apiUtil->get_database(db_name); + Database *current_database; + apiUtil->get_database(db_name, current_database); if (apiUtil->trywrlock_database(db_name) == false) { error = "The operation can not been excuted due to loss of lock."; @@ -3131,8 +3083,8 @@ void rename_task(const GRPCReq *request, GRPCResp *response, Json &json_data) return; } - struct DatabaseInfo *db_info = apiUtil->get_databaseinfo(db_name); - + struct DatabaseInfo *db_info; + apiUtil->get_databaseinfo(db_name, db_info); if (apiUtil->trywrlock_databaseinfo(db_info) == false) { error = "Unable to rename due to loss of lock"; @@ -3407,73 +3359,97 @@ void user_privilege_task(const GRPCReq *request, GRPCResp *response, Json &json_ privileges = privileges + ","; } Util::split(privileges, ",", privilege_vector); - std::string result; + vector privilegeTypes; for (unsigned i = 0; i < privilege_vector.size(); i++) { std::string temp_privilege_int = privilege_vector[i]; - std::string temp_privilege = ""; if (temp_privilege_int.empty()) { continue; } if (temp_privilege_int == "1") { - temp_privilege = "query"; + privilegeTypes.push_back("query"); } else if (temp_privilege_int == "2") { - temp_privilege = "load"; + privilegeTypes.push_back("load"); } else if (temp_privilege_int == "3") { - temp_privilege = "unload"; + privilegeTypes.push_back("unload"); } else if (temp_privilege_int == "4") { - temp_privilege = "update"; + privilegeTypes.push_back("update"); } else if (temp_privilege_int == "5") { - temp_privilege = "backup"; + privilegeTypes.push_back("backup"); } else if (temp_privilege_int == "6") { - temp_privilege = "restore"; + privilegeTypes.push_back("restore"); } else if (temp_privilege_int == "7") { - temp_privilege = "export"; + privilegeTypes.push_back("export"); } else { SLOG_DEBUG("The privilege " + temp_privilege_int + " undefined."); continue; } - + } + string result=""; + if (privilegeTypes.size() > 0) + { + string privilegeNames=""; + for (size_t i = 0; i < privilegeTypes.size(); i++) + { + if (i > 0) + { + privilegeNames = privilegeNames + ","; + } + privilegeNames = privilegeNames + privilegeTypes[i]; + } if (type == "1") { - if (apiUtil->add_privilege(op_username, temp_privilege, db_name) == 0) + if (apiUtil->add_privilege(op_username, privilegeTypes, db_name) == 0) { - result += "add privilege " + temp_privilege + " failed. \r\n"; + result = result + "add privilege " + privilegeNames + " failed."; + response->Error(StatusOperationFailed, result); } else { - result += "add privilege " + temp_privilege + " successfully. 
\r\n"; + result = result + "add privilege " + privilegeNames + " successfully."; + response->Success(result); } } else if (type == "2") { - if (apiUtil->del_privilege(op_username, temp_privilege, db_name) == 0) + if (apiUtil->del_privilege(op_username, privilegeTypes, db_name) == 0) { - result += "delete privilege " + temp_privilege + " failed. \r\n"; + result += "delete privilege " + privilegeNames + " failed."; + response->Error(StatusOperationFailed, result); } else { - result += "delete privilege " + temp_privilege + " successfully. \r\n"; + result += "delete privilege " + privilegeNames + " successfully."; + response->Success(result); } } + else + { + result = "the operation type is not support."; + response->Error(StatusParamIsIllegal, result); + } + } + else + { + result = "not match any valid privilege, valid values between 1 and 7."; + response->Error(StatusParamIsIllegal, result); } - response->Success(result); } } catch (const std::exception &e) diff --git a/Main/gserver.cpp b/src/Main/gserver.cpp similarity index 99% rename from Main/gserver.cpp rename to src/Main/gserver.cpp index 30969d0e..04403bd5 100644 --- a/Main/gserver.cpp +++ b/src/Main/gserver.cpp @@ -253,8 +253,8 @@ void checkSwap() { out << port; out.close(); chmod(Util::gserver_port_file.c_str(), 0644); - string cmd = string("rm ") + Util::gserver_port_swap; - system(cmd.c_str()); + string cmd = Util::gserver_port_swap; + Util::remove_path(cmd); } bool startServer(bool _debug) { diff --git a/Main/gserver_backup_scheduler.cpp b/src/Main/gserver_backup_scheduler.cpp similarity index 100% rename from Main/gserver_backup_scheduler.cpp rename to src/Main/gserver_backup_scheduler.cpp diff --git a/Main/gshow.cpp b/src/Main/gshow.cpp similarity index 100% rename from Main/gshow.cpp rename to src/Main/gshow.cpp diff --git a/Main/gsub.cpp b/src/Main/gsub.cpp similarity index 100% rename from Main/gsub.cpp rename to src/Main/gsub.cpp diff --git a/Main/rollback.cpp b/src/Main/rollback.cpp similarity index 99% rename from Main/rollback.cpp rename to src/Main/rollback.cpp index fd907378..b868c36d 100644 --- a/Main/rollback.cpp +++ b/src/Main/rollback.cpp @@ -296,8 +296,7 @@ main(int argc, char * argv[]) cmd = "cp -r " + _default_backup_path + "/" + folders[inx] + " " + _db_home; system(cmd.c_str()); cout << cmd << endl; - cmd = "rm -rf " + db_path; - system(cmd.c_str()); + Util::remove_path(db_path); cout << cmd << endl; cmd = "mv " + _db_home + "/" + folders[inx] + " " + db_path; system(cmd.c_str()); diff --git a/Main/shutdown.cpp b/src/Main/shutdown.cpp similarity index 92% rename from Main/shutdown.cpp rename to src/Main/shutdown.cpp index d9df377f..efbbde24 100644 --- a/Main/shutdown.cpp +++ b/src/Main/shutdown.cpp @@ -6,7 +6,7 @@ # Description: used to stop the ghttp server =============================================================================*/ -#include "../api/http/cpp/src/GstoreConnector.h" +#include "../Connector/GstoreConnector.h" #include "../Util/Util.h" using namespace std; @@ -103,10 +103,9 @@ int main(int argc, char *argv[]) document.SetObject(); document.Parse(res.c_str()); if (document.HasParseError()) { - string cmd = "rm -rf " + _db_home + "/system" +_db_suffix + "/password*.txt";; - system(cmd.c_str()); - cmd = "rm -rf " + system_port_path; - system(cmd.c_str()); + string cmd = _db_home + "/system" +_db_suffix + "/password*.txt"; + Util::remove_path(cmd); + Util::remove_path(system_port_path); cout << "http server is not running!" 
<< endl; return 0; } @@ -142,10 +141,9 @@ int main(int argc, char *argv[]) } else { - string cmd = "rm -rf " + _db_home + "/system" +_db_suffix + "/password*.txt";; - system(cmd.c_str()); - cmd = "rm -rf " + system_port_path; - system(cmd.c_str()); + string cmd = _db_home + "/system" +_db_suffix + "/password*.txt"; + Util::remove_path(cmd); + Util::remove_path(system_port_path); cout << "http server is not running!" << endl; return 0; } diff --git a/src/Parser/CMakeLists.txt b/src/Parser/CMakeLists.txt new file mode 100644 index 00000000..f93ebb3d --- /dev/null +++ b/src/Parser/CMakeLists.txt @@ -0,0 +1,7 @@ +add_library(gstore_parser OBJECT + SPARQL/SPARQLParser.cpp + SPARQL/SPARQLLexer.cpp + TurtleParser.cpp + RDFParser.cpp + QueryParser.cpp +) diff --git a/Parser/QueryParser.cpp b/src/Parser/QueryParser.cpp similarity index 100% rename from Parser/QueryParser.cpp rename to src/Parser/QueryParser.cpp diff --git a/Parser/QueryParser.h b/src/Parser/QueryParser.h similarity index 100% rename from Parser/QueryParser.h rename to src/Parser/QueryParser.h diff --git a/Parser/RDFParser.cpp b/src/Parser/RDFParser.cpp similarity index 99% rename from Parser/RDFParser.cpp rename to src/Parser/RDFParser.cpp index 9d21dd18..c5bf99f8 100644 --- a/Parser/RDFParser.cpp +++ b/src/Parser/RDFParser.cpp @@ -3,7 +3,7 @@ # Author: Yue Pang # Mail: michelle.py@pku.edu.cn # Last Modified: 2021-08-03 15:28 CST -# Description: implements the class for parsing RDF data during build based on +# Description: implements the class for parsing RDF data during build based on RDF-3X's TurtleParser =============================================================================*/ #include "RDFParser.h" diff --git a/Parser/RDFParser.h b/src/Parser/RDFParser.h similarity index 98% rename from Parser/RDFParser.h rename to src/Parser/RDFParser.h index 92799b44..8a80762f 100644 --- a/Parser/RDFParser.h +++ b/src/Parser/RDFParser.h @@ -3,7 +3,7 @@ # Author: Yue Pang # Mail: michelle.py@pku.edu.cn # Last Modified: 2021-08-03 15:28 CST -# Description: defines the class for parsing RDF data during build based on +# Description: defines the class for parsing RDF data during build based on RDF-3X's TurtleParser =============================================================================*/ diff --git a/Parser/SPARQL/SPARQL.g4 b/src/Parser/SPARQL/SPARQL.g4 similarity index 100% rename from Parser/SPARQL/SPARQL.g4 rename to src/Parser/SPARQL/SPARQL.g4 diff --git a/Parser/SPARQL/SPARQL.interp b/src/Parser/SPARQL/SPARQL.interp similarity index 100% rename from Parser/SPARQL/SPARQL.interp rename to src/Parser/SPARQL/SPARQL.interp diff --git a/Parser/SPARQL/SPARQL.tokens b/src/Parser/SPARQL/SPARQL.tokens similarity index 100% rename from Parser/SPARQL/SPARQL.tokens rename to src/Parser/SPARQL/SPARQL.tokens diff --git a/Parser/SPARQL/SPARQLBaseListener.cpp b/src/Parser/SPARQL/SPARQLBaseListener.cpp similarity index 100% rename from Parser/SPARQL/SPARQLBaseListener.cpp rename to src/Parser/SPARQL/SPARQLBaseListener.cpp diff --git a/Parser/SPARQL/SPARQLBaseListener.h b/src/Parser/SPARQL/SPARQLBaseListener.h similarity index 100% rename from Parser/SPARQL/SPARQLBaseListener.h rename to src/Parser/SPARQL/SPARQLBaseListener.h diff --git a/Parser/SPARQL/SPARQLBaseVisitor.cpp b/src/Parser/SPARQL/SPARQLBaseVisitor.cpp similarity index 100% rename from Parser/SPARQL/SPARQLBaseVisitor.cpp rename to src/Parser/SPARQL/SPARQLBaseVisitor.cpp diff --git a/Parser/SPARQL/SPARQLBaseVisitor.h b/src/Parser/SPARQL/SPARQLBaseVisitor.h similarity index 100% rename 
from Parser/SPARQL/SPARQLBaseVisitor.h rename to src/Parser/SPARQL/SPARQLBaseVisitor.h diff --git a/Parser/SPARQL/SPARQLLexer.cpp b/src/Parser/SPARQL/SPARQLLexer.cpp similarity index 100% rename from Parser/SPARQL/SPARQLLexer.cpp rename to src/Parser/SPARQL/SPARQLLexer.cpp diff --git a/Parser/SPARQL/SPARQLLexer.h b/src/Parser/SPARQL/SPARQLLexer.h similarity index 100% rename from Parser/SPARQL/SPARQLLexer.h rename to src/Parser/SPARQL/SPARQLLexer.h diff --git a/Parser/SPARQL/SPARQLLexer.interp b/src/Parser/SPARQL/SPARQLLexer.interp similarity index 100% rename from Parser/SPARQL/SPARQLLexer.interp rename to src/Parser/SPARQL/SPARQLLexer.interp diff --git a/Parser/SPARQL/SPARQLListener.cpp b/src/Parser/SPARQL/SPARQLListener.cpp similarity index 100% rename from Parser/SPARQL/SPARQLListener.cpp rename to src/Parser/SPARQL/SPARQLListener.cpp diff --git a/Parser/SPARQL/SPARQLListener.h b/src/Parser/SPARQL/SPARQLListener.h similarity index 100% rename from Parser/SPARQL/SPARQLListener.h rename to src/Parser/SPARQL/SPARQLListener.h diff --git a/Parser/SPARQL/SPARQLParser.cpp b/src/Parser/SPARQL/SPARQLParser.cpp similarity index 100% rename from Parser/SPARQL/SPARQLParser.cpp rename to src/Parser/SPARQL/SPARQLParser.cpp diff --git a/Parser/SPARQL/SPARQLParser.h b/src/Parser/SPARQL/SPARQLParser.h similarity index 100% rename from Parser/SPARQL/SPARQLParser.h rename to src/Parser/SPARQL/SPARQLParser.h diff --git a/Parser/SPARQL/SPARQLVisitor.h b/src/Parser/SPARQL/SPARQLVisitor.h similarity index 100% rename from Parser/SPARQL/SPARQLVisitor.h rename to src/Parser/SPARQL/SPARQLVisitor.h diff --git a/Parser/TurtleParser.cpp b/src/Parser/TurtleParser.cpp similarity index 100% rename from Parser/TurtleParser.cpp rename to src/Parser/TurtleParser.cpp diff --git a/Parser/TurtleParser.h b/src/Parser/TurtleParser.h similarity index 100% rename from Parser/TurtleParser.h rename to src/Parser/TurtleParser.h diff --git a/Parser/Type.h b/src/Parser/Type.h similarity index 100% rename from Parser/Type.h rename to src/Parser/Type.h diff --git a/Query/BGPQuery.cpp b/src/Query/BGPQuery.cpp similarity index 100% rename from Query/BGPQuery.cpp rename to src/Query/BGPQuery.cpp diff --git a/Query/BGPQuery.h b/src/Query/BGPQuery.h similarity index 100% rename from Query/BGPQuery.h rename to src/Query/BGPQuery.h diff --git a/Query/BasicQuery.cpp b/src/Query/BasicQuery.cpp similarity index 100% rename from Query/BasicQuery.cpp rename to src/Query/BasicQuery.cpp diff --git a/Query/BasicQuery.h b/src/Query/BasicQuery.h similarity index 99% rename from Query/BasicQuery.h rename to src/Query/BasicQuery.h index f150d63e..b02a87c2 100644 --- a/Query/BasicQuery.h +++ b/src/Query/BasicQuery.h @@ -272,7 +272,7 @@ class BasicQuery bool isReady(int _var) const; void setReady(int _var); - // encode relative signature data of the query graph + // encode relative signature data of the query graph bool encodeBasicQuery(KVstore* _p_kvstore, const std::vector& _query_var); bool getEncodeBasicQueryResult() const; diff --git a/src/Query/CMakeLists.txt b/src/Query/CMakeLists.txt new file mode 100644 index 00000000..80537e45 --- /dev/null +++ b/src/Query/CMakeLists.txt @@ -0,0 +1,33 @@ +add_library(gstore_topk OBJECT + topk/DPB/DynamicTrie.cpp + topk/DPB/OrderedList.cpp + topk/DPB/Pool.cpp + topk/DPBTopKUtil.cpp + topk/TopKSearchPlan.cpp + topk/TopKUtil.cpp +) + +add_library(gstore_query OBJECT + SPARQLquery.cpp + BasicQuery.cpp + ResultSet.cpp + IDList.cpp + DFSPlan.cpp + Varset.cpp + QueryTree.cpp + TempResult.cpp + QueryCache.cpp + 
GeneralEvaluation.cpp + PathQueryHandler.cpp + BGPQuery.cpp + FilterPlan.cpp +) + +add_library(gpathqueryhandler SHARED + PathQueryHandler.cpp +) +# set fPIC +set_property(TARGET gpathqueryhandler PROPERTY POSITION_INDEPENDENT_CODE ON) +target_link_libraries(gpathqueryhandler gcsr) +add_dependencies(gpathqueryhandler gcsr prepare) +install(TARGETS gpathqueryhandler DESTINATION ${GSTORE_LIB_DIR}) \ No newline at end of file diff --git a/Query/DFSPlan.cpp b/src/Query/DFSPlan.cpp similarity index 100% rename from Query/DFSPlan.cpp rename to src/Query/DFSPlan.cpp diff --git a/Query/DFSPlan.h b/src/Query/DFSPlan.h similarity index 100% rename from Query/DFSPlan.h rename to src/Query/DFSPlan.h diff --git a/Query/FilterPlan.cpp b/src/Query/FilterPlan.cpp similarity index 100% rename from Query/FilterPlan.cpp rename to src/Query/FilterPlan.cpp diff --git a/Query/FilterPlan.h b/src/Query/FilterPlan.h similarity index 100% rename from Query/FilterPlan.h rename to src/Query/FilterPlan.h diff --git a/Query/GeneralEvaluation.cpp b/src/Query/GeneralEvaluation.cpp similarity index 98% rename from Query/GeneralEvaluation.cpp rename to src/Query/GeneralEvaluation.cpp index f219f97a..cfd17a28 100644 --- a/Query/GeneralEvaluation.cpp +++ b/src/Query/GeneralEvaluation.cpp @@ -1025,7 +1025,7 @@ TempResultSet* GeneralEvaluation::queryEvaluation(int dep) for (int l = 0; l < (int)(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern.size()); l++) if (rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].type == GroupPattern::SubGroupPattern::Bind_type) { - sub_result->doBind(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].bind, kvstore, stringindex, \ + sub_result->doBind(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].bind, kvstore, \ rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset); } @@ -1038,15 +1038,13 @@ TempResultSet* GeneralEvaluation::queryEvaluation(int dep) // if (!rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter.done) { rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter.done = true; - - TempResultSet *new_result = new TempResultSet(); - sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, *new_result, \ - this->stringindex, rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset); - - sub_result->release(); - delete sub_result; - - sub_result = new_result; + // If FILTER scope is global && no ORDER BY && is the last operation, doFilter with limit + if (dep == 0 && query_tree.getOrderByVarset().empty() && l == rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern.size() - 1) { + sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, kvstore,\ + rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset, query_tree.getLimit()); + } else + sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, kvstore,\ + rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset); } // Process OPTIONAL // @@ -1093,15 +1091,13 @@ TempResultSet* GeneralEvaluation::queryEvaluation(int dep) if (!rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter.done) { rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter.done = true; - - TempResultSet *new_result = new TempResultSet(); - 
sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, *new_result, \ - this->stringindex, rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset); - - sub_result->release(); - delete sub_result; - - sub_result = new_result; + // If FILTER scope is global && no ORDER BY && is the last operation, doFilter with limit + if (dep == 0 && query_tree.getOrderByVarset().empty() && l == rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern.size() - 1) { + sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, kvstore,\ + rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset, query_tree.getLimit()); + } else + sub_result->doFilter(rewriting_evaluation_stack[dep].group_pattern.sub_group_pattern[l].filter, kvstore,\ + rewriting_evaluation_stack[dep].group_pattern.group_pattern_subject_object_maximal_varset); printf("IN SECOND doFilter\n"); } } @@ -1171,18 +1167,16 @@ TempResultSet* GeneralEvaluation::queryEvaluation(int dep) } else if (group_pattern.sub_group_pattern[i].type == GroupPattern::SubGroupPattern::Filter_type) { - TempResultSet *new_result = new TempResultSet(); - result->doFilter(group_pattern.sub_group_pattern[i].filter, *new_result, this->stringindex, group_pattern.group_pattern_subject_object_maximal_varset); - - result->release(); - delete result; - - result = new_result; + // If FILTER scope is global && no ORDER BY && is the last operation, doFilter with limit + if (dep == 0 && query_tree.getOrderByVarset().empty() && i == group_pattern.sub_group_pattern.size() - 1) + result->doFilter(group_pattern.sub_group_pattern[i].filter, kvstore, group_pattern.group_pattern_subject_object_maximal_varset, query_tree.getLimit()); + else + result->doFilter(group_pattern.sub_group_pattern[i].filter, kvstore, group_pattern.group_pattern_subject_object_maximal_varset); result->initial = false; } else if (group_pattern.sub_group_pattern[i].type == GroupPattern::SubGroupPattern::Bind_type) { - result->doBind(group_pattern.sub_group_pattern[i].bind, kvstore, stringindex, \ + result->doBind(group_pattern.sub_group_pattern[i].bind, kvstore, \ group_pattern.group_pattern_subject_object_maximal_varset); } else if (group_pattern.sub_group_pattern[i].type == GroupPattern::SubGroupPattern::Subquery_type) @@ -2229,7 +2223,7 @@ void GeneralEvaluation::getFinalResult(ResultSet &ret_result) Varset result0Varset = result0.getAllVarset(); for (int j = begin; j <= end; j++) { - tmp = result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, stringindex, \ + tmp = result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, kvstore, \ result0Varset); if (tmp.datatype == EvalMultitypeValue::xsd_boolean && \ tmp.bool_value.value == EvalMultitypeValue::EffectiveBooleanValue::error_value) @@ -2352,7 +2346,7 @@ void GeneralEvaluation::getFinalResult(ResultSet &ret_result) Varset result0Varset = result0.getAllVarset(); for (int j = begin; j <= end; j++) { - tmp = result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, stringindex, \ + tmp = result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, kvstore, \ result0Varset); if (tmp.datatype == EvalMultitypeValue::xsd_boolean && \ tmp.bool_value.value == EvalMultitypeValue::EffectiveBooleanValue::error_value) @@ -2514,12 +2508,12 @@ void GeneralEvaluation::getFinalResult(ResultSet &ret_result) if (group2temp.empty()) { for (int j = begin; j <= end; 
j++) { new_result0.result[j].str[proj2new[i] - new_result0_id_cols] = \ - result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, stringindex, \ + result0.doComp(proj[i].comp_tree_root, result0.result[j], result0_id_cols, kvstore, \ result0Varset).term_value; } } else { new_result0.result.back().str[proj2new[i] - new_result0_id_cols] = \ - result0.doComp(proj[i].comp_tree_root, result0.result[begin], result0_id_cols, stringindex, \ + result0.doComp(proj[i].comp_tree_root, result0.result[begin], result0_id_cols, kvstore, \ result0Varset).term_value; } } diff --git a/Query/GeneralEvaluation.h b/src/Query/GeneralEvaluation.h similarity index 100% rename from Query/GeneralEvaluation.h rename to src/Query/GeneralEvaluation.h diff --git a/Query/IDList.cpp b/src/Query/IDList.cpp similarity index 100% rename from Query/IDList.cpp rename to src/Query/IDList.cpp diff --git a/Query/IDList.h b/src/Query/IDList.h similarity index 100% rename from Query/IDList.h rename to src/Query/IDList.h diff --git a/Query/PathQueryHandler.cpp b/src/Query/PathQueryHandler.cpp similarity index 100% rename from Query/PathQueryHandler.cpp rename to src/Query/PathQueryHandler.cpp diff --git a/Query/PathQueryHandler.h b/src/Query/PathQueryHandler.h similarity index 100% rename from Query/PathQueryHandler.h rename to src/Query/PathQueryHandler.h diff --git a/Query/QueryCache.cpp b/src/Query/QueryCache.cpp similarity index 100% rename from Query/QueryCache.cpp rename to src/Query/QueryCache.cpp diff --git a/Query/QueryCache.h b/src/Query/QueryCache.h similarity index 100% rename from Query/QueryCache.h rename to src/Query/QueryCache.h diff --git a/Query/QueryTree.cpp b/src/Query/QueryTree.cpp similarity index 100% rename from Query/QueryTree.cpp rename to src/Query/QueryTree.cpp diff --git a/Query/QueryTree.h b/src/Query/QueryTree.h similarity index 100% rename from Query/QueryTree.h rename to src/Query/QueryTree.h diff --git a/Query/RegexExpression.h b/src/Query/RegexExpression.h similarity index 100% rename from Query/RegexExpression.h rename to src/Query/RegexExpression.h diff --git a/Query/ResultFilter.cpp b/src/Query/ResultFilter.cpp similarity index 100% rename from Query/ResultFilter.cpp rename to src/Query/ResultFilter.cpp diff --git a/Query/ResultFilter.h b/src/Query/ResultFilter.h similarity index 100% rename from Query/ResultFilter.h rename to src/Query/ResultFilter.h diff --git a/Query/ResultSet.cpp b/src/Query/ResultSet.cpp similarity index 100% rename from Query/ResultSet.cpp rename to src/Query/ResultSet.cpp diff --git a/Query/ResultSet.h b/src/Query/ResultSet.h similarity index 100% rename from Query/ResultSet.h rename to src/Query/ResultSet.h diff --git a/Query/SPARQLquery.cpp b/src/Query/SPARQLquery.cpp similarity index 100% rename from Query/SPARQLquery.cpp rename to src/Query/SPARQLquery.cpp diff --git a/Query/SPARQLquery.h b/src/Query/SPARQLquery.h similarity index 100% rename from Query/SPARQLquery.h rename to src/Query/SPARQLquery.h diff --git a/Query/TempResult.cpp b/src/Query/TempResult.cpp similarity index 90% rename from Query/TempResult.cpp rename to src/Query/TempResult.cpp index f379b2b3..28be3c56 100644 --- a/Query/TempResult.cpp +++ b/src/Query/TempResult.cpp @@ -697,7 +697,7 @@ void TempResult::doMinus(TempResult &x, TempResult &r) } EvalMultitypeValue -TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, StringIndex *stringindex, Varset &this_varset) +TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, KVstore *kvstore, Varset 
&this_varset) { // Arithmetic and logical operations // if (root->lchild == NULL && root->rchild == NULL) // leaf node @@ -718,7 +718,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin if (pos < id_cols) { int id = row.id[pos]; - stringindex->randomAccess(id, &x.term_value, string_index_buffer, string_index_buffer_size); + x.term_value = kvstore->getStringByID(id); } else x.term_value = row.str[pos - id_cols]; @@ -741,7 +741,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin else if (root.children.size() == 1 && \ (root.oprt == "+" || root.oprt == "-" || root.oprt == "!")) // unary operator { - EvalMultitypeValue lRes = doComp(root.children[0], row, id_cols, stringindex, this_varset); + EvalMultitypeValue lRes = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (root.oprt == "+") return lRes; else if (root.oprt == "-") @@ -756,8 +756,13 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin || root.oprt == "-" || root.oprt == "*" || root.oprt == "/")) // binary operator { EvalMultitypeValue lRes, rRes; - lRes = doComp(root.children[0], row, id_cols, stringindex, this_varset); - rRes = doComp(root.children[1], row, id_cols, stringindex, this_varset); + lRes = doComp(root.children[0], row, id_cols, kvstore, this_varset); + if (lRes.datatype == EvalMultitypeValue::xsd_boolean) { + if ((root.oprt == "||" && lRes.bool_value.getValue() == 1) || (root.oprt == "&&" && lRes.bool_value.getValue() != 1)) + return lRes; + } else if (root.oprt == "&&") + return lRes; + rRes = doComp(root.children[1], row, id_cols, kvstore, this_varset); if (root.oprt == "||") return lRes || rRes; @@ -795,7 +800,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin EvalMultitypeValue x, y, z; string t, p, f; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::literal || x.datatype == EvalMultitypeValue::xsd_string) { t = x.str_value; @@ -803,7 +808,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin } else return ret_femv; - y = doComp(root.children[1], row, id_cols, stringindex, this_varset); + y = doComp(root.children[1], row, id_cols, kvstore, this_varset); if (y.isSimpleLiteral()) { p = y.str_value; @@ -813,7 +818,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin return ret_femv; if (root.children.size() >= 3) { - z = doComp(root.children[2], row, id_cols, stringindex, this_varset); + z = doComp(root.children[2], row, id_cols, kvstore, this_varset); if (z.isSimpleLiteral()) { f = z.str_value; @@ -835,7 +840,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin EvalMultitypeValue x; ret_femv.str_value = ""; for (size_t i = 0; i < numChild; i++) { - x = doComp(root.children[i], row, id_cols, stringindex, this_varset); + x = doComp(root.children[i], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::literal || x.datatype == EvalMultitypeValue::xsd_string) { size_t bIdx = x.str_value.find('\"'), eIdx = x.str_value.rfind('\"'); @@ -858,7 +863,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == 
EvalMultitypeValue::literal) ret_femv.str_value = x.str_value.substr(0, x.str_value.rfind('"') + 1); else if (x.datatype == EvalMultitypeValue::iri) @@ -876,7 +881,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); ret_femv.bool_value = (x.datatype == EvalMultitypeValue::iri); return ret_femv; @@ -885,7 +890,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); ret_femv.bool_value = (x.datatype == EvalMultitypeValue::literal || x.datatype == EvalMultitypeValue::xsd_string || x.datatype == EvalMultitypeValue::xsd_boolean || @@ -901,7 +906,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); ret_femv.bool_value = (x.datatype == EvalMultitypeValue::xsd_integer || x.datatype == EvalMultitypeValue::xsd_decimal || x.datatype == EvalMultitypeValue::xsd_float || @@ -913,7 +918,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::literal) { ret_femv.datatype = EvalMultitypeValue::literal; @@ -931,7 +936,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::rdf_term) { size_t p = x.str_value.rfind("^^"); @@ -992,7 +997,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_datetime) { ret_femv.datatype = EvalMultitypeValue::xsd_integer; @@ -1006,7 +1011,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_datetime) { ret_femv.datatype = EvalMultitypeValue::xsd_integer; @@ -1020,7 +1025,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_datetime) { ret_femv.datatype = EvalMultitypeValue::xsd_integer; @@ -1034,7 +1039,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_datetime) { ret_femv.datatype = 
EvalMultitypeValue::xsd_integer; @@ -1048,7 +1053,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_datetime) { ret_femv.datatype = EvalMultitypeValue::xsd_integer; @@ -1062,7 +1067,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); string langTag = ""; if (x.datatype == EvalMultitypeValue::xsd_string) { @@ -1097,8 +1102,8 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x, y; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); - y = doComp(root.children[1], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); + y = doComp(root.children[1], row, id_cols, kvstore, this_varset); if(x.argCompatible(y)) { string x_content = x.getStrContent(), y_content = y.getStrContent(); @@ -1115,7 +1120,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (x.datatype == EvalMultitypeValue::xsd_integer) { ret_femv.datatype = EvalMultitypeValue::xsd_integer; @@ -1143,10 +1148,10 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x, y; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if (!x.isSimpleLiteral()) return ret_femv; - y = doComp(root.children[1], row, id_cols, stringindex, this_varset); + y = doComp(root.children[1], row, id_cols, kvstore, this_varset); if (!y.isSimpleLiteral()) return ret_femv; @@ -1157,7 +1162,7 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin else if (root.oprt == "BOUND") { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); // Only return false when x is xsd_boolean and has error_value if (x.datatype == EvalMultitypeValue::xsd_boolean && x.bool_value.getValue() == 2) ret_femv.bool_value = EvalMultitypeValue::EffectiveBooleanValue::false_value; @@ -1170,12 +1175,12 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); ret_femv.bool_value.value = EvalMultitypeValue::EffectiveBooleanValue::false_value; for (int i = 1; i < (int)root.children.size(); i++) { EvalMultitypeValue y; - y = doComp(root.children[1], row, id_cols, stringindex, this_varset); + y = doComp(root.children[1], row, id_cols, kvstore, this_varset); EvalMultitypeValue equal = (x == y); if (i == 1) ret_femv = equal; @@ -1192,12 +1197,12 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); 
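Besides threading the KVstore pointer through every recursive doComp call (string values are now fetched with kvstore->getStringByID instead of StringIndex::randomAccess), the binary-operator hunk above adds short-circuiting: for "||" the right child is skipped when the left is already true, and for "&&" it is skipped whenever the left is anything other than true (false, error, or a non-boolean). A condensed sketch of that control flow, with EvalMultitypeValue reduced to a tri-state enum, so this illustrates the evaluation order rather than the full SPARQL boolean algebra:

```cpp
#include <functional>
#include <string>

enum class EBV { False, True, Error };

// evalLeft / evalRight stand in for the recursive doComp calls on the two children.
EBV evalLogical(const std::string &op,
                const std::function<EBV()> &evalLeft,
                const std::function<EBV()> &evalRight)
{
    EBV l = evalLeft();
    if (op == "||" && l == EBV::True) return l;  // true || X  -> skip X entirely
    if (op == "&&" && l != EBV::True) return l;  // !true && X -> skip X entirely
    EBV r = evalRight();                         // right side evaluated only when needed
    if (op == "||") return (r == EBV::True) ? EBV::True
                                            : (l == EBV::Error ? EBV::Error : r);
    return r;                                    // "&&" with a true left operand
}
```

Skipping the second recursion matters here because each doComp call on a child may itself trigger string lookups and further recursion over the expression tree.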
ret_femv.bool_value.value = EvalMultitypeValue::EffectiveBooleanValue::true_value; for (int i = 1; i < (int)root.children.size(); i++) { EvalMultitypeValue y; - y = doComp(root.children[1], row, id_cols, stringindex, this_varset); + y = doComp(root.children[1], row, id_cols, kvstore, this_varset); EvalMultitypeValue inequal = (x != y); if (i == 1) ret_femv = inequal; @@ -1214,14 +1219,14 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin { EvalMultitypeValue x, y, z; - x = doComp(root.children[0], row, id_cols, stringindex, this_varset); + x = doComp(root.children[0], row, id_cols, kvstore, this_varset); if ((x.datatype == EvalMultitypeValue::xsd_boolean && x.bool_value.value == EvalMultitypeValue::EffectiveBooleanValue::error_value) || x.datatype != EvalMultitypeValue::xsd_boolean) return ret_femv; if (x.datatype == EvalMultitypeValue::xsd_boolean && x.bool_value.value == EvalMultitypeValue::EffectiveBooleanValue::true_value) - ret_femv = doComp(root.children[1], row, id_cols, stringindex, this_varset); + ret_femv = doComp(root.children[1], row, id_cols, kvstore, this_varset); else - ret_femv = doComp(root.children[2], row, id_cols, stringindex, this_varset); + ret_femv = doComp(root.children[2], row, id_cols, kvstore, this_varset); return ret_femv; } @@ -1229,60 +1234,35 @@ TempResult::doComp(const CompTreeNode &root, ResultPair &row, int id_cols, Strin return ret_femv; } -void TempResult::doFilter(const CompTreeNode &filter, TempResult &r, StringIndex *stringindex, Varset &entity_literal_varset) -{ - Varset this_varset = this->getAllVarset(); - // filter.mapVarPos2Varset(this_varset, entity_literal_varset); - - int this_id_cols = this->id_varset.getVarsetSize(); - - int r_id_cols = r.id_varset.getVarsetSize(); - vector this2r_id_pos = this->id_varset.mapTo(r.id_varset); - - int this_str_cols = this->str_varset.getVarsetSize(); - - int r_str_cols = r.str_varset.getVarsetSize(); - vector this2r_str_pos = this->str_varset.mapTo(r.str_varset); - - for (int i = 0; i < (int)this->result.size(); i++) - { - // EvalMultitypeValue ret_femv = matchFilterTree(filter, this->result[i], this_id_cols, stringindex); - EvalMultitypeValue ret_femv = doComp(filter, this->result[i], this_id_cols, stringindex, this_varset); - - if (ret_femv.datatype == EvalMultitypeValue::xsd_boolean && ret_femv.bool_value.value == EvalMultitypeValue::EffectiveBooleanValue::true_value) - { - r.result.push_back(ResultPair()); - - if (r_id_cols > 0) - { - r.result.back().id = new unsigned [r_id_cols]; - r.result.back().sz = r_id_cols; - unsigned *v = r.result.back().id; - - for (int k = 0; k < this_id_cols; k++) - v[this2r_id_pos[k]] = this->result[i].id[k]; - } - - if (r_str_cols > 0) - { - r.result.back().str.resize(r_str_cols); - vector &v = r.result.back().str; - - for (int k = 0; k < this_str_cols; k++) - v[this2r_str_pos[k]] = this->result[i].str[k]; - } - } - } +void TempResult::doFilter(const CompTreeNode &filter, KVstore *kvstore, Varset &entity_literal_varset, unsigned limit_number) { + unsigned original_size = this->result.size(); + unsigned delete_num = 0, save_num = 0; + + Varset this_varset = this->getAllVarset(); + int this_id_cols = this->id_varset.getVarsetSize(); + + for (unsigned i = 0; save_num < limit_number && i < original_size-delete_num;) { + EvalMultitypeValue ret_femv = doComp(filter, this->result[i], this_id_cols, kvstore, this_varset); + if (ret_femv.datatype == EvalMultitypeValue::xsd_boolean && ret_femv.bool_value.value == 
EvalMultitypeValue::EffectiveBooleanValue::true_value) { + ++i; + ++save_num; + } else { + swap(this->result[i], this->result[original_size - 1 - delete_num]); + ++delete_num; + } + } + + this->result.erase(this->result.begin()+(save_num), this->result.end()); + this->result.shrink_to_fit(); } - -void TempResult::doBind(const GroupPattern::Bind &bind, KVstore *kvstore, StringIndex *stringindex, Varset &entity_literal_varset) +void TempResult::doBind(const GroupPattern::Bind &bind, KVstore *kvstore, Varset &entity_literal_varset) { Varset this_varset = this->getAllVarset(); int this_id_cols = this->id_varset.getVarsetSize(); this->str_varset.addVar(bind.var); for (int i = 0; i < (int)this->result.size(); i++) { - EvalMultitypeValue ret_femv = doComp(bind.bindExpr, this->result[i], this_id_cols, stringindex, this_varset); + EvalMultitypeValue ret_femv = doComp(bind.bindExpr, this->result[i], this_id_cols, kvstore, this_varset); this->result[i].str.push_back(ret_femv.getRep()); } return; @@ -1518,22 +1498,21 @@ void TempResultSet::doMinus(TempResultSet &x, TempResultSet &r, StringIndex *str printf("after doMinus, used %ld ms.\n", tv_end - tv_begin); } -// void TempResultSet::doFilter(GroupPattern::FilterTree::FilterTreeNode &filter, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset) -void TempResultSet::doFilter(const CompTreeNode &filter, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset) -{ - long tv_begin = Util::get_cur_time(); +void TempResultSet::doFilter(const CompTreeNode &filter, KVstore *kvstore, Varset &entity_literal_varset, unsigned limit_num) { + unsigned before_size = results[0].result.size(); + long tv_begin = Util::get_cur_time(); - for (int i = 0; i < (int)this->results.size(); i++) - { - int pos = r.findCompatibleResult(this->results[i].id_varset, this->results[i].str_varset); - this->results[i].doFilter(filter, r.results[pos], stringindex, entity_literal_varset); - } + for (int i = 0; i < (int) this->results.size(); i++) { + this->results[i].doFilter(filter, kvstore, entity_literal_varset, limit_num); + } - long tv_end = Util::get_cur_time(); - printf("after doFilter, used %ld ms.\n", tv_end - tv_begin); + long tv_end = Util::get_cur_time(); + unsigned after_size = results[0].result.size(); + printf("after doFilter, used %ld ms. 
", tv_end - tv_begin); + printf("before filter size %d, after filter size %d.\n", before_size, after_size); } -void TempResultSet::doBind(const GroupPattern::Bind &bind, KVstore *kvstore, StringIndex *stringindex, Varset &entity_literal_varset) +void TempResultSet::doBind(const GroupPattern::Bind &bind, KVstore *kvstore, Varset &entity_literal_varset) { long tv_begin = Util::get_cur_time(); @@ -1544,7 +1523,7 @@ void TempResultSet::doBind(const GroupPattern::Bind &bind, KVstore *kvstore, Str } for (int i = 0; i < (int)this->results.size(); i++) - this->results[i].doBind(bind, kvstore, stringindex, entity_literal_varset); + this->results[i].doBind(bind, kvstore,entity_literal_varset); long tv_end = Util::get_cur_time(); printf("after doBind, used %ld ms.\n", tv_end - tv_begin); diff --git a/Query/TempResult.h b/src/Query/TempResult.h similarity index 87% rename from Query/TempResult.h rename to src/Query/TempResult.h index 17e3b698..78542a22 100644 --- a/Query/TempResult.h +++ b/src/Query/TempResult.h @@ -54,10 +54,10 @@ class TempResult void doOptional(std::vector &binding, TempResult &x, TempResult &rn, TempResult &ra, bool add_no_binding); void doMinus(TempResult &x, TempResult &r); - void doFilter(const CompTreeNode &filter, TempResult &r, StringIndex *stringindex, Varset &entity_literal_varset); - void doBind(const GroupPattern::Bind &bind, KVstore *kvstore, StringIndex *stringindex, Varset &entity_literal_varset); + void doFilter(const CompTreeNode &filter, KVstore *kvstore, Varset &entity_literal_varset, unsigned limit_num = std::numeric_limits::max()); + void doBind(const GroupPattern::Bind &bind, KVstore *kvstore, Varset &entity_literal_varset); - EvalMultitypeValue doComp(const CompTreeNode &root, ResultPair &row, int id_cols, StringIndex *stringindex, Varset &this_varset); + EvalMultitypeValue doComp(const CompTreeNode &root, ResultPair &row, int id_cols, KVstore *kvstore, Varset &this_varset); void print(int no=-1); private: @@ -85,8 +85,8 @@ class TempResultSet void doUnion(TempResultSet &x, TempResultSet &r); void doOptional(TempResultSet &x, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset); void doMinus(TempResultSet &x, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset); - void doFilter(const CompTreeNode &filter, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset); - void doBind(const GroupPattern::Bind &bind, KVstore *kvstore, StringIndex *stringindex, Varset &entity_literal_varset); + void doFilter(const CompTreeNode &filter, KVstore *kvstore, Varset &entity_literal_varset, unsigned limit_num = std::numeric_limits::max()); + void doBind(const GroupPattern::Bind &bind, KVstore *kvstore, Varset &entity_literal_varset); void doProjection1(Varset &proj, TempResultSet &r, StringIndex *stringindex, Varset &entity_literal_varset); void doDistinct1(TempResultSet &r); diff --git a/Query/Varset.cpp b/src/Query/Varset.cpp similarity index 100% rename from Query/Varset.cpp rename to src/Query/Varset.cpp diff --git a/Query/Varset.h b/src/Query/Varset.h similarity index 100% rename from Query/Varset.h rename to src/Query/Varset.h diff --git a/Query/topk/DPB/DynamicTrie.cpp b/src/Query/topk/DPB/DynamicTrie.cpp similarity index 100% rename from Query/topk/DPB/DynamicTrie.cpp rename to src/Query/topk/DPB/DynamicTrie.cpp diff --git a/Query/topk/DPB/DynamicTrie.h b/src/Query/topk/DPB/DynamicTrie.h similarity index 100% rename from Query/topk/DPB/DynamicTrie.h rename to src/Query/topk/DPB/DynamicTrie.h diff --git 
a/Query/topk/DPB/OrderedList.cpp b/src/Query/topk/DPB/OrderedList.cpp similarity index 100% rename from Query/topk/DPB/OrderedList.cpp rename to src/Query/topk/DPB/OrderedList.cpp diff --git a/Query/topk/DPB/OrderedList.h b/src/Query/topk/DPB/OrderedList.h similarity index 100% rename from Query/topk/DPB/OrderedList.h rename to src/Query/topk/DPB/OrderedList.h diff --git a/Query/topk/DPB/Pool.cpp b/src/Query/topk/DPB/Pool.cpp similarity index 100% rename from Query/topk/DPB/Pool.cpp rename to src/Query/topk/DPB/Pool.cpp diff --git a/Query/topk/DPB/Pool.h b/src/Query/topk/DPB/Pool.h similarity index 100% rename from Query/topk/DPB/Pool.h rename to src/Query/topk/DPB/Pool.h diff --git a/Query/topk/DPBTopKUtil.cpp b/src/Query/topk/DPBTopKUtil.cpp similarity index 100% rename from Query/topk/DPBTopKUtil.cpp rename to src/Query/topk/DPBTopKUtil.cpp diff --git a/Query/topk/DPBTopKUtil.h b/src/Query/topk/DPBTopKUtil.h similarity index 100% rename from Query/topk/DPBTopKUtil.h rename to src/Query/topk/DPBTopKUtil.h diff --git a/Query/topk/MinMaxHeap.hpp b/src/Query/topk/MinMaxHeap.hpp similarity index 100% rename from Query/topk/MinMaxHeap.hpp rename to src/Query/topk/MinMaxHeap.hpp diff --git a/Query/topk/TopKSearchPlan.cpp b/src/Query/topk/TopKSearchPlan.cpp similarity index 100% rename from Query/topk/TopKSearchPlan.cpp rename to src/Query/topk/TopKSearchPlan.cpp diff --git a/Query/topk/TopKSearchPlan.h b/src/Query/topk/TopKSearchPlan.h similarity index 100% rename from Query/topk/TopKSearchPlan.h rename to src/Query/topk/TopKSearchPlan.h diff --git a/Query/topk/TopKUtil.cpp b/src/Query/topk/TopKUtil.cpp similarity index 100% rename from Query/topk/TopKUtil.cpp rename to src/Query/topk/TopKUtil.cpp diff --git a/Query/topk/TopKUtil.h b/src/Query/topk/TopKUtil.h similarity index 100% rename from Query/topk/TopKUtil.h rename to src/Query/topk/TopKUtil.h diff --git a/Server/APIServer.h b/src/Server/APIServer.h similarity index 100% rename from Server/APIServer.h rename to src/Server/APIServer.h diff --git a/src/Server/CMakeLists.txt b/src/Server/CMakeLists.txt new file mode 100644 index 00000000..f726d0b1 --- /dev/null +++ b/src/Server/CMakeLists.txt @@ -0,0 +1,5 @@ +add_library(gstore_server OBJECT + Operation.cpp + Server.cpp + Socket.cpp +) \ No newline at end of file diff --git a/Server/LICENSE b/src/Server/LICENSE similarity index 100% rename from Server/LICENSE rename to src/Server/LICENSE diff --git a/Server/MultipartParser.hpp b/src/Server/MultipartParser.hpp similarity index 100% rename from Server/MultipartParser.hpp rename to src/Server/MultipartParser.hpp diff --git a/Server/Operation.cpp b/src/Server/Operation.cpp similarity index 100% rename from Server/Operation.cpp rename to src/Server/Operation.cpp diff --git a/Server/Operation.h b/src/Server/Operation.h similarity index 100% rename from Server/Operation.h rename to src/Server/Operation.h diff --git a/Server/README.md b/src/Server/README.md similarity index 100% rename from Server/README.md rename to src/Server/README.md diff --git a/Server/Server.cpp b/src/Server/Server.cpp similarity index 100% rename from Server/Server.cpp rename to src/Server/Server.cpp diff --git a/Server/Server.h b/src/Server/Server.h similarity index 100% rename from Server/Server.h rename to src/Server/Server.h diff --git a/Server/Socket.cpp b/src/Server/Socket.cpp similarity index 100% rename from Server/Socket.cpp rename to src/Server/Socket.cpp diff --git a/Server/Socket.h b/src/Server/Socket.h similarity index 87% rename from Server/Socket.h rename to 
src/Server/Socket.h index 8acdd2d9..c68bbf6d 100644 --- a/Server/Socket.h +++ b/src/Server/Socket.h @@ -9,11 +9,17 @@ #define _SERVER_SOCKET_H #include "../Util/Util.h" -#include "../tools/rapidjson/document.h" -#include "../tools/rapidjson/prettywriter.h" -#include "../tools/rapidjson/writer.h" -#include "../tools/rapidjson/stringbuffer.h" -#include "../tools/rapidjson/error/en.h" + +// #include "../tools/rapidjson/document.h" +// #include "../tools/rapidjson/prettywriter.h" +// #include "../tools/rapidjson/writer.h" +// #include "../tools/rapidjson/stringbuffer.h" +// #include "../tools/rapidjson/error/en.h" +#include "rapidjson/document.h" +#include "rapidjson/prettywriter.h" +#include "rapidjson/writer.h" +#include "rapidjson/stringbuffer.h" +#include "rapidjson/error/en.h" #define BUFFER_SIZE 131072 /**< The socket send/recv buffer size. */ diff --git a/Server/client_http.hpp b/src/Server/client_http.hpp similarity index 100% rename from Server/client_http.hpp rename to src/Server/client_http.hpp diff --git a/Server/server_http.hpp b/src/Server/server_http.hpp similarity index 100% rename from Server/server_http.hpp rename to src/Server/server_http.hpp diff --git a/Server/web/PHPAPIExample.php b/src/Server/web/PHPAPIExample.php similarity index 100% rename from Server/web/PHPAPIExample.php rename to src/Server/web/PHPAPIExample.php diff --git a/Server/web/admin.html b/src/Server/web/admin.html similarity index 100% rename from Server/web/admin.html rename to src/Server/web/admin.html diff --git a/Server/web/admin.js b/src/Server/web/admin.js similarity index 100% rename from Server/web/admin.js rename to src/Server/web/admin.js diff --git a/Server/web/admin_root.html b/src/Server/web/admin_root.html similarity index 100% rename from Server/web/admin_root.html rename to src/Server/web/admin_root.html diff --git a/Server/web/api.html b/src/Server/web/api.html similarity index 99% rename from Server/web/api.html rename to src/Server/web/api.html index 29ec8b65..61775386 100644 Binary files a/Server/web/api.html and b/src/Server/web/api.html differ diff --git a/Server/web/css/bootstrap.min.css b/src/Server/web/css/bootstrap.min.css similarity index 100% rename from Server/web/css/bootstrap.min.css rename to src/Server/web/css/bootstrap.min.css diff --git a/Server/web/css/cateIns.css b/src/Server/web/css/cateIns.css similarity index 100% rename from Server/web/css/cateIns.css rename to src/Server/web/css/cateIns.css diff --git a/Server/web/css/d3-context-menu.css b/src/Server/web/css/d3-context-menu.css similarity index 100% rename from Server/web/css/d3-context-menu.css rename to src/Server/web/css/d3-context-menu.css diff --git a/Server/web/css/material-design-color-palette.min.css b/src/Server/web/css/material-design-color-palette.min.css similarity index 100% rename from Server/web/css/material-design-color-palette.min.css rename to src/Server/web/css/material-design-color-palette.min.css diff --git a/Server/web/css/spinner.scss b/src/Server/web/css/spinner.scss similarity index 100% rename from Server/web/css/spinner.scss rename to src/Server/web/css/spinner.scss diff --git a/Server/web/css/typeaheadjs.css b/src/Server/web/css/typeaheadjs.css similarity index 100% rename from Server/web/css/typeaheadjs.css rename to src/Server/web/css/typeaheadjs.css diff --git a/Server/web/dbpedia/index.html b/src/Server/web/dbpedia/index.html similarity index 100% rename from Server/web/dbpedia/index.html rename to src/Server/web/dbpedia/index.html diff --git a/Server/web/dbpedia/index.js 
b/src/Server/web/dbpedia/index.js similarity index 100% rename from Server/web/dbpedia/index.js rename to src/Server/web/dbpedia/index.js diff --git a/Server/web/favicon.ico b/src/Server/web/favicon.ico similarity index 100% rename from Server/web/favicon.ico rename to src/Server/web/favicon.ico diff --git a/Server/web/freebase/index.html b/src/Server/web/freebase/index.html similarity index 100% rename from Server/web/freebase/index.html rename to src/Server/web/freebase/index.html diff --git a/Server/web/freebase/index.js b/src/Server/web/freebase/index.js similarity index 100% rename from Server/web/freebase/index.js rename to src/Server/web/freebase/index.js diff --git a/Server/web/index.html b/src/Server/web/index.html similarity index 100% rename from Server/web/index.html rename to src/Server/web/index.html diff --git a/Server/web/index.js b/src/Server/web/index.js similarity index 100% rename from Server/web/index.js rename to src/Server/web/index.js diff --git a/Server/web/js/bootstrap.min.js b/src/Server/web/js/bootstrap.min.js similarity index 100% rename from Server/web/js/bootstrap.min.js rename to src/Server/web/js/bootstrap.min.js diff --git a/Server/web/js/categoryInstance.js b/src/Server/web/js/categoryInstance.js similarity index 100% rename from Server/web/js/categoryInstance.js rename to src/Server/web/js/categoryInstance.js diff --git a/Server/web/js/comm_hz.js b/src/Server/web/js/comm_hz.js similarity index 100% rename from Server/web/js/comm_hz.js rename to src/Server/web/js/comm_hz.js diff --git a/Server/web/js/d3-context-menu.js b/src/Server/web/js/d3-context-menu.js similarity index 100% rename from Server/web/js/d3-context-menu.js rename to src/Server/web/js/d3-context-menu.js diff --git a/Server/web/js/d3.min.js b/src/Server/web/js/d3.min.js similarity index 100% rename from Server/web/js/d3.min.js rename to src/Server/web/js/d3.min.js diff --git a/Server/web/js/entityTriple.js b/src/Server/web/js/entityTriple.js similarity index 100% rename from Server/web/js/entityTriple.js rename to src/Server/web/js/entityTriple.js diff --git a/Server/web/js/handlebars.js b/src/Server/web/js/handlebars.js similarity index 99% rename from Server/web/js/handlebars.js rename to src/Server/web/js/handlebars.js index 1c64ca50..f72da56e 100644 --- a/Server/web/js/handlebars.js +++ b/src/Server/web/js/handlebars.js @@ -42,7 +42,7 @@ var __module4__ = (function() { return __exports__; })(); -// handlebars/utils.js +// handlebars/scripts.js var __module3__ = (function(__dependency1__) { "use strict"; var __exports__ = {}; diff --git a/Server/web/js/jquery.min.js b/src/Server/web/js/jquery.min.js similarity index 100% rename from Server/web/js/jquery.min.js rename to src/Server/web/js/jquery.min.js diff --git a/Server/web/js/progressbar.js b/src/Server/web/js/progressbar.js similarity index 99% rename from Server/web/js/progressbar.js rename to src/Server/web/js/progressbar.js index d3ffd0f7..029deadb 100644 --- a/Server/web/js/progressbar.js +++ b/src/Server/web/js/progressbar.js @@ -1659,7 +1659,7 @@ var Tweenable = (function () { // Circle shaped progress bar var Shape = require('./shape'); -var utils = require('./utils'); +var utils = require('./scripts'); var Circle = function Circle(container, options) { // Use two arcs to form a circle @@ -1701,7 +1701,7 @@ module.exports = Circle; // Line shaped progress bar var Shape = require('./shape'); -var utils = require('./utils'); +var utils = require('./scripts'); var Line = function Line(container, options) { this._pathTemplate = 'M 
0,{center} L 100,{center}'; @@ -1743,15 +1743,15 @@ module.exports = { // Undocumented. Shape: require('./shape'), - // Internal utils, undocumented. - utils: require('./utils') + // Internal scripts, undocumented. + utils: require('./scripts') }; },{"./circle":2,"./line":3,"./path":5,"./semicircle":6,"./shape":7,"./utils":8}],5:[function(require,module,exports){ // Lower level API to animate any kind of svg path var Tweenable = require('shifty'); -var utils = require('./utils'); +var utils = require('./scripts'); var EASING_ALIASES = { easeIn: 'easeInCubic', @@ -1921,7 +1921,7 @@ module.exports = Path; var Shape = require('./shape'); var Circle = require('./circle'); -var utils = require('./utils'); +var utils = require('./scripts'); var SemiCircle = function SemiCircle(container, options) { // Use one arc to form a SemiCircle @@ -1970,7 +1970,7 @@ module.exports = SemiCircle; // Base object for different progress bar shapes var Path = require('./path'); -var utils = require('./utils'); +var utils = require('./scripts'); var DESTROYED_ERROR = 'Object is destroyed'; diff --git a/Server/web/js/search_typeahead.js b/src/Server/web/js/search_typeahead.js similarity index 100% rename from Server/web/js/search_typeahead.js rename to src/Server/web/js/search_typeahead.js diff --git a/Server/web/js/typeahead.bundle.js b/src/Server/web/js/typeahead.bundle.js similarity index 100% rename from Server/web/js/typeahead.bundle.js rename to src/Server/web/js/typeahead.bundle.js diff --git a/Server/web/json/database.php b/src/Server/web/json/database.php similarity index 100% rename from Server/web/json/database.php rename to src/Server/web/json/database.php diff --git a/Server/web/json/getCateInsJson.php b/src/Server/web/json/getCateInsJson.php similarity index 100% rename from Server/web/json/getCateInsJson.php rename to src/Server/web/json/getCateInsJson.php diff --git a/Server/web/json/getEntityCatesJson.php b/src/Server/web/json/getEntityCatesJson.php similarity index 100% rename from Server/web/json/getEntityCatesJson.php rename to src/Server/web/json/getEntityCatesJson.php diff --git a/Server/web/json/getImage.php b/src/Server/web/json/getImage.php similarity index 100% rename from Server/web/json/getImage.php rename to src/Server/web/json/getImage.php diff --git a/Server/web/json/getTripleJson.php b/src/Server/web/json/getTripleJson.php similarity index 100% rename from Server/web/json/getTripleJson.php rename to src/Server/web/json/getTripleJson.php diff --git a/Server/web/json/getTypeaheadCateNames.php b/src/Server/web/json/getTypeaheadCateNames.php similarity index 100% rename from Server/web/json/getTypeaheadCateNames.php rename to src/Server/web/json/getTypeaheadCateNames.php diff --git a/Server/web/json/getTypeaheadEntityNames.php b/src/Server/web/json/getTypeaheadEntityNames.php similarity index 100% rename from Server/web/json/getTypeaheadEntityNames.php rename to src/Server/web/json/getTypeaheadEntityNames.php diff --git a/Server/web/json/php_db_com.php b/src/Server/web/json/php_db_com.php similarity index 100% rename from Server/web/json/php_db_com.php rename to src/Server/web/json/php_db_com.php diff --git a/Server/web/login.html b/src/Server/web/login.html similarity index 100% rename from Server/web/login.html rename to src/Server/web/login.html diff --git a/Server/web/login.js b/src/Server/web/login.js similarity index 100% rename from Server/web/login.js rename to src/Server/web/login.js diff --git a/Server/web/openkg.html b/src/Server/web/openkg.html similarity index 100% rename 
from Server/web/openkg.html rename to src/Server/web/openkg.html diff --git a/Server/web/openkg/breastcancer0/index.html b/src/Server/web/openkg/breastcancer0/index.html similarity index 100% rename from Server/web/openkg/breastcancer0/index.html rename to src/Server/web/openkg/breastcancer0/index.html diff --git a/Server/web/openkg/breastcancer0/index.js b/src/Server/web/openkg/breastcancer0/index.js similarity index 100% rename from Server/web/openkg/breastcancer0/index.js rename to src/Server/web/openkg/breastcancer0/index.js diff --git a/Server/web/openkg/breastcancer1/index.html b/src/Server/web/openkg/breastcancer1/index.html similarity index 100% rename from Server/web/openkg/breastcancer1/index.html rename to src/Server/web/openkg/breastcancer1/index.html diff --git a/Server/web/openkg/breastcancer1/index.js b/src/Server/web/openkg/breastcancer1/index.js similarity index 100% rename from Server/web/openkg/breastcancer1/index.js rename to src/Server/web/openkg/breastcancer1/index.js diff --git a/Server/web/openkg/breastcancer2/index.html b/src/Server/web/openkg/breastcancer2/index.html similarity index 100% rename from Server/web/openkg/breastcancer2/index.html rename to src/Server/web/openkg/breastcancer2/index.html diff --git a/Server/web/openkg/breastcancer2/index.js b/src/Server/web/openkg/breastcancer2/index.js similarity index 100% rename from Server/web/openkg/breastcancer2/index.js rename to src/Server/web/openkg/breastcancer2/index.js diff --git a/Server/web/openkg/breastcancer3/index.html b/src/Server/web/openkg/breastcancer3/index.html similarity index 100% rename from Server/web/openkg/breastcancer3/index.html rename to src/Server/web/openkg/breastcancer3/index.html diff --git a/Server/web/openkg/breastcancer3/index.js b/src/Server/web/openkg/breastcancer3/index.js similarity index 100% rename from Server/web/openkg/breastcancer3/index.js rename to src/Server/web/openkg/breastcancer3/index.js diff --git a/Server/web/openkg/breastcancer4/index.html b/src/Server/web/openkg/breastcancer4/index.html similarity index 100% rename from Server/web/openkg/breastcancer4/index.html rename to src/Server/web/openkg/breastcancer4/index.html diff --git a/Server/web/openkg/breastcancer4/index.js b/src/Server/web/openkg/breastcancer4/index.js similarity index 100% rename from Server/web/openkg/breastcancer4/index.js rename to src/Server/web/openkg/breastcancer4/index.js diff --git a/Server/web/openkg/clinga/index.html b/src/Server/web/openkg/clinga/index.html similarity index 100% rename from Server/web/openkg/clinga/index.html rename to src/Server/web/openkg/clinga/index.html diff --git a/Server/web/openkg/clinga/index.js b/src/Server/web/openkg/clinga/index.js similarity index 100% rename from Server/web/openkg/clinga/index.js rename to src/Server/web/openkg/clinga/index.js diff --git a/Server/web/openkg/emergency/index.html b/src/Server/web/openkg/emergency/index.html similarity index 100% rename from Server/web/openkg/emergency/index.html rename to src/Server/web/openkg/emergency/index.html diff --git a/Server/web/openkg/emergency/index.js b/src/Server/web/openkg/emergency/index.js similarity index 100% rename from Server/web/openkg/emergency/index.js rename to src/Server/web/openkg/emergency/index.js diff --git a/Server/web/openkg/music/index.html b/src/Server/web/openkg/music/index.html similarity index 100% rename from Server/web/openkg/music/index.html rename to src/Server/web/openkg/music/index.html diff --git a/Server/web/openkg/music/index.js 
b/src/Server/web/openkg/music/index.js similarity index 100% rename from Server/web/openkg/music/index.js rename to src/Server/web/openkg/music/index.js diff --git a/Server/web/openkg/tourist/index.html b/src/Server/web/openkg/tourist/index.html similarity index 100% rename from Server/web/openkg/tourist/index.html rename to src/Server/web/openkg/tourist/index.html diff --git a/Server/web/openkg/tourist/index.js b/src/Server/web/openkg/tourist/index.js similarity index 100% rename from Server/web/openkg/tourist/index.js rename to src/Server/web/openkg/tourist/index.js diff --git a/Server/web/style.css b/src/Server/web/style.css similarity index 100% rename from Server/web/style.css rename to src/Server/web/style.css diff --git a/Server/web/test.html b/src/Server/web/test.html similarity index 100% rename from Server/web/test.html rename to src/Server/web/test.html diff --git a/Server/web/test.js b/src/Server/web/test.js similarity index 100% rename from Server/web/test.js rename to src/Server/web/test.js diff --git a/Server/web/view.css b/src/Server/web/view.css similarity index 100% rename from Server/web/view.css rename to src/Server/web/view.css diff --git a/Server/web/view.js b/src/Server/web/view.js similarity index 100% rename from Server/web/view.js rename to src/Server/web/view.js diff --git a/src/Signature/CMakeLists.txt b/src/Signature/CMakeLists.txt new file mode 100644 index 00000000..7eb38dff --- /dev/null +++ b/src/Signature/CMakeLists.txt @@ -0,0 +1,4 @@ +add_library(gstore_signature OBJECT + SigEntry.cpp + Signature.cpp +) \ No newline at end of file diff --git a/Signature/SigEntry.cpp b/src/Signature/SigEntry.cpp similarity index 100% rename from Signature/SigEntry.cpp rename to src/Signature/SigEntry.cpp diff --git a/Signature/SigEntry.h b/src/Signature/SigEntry.h similarity index 100% rename from Signature/SigEntry.h rename to src/Signature/SigEntry.h diff --git a/Signature/Signature.cpp b/src/Signature/Signature.cpp similarity index 100% rename from Signature/Signature.cpp rename to src/Signature/Signature.cpp diff --git a/Signature/Signature.h b/src/Signature/Signature.h similarity index 100% rename from Signature/Signature.h rename to src/Signature/Signature.h diff --git a/src/StringIndex/CMakeLists.txt b/src/StringIndex/CMakeLists.txt new file mode 100644 index 00000000..34a716d1 --- /dev/null +++ b/src/StringIndex/CMakeLists.txt @@ -0,0 +1,3 @@ +add_library(gstore_stringindex OBJECT + StringIndex.cpp +) \ No newline at end of file diff --git a/StringIndex/StringIndex.cpp b/src/StringIndex/StringIndex.cpp similarity index 99% rename from StringIndex/StringIndex.cpp rename to src/StringIndex/StringIndex.cpp index fcb264f4..fe147731 100644 --- a/StringIndex/StringIndex.cpp +++ b/src/StringIndex/StringIndex.cpp @@ -344,6 +344,7 @@ unsigned StringIndex::getNum(StringIndexFile::StringIndexFileType _type) if (_type == StringIndexFile::Predicate) return this->predicate.getNum(); assert(false); + return -1; // never reach here } void StringIndex::save(KVstore &kv_store) diff --git a/StringIndex/StringIndex.h b/src/StringIndex/StringIndex.h similarity index 100% rename from StringIndex/StringIndex.h rename to src/StringIndex/StringIndex.h diff --git a/src/Trie/CMakeLists.txt b/src/Trie/CMakeLists.txt new file mode 100644 index 00000000..e0908c4f --- /dev/null +++ b/src/Trie/CMakeLists.txt @@ -0,0 +1,4 @@ +add_library(gstore_trie OBJECT + Trie.cpp + TrieNode.cpp +) \ No newline at end of file diff --git a/Trie/Trie.cpp b/src/Trie/Trie.cpp similarity index 100% rename from 
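The `return -1;` added after `assert(false)` in StringIndex::getNum is worth a remark: under NDEBUG the assert expands to nothing, and flowing off the end of a value-returning function is undefined behaviour (and typically a -Wreturn-type warning), so an unreachable return is the usual fix. Since getNum returns unsigned, the -1 converts to the maximum unsigned value. A tiny standalone example of the same pattern (the names here are illustrative, not from the diff):

```cpp
// Why the unreachable "return -1;" matters: with NDEBUG, assert(false) compiles
// away, so without the trailing return this non-void function could fall off the
// end. Returning -1 from an unsigned function yields the maximum unsigned value.
#include <cassert>

enum class Kind { Entity, Literal, Predicate };

unsigned pick(Kind k, unsigned entities, unsigned literals, unsigned predicates)
{
    if (k == Kind::Entity)    return entities;
    if (k == Kind::Literal)   return literals;
    if (k == Kind::Predicate) return predicates;
    assert(false);   // unreachable for valid inputs
    return -1;       // keeps release builds well-defined and warning-free
}
```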
Trie/Trie.cpp rename to src/Trie/Trie.cpp diff --git a/Trie/Trie.h b/src/Trie/Trie.h similarity index 100% rename from Trie/Trie.h rename to src/Trie/Trie.h diff --git a/Trie/TrieNode.cpp b/src/Trie/TrieNode.cpp similarity index 100% rename from Trie/TrieNode.cpp rename to src/Trie/TrieNode.cpp diff --git a/Trie/TrieNode.h b/src/Trie/TrieNode.h similarity index 100% rename from Trie/TrieNode.h rename to src/Trie/TrieNode.h diff --git a/src/Util/Backward.cpp b/src/Util/Backward.cpp new file mode 100644 index 00000000..12d2d9e1 --- /dev/null +++ b/src/Util/Backward.cpp @@ -0,0 +1,40 @@ +// Pick your poison. +// +// On GNU/Linux, you have few choices to get the most out of your stack trace. +// +// By default you get: +// - object filename +// - function name +// +// In order to add: +// - source filename +// - line and column numbers +// - source code snippet (assuming the file is accessible) + +// Install one of the following libraries then uncomment one of the macro (or +// better, add the detection of the lib and the macro definition in your build +// system) + +// - apt-get install libdw-dev ... +// - g++/clang++ -ldw ... +// #define BACKWARD_HAS_DW 1 + +// - apt-get install binutils-dev ... +// - g++/clang++ -lbfd ... +// #define BACKWARD_HAS_BFD 1 + +// - apt-get install libdwarf-dev ... +// - g++/clang++ -ldwarf ... +// #define BACKWARD_HAS_DWARF 1 + +// Regardless of the library you choose to read the debug information, +// for potentially more detailed stack traces you can use libunwind +// - apt-get install libunwind-dev +// - g++/clang++ -lunwind +// #define BACKWARD_HAS_LIBUNWIND 1 + +#include "backward.hpp" + +namespace backward { + backward::SignalHandling sh; +} \ No newline at end of file diff --git a/Util/Bstr.cpp b/src/Util/Bstr.cpp similarity index 100% rename from Util/Bstr.cpp rename to src/Util/Bstr.cpp diff --git a/Util/Bstr.h b/src/Util/Bstr.h similarity index 100% rename from Util/Bstr.h rename to src/Util/Bstr.h diff --git a/src/Util/CMakeLists.txt b/src/Util/CMakeLists.txt new file mode 100644 index 00000000..90194f94 --- /dev/null +++ b/src/Util/CMakeLists.txt @@ -0,0 +1,27 @@ +add_library(gstore_util OBJECT + Util.cpp + WebUrl.cpp + INIParser.cpp + Slog.cpp + Stream.cpp + Bstr.cpp + Triple.cpp + VList.cpp + EvalMultitypeValue.cpp + Version.cpp + SpinLock.cpp + GraphLock.cpp + Transaction.cpp + IDTriple.cpp + Latch.cpp + IPWhiteList.cpp + IPBlackList.cpp + OrderedVector.cpp + CompressFileUtil.cpp + Backward.cpp +) + +# set definition if has debug info +if (CMAKE_BUILD_TYPE MATCHES "Deb") + target_compile_definitions(gstore_util PRIVATE BACKWARD_HAS_DWARF=1) +endif () \ No newline at end of file diff --git a/Util/ClassForVlistCache.h b/src/Util/ClassForVlistCache.h similarity index 100% rename from Util/ClassForVlistCache.h rename to src/Util/ClassForVlistCache.h diff --git a/Util/CompressFileUtil.cpp b/src/Util/CompressFileUtil.cpp similarity index 78% rename from Util/CompressFileUtil.cpp rename to src/Util/CompressFileUtil.cpp index eb32ffd6..66a3bc1c 100644 --- a/Util/CompressFileUtil.cpp +++ b/src/Util/CompressFileUtil.cpp @@ -290,4 +290,77 @@ namespace CompressUtil unzClose(unfile); return UnZipOK; } + + int GzipHelper::compress(const std::string *data, void *compress_data, size_t &compress_size) + { + z_stream c_stream; + int err = 0; + unsigned int size = data->size(); + if (data && size > 0) + { + c_stream.zalloc = (alloc_func)0; + c_stream.zfree = (free_func)0; + c_stream.opaque = (voidpf)0; + if (deflateInit2(&c_stream, Z_DEFAULT_COMPRESSION, Z_DEFLATED, 
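src/Util/Backward.cpp above only instantiates backward::SignalHandling, so stack traces are printed when a fatal signal arrives; the BACKWARD_HAS_* macros described in its comments (and the BACKWARD_HAS_DWARF=1 that src/Util/CMakeLists.txt defines for debug builds) decide how much detail each frame gets. As a rough sketch under those assumptions, the same library can also dump a trace on demand; dump_current_stack below is a hypothetical helper, not part of this change:

    #include <cstdio>
    #include "backward.hpp"   // assumed to be on the include path, as in Backward.cpp

    // Capture and print the current call stack. File/line info and source snippets
    // only appear when one of the BACKWARD_HAS_* backends is enabled at compile time.
    void dump_current_stack()
    {
        backward::StackTrace st;
        st.load_here(32);              // keep at most 32 frames from this point
        backward::Printer printer;
        printer.print(st, stderr);
    }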
MAXWBITS+GZIPENCODING, 8, Z_DEFAULT_STRATEGY) != Z_OK) + return -1; + c_stream.avail_in = size; + c_stream.avail_out= size;//Generally smaller than the encrypted string + c_stream.next_in = (Bytef *)data->c_str(); + c_stream.next_out = (Bytef *)compress_data; + while (c_stream.avail_in != 0 && c_stream.total_out < size) + { + if (deflate(&c_stream, Z_NO_FLUSH) != Z_OK) + return -1; + } + if (c_stream.avail_in != 0) + return c_stream.avail_in; + for (;;) + { + if ((err = deflate(&c_stream, Z_FINISH)) == Z_STREAM_END) + break; + if (err != Z_OK) + return -1; + } + if (deflateEnd(&c_stream) != Z_OK) + return -1; + compress_size = c_stream.total_out; + return 0; + } + return -1; + } + + int GzipHelper::unCompress(const char * data, int size, char *uncompress_data, size_t uncompress_size) + { + z_stream strm; + strm.zalloc = (alloc_func)0; + strm.zfree = (free_func)0; + strm.opaque = (voidpf)0; + strm.avail_in = size; + strm.avail_out= uncompress_size; + strm.next_in = (Bytef *)data; + strm.next_out = (Bytef *)uncompress_data; + int err = -1; + int ret = -1; + err = inflateInit2(&strm, MAXWBITS+GZIPENCODING); + if (err == Z_OK) + { + err = inflate(&strm, Z_FINISH); + if (err == Z_STREAM_END) + { + ret = strm.total_out; + } + else + { + inflateEnd(&strm); + return err; + } + } + else + { + inflateEnd(&strm); + return err; + } + inflateEnd(&strm); + return err; + } } \ No newline at end of file diff --git a/Util/CompressFileUtil.h b/src/Util/CompressFileUtil.h similarity index 81% rename from Util/CompressFileUtil.h rename to src/Util/CompressFileUtil.h index f7df5fe8..b7e3646a 100644 --- a/Util/CompressFileUtil.h +++ b/src/Util/CompressFileUtil.h @@ -5,12 +5,15 @@ #include #include #include -#include "../tools/zlib-1.3/include/unzip.h" -#include "../tools/zlib-1.3/include/zip.h" #include "../Util/Util.h" +#include "minizip/unzip.h" +#include "minizip/zip.h" + #define WRITEBUFFERSIZE (8192) #define MAXFILENAME (512) +#define MAXWBITS 15 +#define GZIPENCODING 16 using namespace std; typedef std::function foreach_cb; @@ -56,6 +59,13 @@ namespace CompressUtil void getFileList(std::vector& files, const std::string& except)const; ZipCode unCompress(); }; + + class GzipHelper + { + public: + static int compress(const std::string *data, void *compress_data, size_t &compress_size); + static int unCompress(const char * data, int size, char *uncompress_data, size_t uncompress_size); + }; } diff --git a/Util/EvalMultitypeValue.cpp b/src/Util/EvalMultitypeValue.cpp similarity index 100% rename from Util/EvalMultitypeValue.cpp rename to src/Util/EvalMultitypeValue.cpp diff --git a/Util/EvalMultitypeValue.h b/src/Util/EvalMultitypeValue.h similarity index 100% rename from Util/EvalMultitypeValue.h rename to src/Util/EvalMultitypeValue.h diff --git a/Util/GraphLock.cpp b/src/Util/GraphLock.cpp similarity index 100% rename from Util/GraphLock.cpp rename to src/Util/GraphLock.cpp diff --git a/Util/GraphLock.h b/src/Util/GraphLock.h similarity index 100% rename from Util/GraphLock.h rename to src/Util/GraphLock.h diff --git a/Util/IDTriple.cpp b/src/Util/IDTriple.cpp similarity index 95% rename from Util/IDTriple.cpp rename to src/Util/IDTriple.cpp index 77276958..8ffbbb99 100644 --- a/Util/IDTriple.cpp +++ b/src/Util/IDTriple.cpp @@ -16,6 +16,7 @@ bool IDTriple::operator < (const IDTriple& a) const } } assert(false); + return false; // never reach here } diff --git a/Util/IDTriple.h b/src/Util/IDTriple.h similarity index 100% rename from Util/IDTriple.h rename to src/Util/IDTriple.h diff --git 
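A usage sketch for the new CompressUtil::GzipHelper follows; the buffer sizes and include path are assumptions, not part of the diff. Two details of the code as written are worth noting for callers: compress() evaluates data->size() before its null check on data, and on success unCompress() returns the zlib status Z_STREAM_END (1) rather than the decompressed length it stores in the otherwise unused ret variable.

    #include <string>
    #include <vector>
    #include "../src/Util/CompressFileUtil.h"   // include path is an assumption

    // Round-trip sketch: gzip-compress a string, then inflate it back.
    int gzip_roundtrip(const std::string &payload)
    {
        using CompressUtil::GzipHelper;

        std::vector<char> packed(payload.size() + 64);  // compress() writes at most payload.size() bytes
        size_t packed_len = 0;
        if (GzipHelper::compress(&payload, packed.data(), packed_len) != 0)
            return -1;                                   // 0 signals success

        std::vector<char> unpacked(payload.size() + 1);  // sized from the known original length
        // returns Z_STREAM_END (1) once decompression has completed
        return GzipHelper::unCompress(packed.data(), static_cast<int>(packed_len),
                                      unpacked.data(), unpacked.size());
    }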
a/Util/INIParser.cpp b/src/Util/INIParser.cpp similarity index 100% rename from Util/INIParser.cpp rename to src/Util/INIParser.cpp diff --git a/Util/INIParser.h b/src/Util/INIParser.h similarity index 100% rename from Util/INIParser.h rename to src/Util/INIParser.h diff --git a/Util/IPBlackList.cpp b/src/Util/IPBlackList.cpp similarity index 100% rename from Util/IPBlackList.cpp rename to src/Util/IPBlackList.cpp diff --git a/Util/IPBlackList.h b/src/Util/IPBlackList.h similarity index 100% rename from Util/IPBlackList.h rename to src/Util/IPBlackList.h diff --git a/Util/IPWhiteList.cpp b/src/Util/IPWhiteList.cpp similarity index 100% rename from Util/IPWhiteList.cpp rename to src/Util/IPWhiteList.cpp diff --git a/Util/IPWhiteList.h b/src/Util/IPWhiteList.h similarity index 100% rename from Util/IPWhiteList.h rename to src/Util/IPWhiteList.h diff --git a/Util/Latch.cpp b/src/Util/Latch.cpp similarity index 100% rename from Util/Latch.cpp rename to src/Util/Latch.cpp diff --git a/Util/Latch.h b/src/Util/Latch.h similarity index 100% rename from Util/Latch.h rename to src/Util/Latch.h diff --git a/Util/MD5.h b/src/Util/MD5.h similarity index 100% rename from Util/MD5.h rename to src/Util/MD5.h diff --git a/Util/OrderedVector.cpp b/src/Util/OrderedVector.cpp similarity index 100% rename from Util/OrderedVector.cpp rename to src/Util/OrderedVector.cpp diff --git a/Util/OrderedVector.h b/src/Util/OrderedVector.h similarity index 100% rename from Util/OrderedVector.h rename to src/Util/OrderedVector.h diff --git a/Util/PrettyPrint.h b/src/Util/PrettyPrint.h similarity index 100% rename from Util/PrettyPrint.h rename to src/Util/PrettyPrint.h diff --git a/Util/Slog.cpp b/src/Util/Slog.cpp similarity index 100% rename from Util/Slog.cpp rename to src/Util/Slog.cpp diff --git a/Util/Slog.h b/src/Util/Slog.h similarity index 100% rename from Util/Slog.h rename to src/Util/Slog.h diff --git a/Util/SpinLock.cpp b/src/Util/SpinLock.cpp similarity index 100% rename from Util/SpinLock.cpp rename to src/Util/SpinLock.cpp diff --git a/Util/SpinLock.h b/src/Util/SpinLock.h similarity index 100% rename from Util/SpinLock.h rename to src/Util/SpinLock.h diff --git a/Util/Stream.cpp b/src/Util/Stream.cpp similarity index 100% rename from Util/Stream.cpp rename to src/Util/Stream.cpp diff --git a/Util/Stream.h b/src/Util/Stream.h similarity index 100% rename from Util/Stream.h rename to src/Util/Stream.h diff --git a/Util/Transaction.cpp b/src/Util/Transaction.cpp similarity index 100% rename from Util/Transaction.cpp rename to src/Util/Transaction.cpp diff --git a/Util/Transaction.h b/src/Util/Transaction.h similarity index 100% rename from Util/Transaction.h rename to src/Util/Transaction.h diff --git a/Util/Triple.cpp b/src/Util/Triple.cpp similarity index 100% rename from Util/Triple.cpp rename to src/Util/Triple.cpp diff --git a/Util/Triple.h b/src/Util/Triple.h similarity index 100% rename from Util/Triple.h rename to src/Util/Triple.h diff --git a/Util/Util.cpp b/src/Util/Util.cpp similarity index 96% rename from Util/Util.cpp rename to src/Util/Util.cpp index 677e491e..10228a26 100644 --- a/Util/Util.cpp +++ b/src/Util/Util.cpp @@ -1029,6 +1029,102 @@ unsigned long long Util::count_dir_size(const char *_dir_path) return total_size; } +bool Util::remove_dir(const std::string dir_path) +{ + DIR* dirp = opendir(dir_path.c_str()); + if (!dirp) + { + return false; + } + struct dirent *dir; + struct stat st; + while ((dir = readdir(dirp)) != NULL) + { + if(strcmp(dir->d_name, ".") == 0 || strcmp(dir->d_name, 
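This Util.cpp hunk goes on (below) to add remove_dir, remove_file and remove_path, which let getSystemOutput and the tests delete temporary files without shelling out to rm. A minimal usage sketch, assuming the post-move include path that the tests use:

    #include <string>
    #include "../src/Util/Util.h"   // include path as used by tests/*.cpp

    // remove_path() dispatches on lstat(): regular files are unlink()ed, directories
    // are removed recursively via remove_dir(), and "." / ".." are refused.
    void drop_scratch_dir(const std::string &dir)
    {
        if (!Util::remove_path(dir))
            SLOG_ERROR("failed to remove " + dir);
    }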
"..") == 0) + { + continue; + } + std::string sub_path = dir_path + '/' + dir->d_name; + if (lstat(sub_path.c_str(),&st) == -1) + { + SLOG_ERROR("rm_dir lstat sub_path:"+sub_path); + continue; + } + if (S_ISDIR(st.st_mode)) + { + if (!remove_dir(sub_path)) + { + closedir(dirp); + return false; + } + rmdir(sub_path.c_str()); + } + else if (S_ISREG(st.st_mode)) + { + unlink(sub_path.c_str()); + } + else + { + SLOG_ERROR("rm_dir st_mode sub_path:"+sub_path); + continue; + } + } + if (rmdir(dir_path.c_str()) == -1) + { + closedir(dirp); + return false; + } + closedir(dirp); + return true; +} + +bool Util::remove_file(const std::string file_path) +{ + struct stat st; + if (lstat(file_path.c_str(), &st) == -1 || !S_ISREG(st.st_mode)) + { + return false; + } + if (unlink(file_path.c_str()) == -1) + { + return false; + } + return true; +} + +bool Util::remove_path(const std::string path) +{ + std::string file_path = path; + struct stat st; + if (lstat(file_path.c_str(), &st) == -1) + { + return false; + } + if (S_ISREG(st.st_mode)) + { + if(unlink(file_path.c_str()) == -1) + { + return false; + } + } + else if (S_ISDIR(st.st_mode)) + { + if (path == "." || path == "..") + { + return false; + } + if (!remove_dir(file_path)) + { + return false; + } + } + else + { + SLOG_ERROR("rm_path st_mode error:"+path); + } + return true; +} + long Util::get_cur_time() { @@ -1362,7 +1458,8 @@ Util::getSystemOutput(string cmd) if(ret < 0) { fprintf(stderr, "system call failed:%s\n", cmd.c_str()); - system(cmd.c_str()); + // system(cmd.c_str()); + Util::remove_path(file); return ""; } @@ -1399,7 +1496,8 @@ Util::getSystemOutput(string cmd) //*find = '\0'; //fclose(fp); //} - system(cmd.c_str()); + // system(cmd.c_str()); + Util::remove_path(file); //cerr<<"ans: "< #include #include - - -//Added for __gnu_parallel::sort #include #include -#include "../tools/rapidjson/document.h" -#include "../tools/rapidjson/prettywriter.h" -#include "../tools/rapidjson/writer.h" -#include "../tools/rapidjson/stringbuffer.h" -#include "INIParser.h" -#include "../tools/indicators/progress_bar.hpp" + +// #include "../tools/rapidjson/document.h" +// #include "../tools/rapidjson/prettywriter.h" +// #include "../tools/rapidjson/writer.h" +// #include "../tools/rapidjson/stringbuffer.h" +// #include "../tools/indicators/progress_bar.hpp" + +#include "rapidjson/document.h" +#include "rapidjson/prettywriter.h" +#include "rapidjson/writer.h" +#include "rapidjson/stringbuffer.h" +#include "indicators/progress_bar.hpp" #include "Latch.h" #include "Slog.h" #include "MD5.h" #include "PrettyPrint.h" +#include "INIParser.h" #define thread_num 1 //below is used to control if using the parallelable sort() @@ -393,6 +397,9 @@ class Util static bool create_file(const std::string _file); static size_t count_lines(const std::string _file, unsigned int _mode=0); static unsigned long long count_dir_size(const char* _dir_path); + static bool remove_dir(const std::string dir_path); + static bool remove_file(const std::string file_path); + static bool remove_path(const std::string path); static std::string getTimeName(); static std::string getTimeString(); diff --git a/Util/Util_New.cpp b/src/Util/Util_New.cpp similarity index 95% rename from Util/Util_New.cpp rename to src/Util/Util_New.cpp index 1264b661..7dfcc1bd 100644 --- a/Util/Util_New.cpp +++ b/src/Util/Util_New.cpp @@ -20,7 +20,7 @@ static string Util_New::getCurrentRootPath() char pwd[255]; char* wd; - //也可以将buffer作为输出参数 + //涔熷彲浠ュ皢buffer浣滀负杈撳嚭鍙傛暟 if ((buffer = getcwd(NULL, 0)) == NULL) { 
perror("getcwd error"); diff --git a/Util/Util_New.h b/src/Util/Util_New.h similarity index 100% rename from Util/Util_New.h rename to src/Util/Util_New.h diff --git a/Util/VList.cpp b/src/Util/VList.cpp similarity index 99% rename from Util/VList.cpp rename to src/Util/VList.cpp index 3fab2dc3..42566e0c 100644 --- a/Util/VList.cpp +++ b/src/Util/VList.cpp @@ -340,7 +340,7 @@ VList::readBstr(char*& _str, unsigned& _len, unsigned* _next) j = 4 - j; fseek(valfp, j, SEEK_CUR); - //NOTICE+DEBUG: I think no need to align here, later no data to read + //NOTICE+DEBUG: I think no need to align here, later no data to read //(if need to read, then fseek again to find a new value) //this->ReadAlign(_next); @@ -376,7 +376,7 @@ VList::writeBstr(const char* _str, unsigned _len, unsigned* _curnum) j = 4 - j; fseek(valfp, j, SEEK_CUR); - //NOTICE+DEBUG: I think no need to align here, later no data to write + //NOTICE+DEBUG: I think no need to align here, later no data to write //(if need to write, then fseek again to write a new value) //this->WriteAlign(_curnum); fseek(valfp, Address(*_curnum), SEEK_SET); diff --git a/Util/VList.h b/src/Util/VList.h similarity index 100% rename from Util/VList.h rename to src/Util/VList.h diff --git a/Util/Version.cpp b/src/Util/Version.cpp similarity index 100% rename from Util/Version.cpp rename to src/Util/Version.cpp diff --git a/Util/Version.h b/src/Util/Version.h similarity index 100% rename from Util/Version.h rename to src/Util/Version.h diff --git a/Util/WebUrl.cpp b/src/Util/WebUrl.cpp similarity index 100% rename from Util/WebUrl.cpp rename to src/Util/WebUrl.cpp diff --git a/Util/WebUrl.h b/src/Util/WebUrl.h similarity index 100% rename from Util/WebUrl.h rename to src/Util/WebUrl.h diff --git a/tests/.gitignore b/tests/.gitignore new file mode 100644 index 00000000..e2ee4346 --- /dev/null +++ b/tests/.gitignore @@ -0,0 +1 @@ +!*.sh \ No newline at end of file diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt new file mode 100644 index 00000000..b3378b32 --- /dev/null +++ b/tests/CMakeLists.txt @@ -0,0 +1,71 @@ +set(LIB_TESTS + # Conan Deps + Boost::system + Boost::regex + Boost::thread + minizip::minizip + OpenSSL::SSL + OpenSSL::Crypto + indicators::indicators + rapidjson + log4cplus::log4cplus + CURL::libcurl + Backward::Backward + libdwarf::libdwarf + libelf::libelf + # System Deps + OpenMP::OpenMP_CXX + ${LIB_JEMALLOC} + ${LIB_READLINE} + Threads::Threads + # Unmanaged Deps + antlr4-runtime + workflow +) + +# set definition if has debug info +if (CMAKE_BUILD_TYPE MATCHES "Deb") + LIST(APPEND LIB_TESTS + libdwarf::libdwarf + libelf::libelf + ) +endif () + +set(OBJ_TESTS + $ + $ + $ + $ + $ + $ + $ + $ + $ +) + +add_executable(update_test update_test.cpp ${OBJ_TESTS}) +target_link_libraries(update_test ${LIB_TESTS}) + +add_executable(dataset_test dataset_test.cpp ${OBJ_TESTS}) +target_link_libraries(dataset_test ${LIB_TESTS}) + +add_executable(transaction_test transaction_test.cpp ${OBJ_TESTS}) +target_link_libraries(transaction_test ${LIB_TESTS}) + +add_executable(run_transaction run_transaction.cpp ${OBJ_TESTS} $) +target_link_libraries(run_transaction ${LIB_TESTS}) + +add_executable(workload workload.cpp ${OBJ_TESTS}) +target_link_libraries(workload ${LIB_TESTS}) + +add_executable(debug_test debug_test.cpp ${OBJ_TESTS}) +target_link_libraries(debug_test ${LIB_TESTS}) + +add_executable(gtest gtest.cpp ${OBJ_TESTS}) +target_link_libraries(gtest ${LIB_TESTS}) + +get_directory_property(gstore_tests DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} 
BUILDSYSTEM_TARGETS) +foreach (test ${gstore_tests}) + add_dependencies(${test} prepare) + install(TARGETS ${test} DESTINATION ${GSTORE_TEST_DIR}) +endforeach () \ No newline at end of file diff --git a/scripts/PyTest/FormatHelper.py b/tests/PyTest/FormatHelper.py similarity index 100% rename from scripts/PyTest/FormatHelper.py rename to tests/PyTest/FormatHelper.py diff --git a/scripts/PyTest/ResultChecker.py b/tests/PyTest/ResultChecker.py similarity index 100% rename from scripts/PyTest/ResultChecker.py rename to tests/PyTest/ResultChecker.py diff --git a/scripts/PyTest/auto_test.py b/tests/PyTest/auto_test.py similarity index 94% rename from scripts/PyTest/auto_test.py rename to tests/PyTest/auto_test.py index acc3eccd..aff3ce57 100644 --- a/scripts/PyTest/auto_test.py +++ b/tests/PyTest/auto_test.py @@ -39,7 +39,7 @@ def TestCase(db_name: str, case_name: str, query_path: str, def ParseTest(): query_num = 50 - data_dir = "scripts/parser_test/" + data_dir = "data/parser_test/" all_passed = True db_name = "parser_test" os.system("bin/gdrop -db " + db_name + " > /dev/null") @@ -49,6 +49,11 @@ def ParseTest(): query_path = data_dir + "parser_q" + str(i) + ".sql" result_path = data_dir + "parser_r" + str(i) + ".txt" + for _f in data_path, query_path, result_path: + if not os.path.exists(_f): + print("File %s not exists" % _f) + return False + os.system("bin/gbuild -db " + db_name + " -f " + data_path + " > /dev/null") case_passed = TestCase(db_name, cast_name, query_path, result_path) all_passed = all_passed and case_passed @@ -61,7 +66,7 @@ def ParseTest(): def BFSTest(): data_set = ['bbug', 'lubm', 'num'] - data_dir = "scripts/bfs_test/" + data_dir = "data/bfs_test/" all_passed = True db_name_gstore = "bfs_test" os.system("bin/gdrop -db " + db_name_gstore + " > /dev/null") @@ -88,8 +93,8 @@ def BFSTest(): def DFSTest(): data_set = ['bbug', 'lubm', 'num'] - data_dir = "scripts/dfs_test/" - full_data_dir = "scripts/bfs_test/" + data_dir = "data/dfs_test/" + full_data_dir = "data/bfs_test/" all_passed = True db_name_gstore = "dfs_test" os.system("bin/gdrop -db " + db_name_gstore + " > /dev/null") diff --git a/scripts/PyTest/standard_result.py b/tests/PyTest/standard_result.py similarity index 100% rename from scripts/PyTest/standard_result.py rename to tests/PyTest/standard_result.py diff --git a/scripts/basic_test.sh b/tests/basic_test.sh similarity index 57% rename from scripts/basic_test.sh rename to tests/basic_test.sh index 9fa2ac7f..20c6b359 100644 --- a/scripts/basic_test.sh +++ b/tests/basic_test.sh @@ -16,6 +16,20 @@ lubm_ans=(15 227393 0 27 5916 15 0 828 27 27 5916) num_ans=(8 0 4 1) small_ans=(2 2 1 27 1 1 1 4 1 5 5) triple_num=(1988 99550 29 25 31) +#gdrop +gdrop(){ +for i in 0 1 2 3 +do + ${op[4]} -db ${db[$i]} > "1.txt" 2>&1 + "rm" "1.txt" + if test -e ${db_home}${db[$i]}.db + then + echo -e "\033[43;35m drop ${db[$i]}.db fails \033[0m" + exit + fi +done +} +gdrop #gbuild echo "gbuild......" 
@@ -26,11 +40,10 @@ do "rm" "1.txt" if test -e ${db_home}${db[$i]}.db/success.txt then - continue else echo -e "\033[43;35m build ${db[$i]}.db fails \033[0m" - exit + exit 1 fi done @@ -44,14 +57,16 @@ do ${op[$j]} -db ${db[$i]} -f ${path}${db[$i]}"/"${db[$i]}".nt" > "1.txt" "rm" "1.txt" done - ${op[1]} ${db[$i]} ${path}"all.sql" > "1.txt" + ${op[1]} -db ${db[$i]} -q ${path}"all.sql" | grep -A 10000 "Finish Database \`${db[$i]}\` Load" > "1.txt" ans=$(grep "There has answer" 1.txt) - if [ ${ans:18:${#ans}-18} -ne ${triple_num[$i]} ] + if [[ ${ans:18:${#ans}-18} -ne ${triple_num[$i]} ]] then echo ${ans} echo -e "\033[43;35m update triples in ${db[$i]}.db has errors \033[0m" - "rm" "1.txt" - exit + # "rm" "1.txt" + exit 2 + else + echo "update triples in ${db[$i]}.db ok" fi "rm" "1.txt" done @@ -61,16 +76,18 @@ gquery(){ # bbug for i in 0 1 2 3 4 5 6 7 do - echo "${op[1]} -db ${db[0]} -q ${path}${db[0]}/${db[0]}${bbug_sql[$i]}.sql" - ${op[1]} -db ${db[0]} -q ${path}${db[0]}"/"${db[0]}${bbug_sql[$i]}".sql" > "1.txt" + # echo "${op[1]} -db ${db[0]} -q ${path}${db[0]}/${db[0]}${bbug_sql[$i]}.sql" + ${op[1]} -db ${db[0]} -q ${path}${db[0]}"/"${db[0]}${bbug_sql[$i]}".sql" | grep -A 10000 "Finish Database \`${db[0]}\` Load" > "1.txt" if [ ${bbug_ans[$i]} -ne -1 ] then ans=$(grep "There has answer" 1.txt) - if [ ${ans:18:${#ans}-18} -ne ${bbug_ans[$i]} ] + if [[ ${ans:18:${#ans}-18} -ne ${bbug_ans[$i]} ]] then echo -e "\033[43;35m query ${db[0]}${bbug_sql[$i]}.sql in ${db[0]}.db has errors \033[0m" "rm" "1.txt" - exit + exit 3 + else + echo "query ${db[0]}${bbug_sql[$i]}.sql in ${db[0]}.db ok" fi fi "rm" "1.txt" @@ -79,14 +96,16 @@ done # lubm for i in 0 1 2 3 4 5 6 7 8 9 10 do - echo "${op[1]} -db ${db[1]} -q ${path}${db[1]}/${db[1]}${lubm_sql[$i]}.sql" - ${op[1]} -db ${db[1]} -q ${path}${db[1]}"/"${db[1]}${lubm_sql[$i]}".sql" > "1.txt" + # echo "${op[1]} -db ${db[1]} -q ${path}${db[1]}/${db[1]}${lubm_sql[$i]}.sql" + ${op[1]} -db ${db[1]} -q ${path}${db[1]}"/"${db[1]}${lubm_sql[$i]}".sql" | grep -A 10000 "Finish Database \`${db[1]}\` Load" > "1.txt" ans=$(grep "There has answer" 1.txt) - if [ ${ans:18:${#ans}-18} -ne ${lubm_ans[$i]} ] + if [[ ${ans:18:${#ans}-18} -ne ${lubm_ans[$i]} ]] then echo -e "\033[43;35m query ${db[1]}${lubm_sql[$i]}.sql in ${db[1]}.db has errors \033[0m" "rm" "1.txt" - exit + exit 4 + else + echo "query ${db[1]}${lubm_sql[$i]}.sql in ${db[1]}.db ok" fi "rm" "1.txt" done @@ -94,32 +113,35 @@ done # num for i in 0 1 2 3 do - echo "${op[1]} -db ${db[2]} -q ${path}${db[2]}/${db[2]}${num_sql[$i]}" - ${op[1]} -db ${db[2]} -q ${path}${db[2]}"/"${db[2]}${num_sql[$i]}".sql" > "1.txt" - ans=$(grep "There has answer" 1.txt) - if [ ${ans:18:${#ans}-18} -ne ${num_ans[$i]} ] - then + # echo "${op[1]} -db ${db[2]} -q ${path}${db[2]}/${db[2]}${num_sql[$i]}" + ${op[1]} -db ${db[2]} -q ${path}${db[2]}"/"${db[2]}${num_sql[$i]}".sql" | grep -A 10000 "Finish Database \`${db[2]}\` Load" > "1.txt" + ans=$(grep "There has answer" 1.txt) + if [[ ${ans:18:${#ans}-18} -ne ${num_ans[$i]} ]] + then echo -e "\033[43;35m query ${db[2]}${num_sql[$i]}.sql in ${db[2]}.db has errors \033[0m" "rm" "1.txt" - exit - fi - "rm" "1.txt" + exit 5 + else + echo "query ${db[2]}${num_sql[$i]}.sql in ${db[2]}.db ok" + fi + "rm" "1.txt" done # small for i in 0 1 2 3 4 5 6 7 8 9 10 do - echo "${op[1]} -db ${db[3]} -q ${path}${db[3]}/${db[3]}${small_sql[$i]}" - ${op[1]} -db ${db[3]} -q ${path}${db[3]}"/"${db[3]}${small_sql[$i]}".sql" > "1.txt" - ans=$(grep "There has answer" 1.txt) - if [ ${ans:18:${#ans}-18} -ne 
${small_ans[$i]} ] - then + # echo "${op[1]} -db ${db[3]} -q ${path}${db[3]}/${db[3]}${small_sql[$i]}" + ${op[1]} -db ${db[3]} -q ${path}${db[3]}"/"${db[3]}${small_sql[$i]}".sql" | grep -A 10000 "Finish Database \`${db[3]}\` Load" > "1.txt" + ans=$(grep "There has answer" 1.txt) + if [[ ${ans:18:${#ans}-18} -ne ${small_ans[$i]} ]] + then echo -e "\033[43;35m query ${db[3]}${small_sql[$i]}.sql in ${db[3]}.db has errors \033[0m" "rm" "1.txt" - exit - fi - "rm" "1.txt" - + exit 6 + else + echo "query ${db[3]}${small_sql[$i]}.sql in ${db[3]}.db ok" + fi + "rm" "1.txt" done } echo "gquery......" @@ -128,40 +150,33 @@ gquery echo "small gadd......" ${op[2]} -db ${db[3]} -f ${path}${db[3]}"/small_add.nt" > "1.txt" "rm" "1.txt" -${op[1]} -db ${db[3]} -q ${path}"all.sql" > "1.txt" +${op[1]} -db ${db[3]} -q ${path}"all.sql" | grep -A 10000 "Finish Database \`${db[3]}\` Load" > "1.txt" ans=$(grep "There has answer" 1.txt) -if [ ${ans:18:${#ans}-18} -ne ${triple_num[4]} ] +if [[ ${ans:18:${#ans}-18} -ne ${triple_num[4]} ]] then echo -e "\033[43;35m update triples in ${db[3]}.db has errors \033[0m" "rm" "1.txt" - exit + exit 7 +else + echo "add triples in ${db[3]}.db ok" fi "rm" "1.txt" echo "small gsub......" ${op[3]} -db ${db[3]} -f ${path}${db[3]}"/small_add.nt" > "1.txt" "rm" "1.txt" -${op[1]} -db ${db[3]} -q ${path}"all.sql" > "1.txt" +${op[1]} -db ${db[3]} -q ${path}"all.sql" | grep -A 10000 "Finish Database \`${db[3]}\` Load" > "1.txt" ans=$(grep "There has answer" 1.txt) -if [ ${ans:18:${#ans}-18} -ne ${triple_num[3]} ] +if [[ ${ans:18:${#ans}-18} -ne ${triple_num[3]} ]] then - echo -e "\033[43;35m update triples in ${db[3]}.db has errors \033[0m" - "rm" "1.txt" - exit + echo -e "\033[43;35m update triples in ${db[3]}.db has errors \033[0m" + "rm" "1.txt" + exit 8 +else + echo "sub triples in ${db[3]}.db ok" fi "rm" "1.txt" -#gdrop echo "gdrop......" -for i in 0 1 2 3 -do - ${op[4]} -db ${db[$i]} > "1.txt" 2>&1 - "rm" "1.txt" - if test -e ${db_home}${db[$i]}.db - then - echo -e "\033[43;35m drop ${db[$i]}.db fails \033[0m" - exit - fi -done - +gdrop echo "Test passed!" 
\ No newline at end of file diff --git a/scripts/dataset_test.cpp b/tests/dataset_test.cpp similarity index 94% rename from scripts/dataset_test.cpp rename to tests/dataset_test.cpp index b2a9af65..1a7392c8 100644 --- a/scripts/dataset_test.cpp +++ b/tests/dataset_test.cpp @@ -6,13 +6,18 @@ # Description: used for size test =============================================================================*/ -#include "../Util/Util.h" -#include "../Database/Database.h" +#include "../src/Util/Util.h" +#include "../src/Database/Database.h" using namespace std; int main(int argc, char * argv[]) { + if (argc < 5) + { + cerr << "Usage: " << argv[0] << " " << endl; + return -1; + } Util util; Database* db; string db_name = string(argv[1]); diff --git a/scripts/debug_test.cpp b/tests/debug_test.cpp similarity index 98% rename from scripts/debug_test.cpp rename to tests/debug_test.cpp index 316c0414..9af3ded5 100644 --- a/scripts/debug_test.cpp +++ b/tests/debug_test.cpp @@ -16,7 +16,8 @@ #include #include #include -#include "../Database/Txn_manager.h" + +#include "../src/Database/Txn_manager.h" void preload4bug(vector& adds, vector& subs, const int _nums) { diff --git a/scripts/demo/full.sh b/tests/demo/full.sh similarity index 100% rename from scripts/demo/full.sh rename to tests/demo/full.sh diff --git a/scripts/demo/lubm.sh b/tests/demo/lubm.sh similarity index 100% rename from scripts/demo/lubm.sh rename to tests/demo/lubm.sh diff --git a/scripts/full_test.sh b/tests/full_test.sh similarity index 100% rename from scripts/full_test.sh rename to tests/full_test.sh diff --git a/scripts/gtest.cpp b/tests/gtest.cpp similarity index 99% rename from scripts/gtest.cpp rename to tests/gtest.cpp index dd386f49..8ad23057 100644 --- a/scripts/gtest.cpp +++ b/tests/gtest.cpp @@ -16,8 +16,8 @@ //#include //#include //#include -#include "../Database/Database.h" -#include "../Util/Util.h" +#include "../src/Database/Database.h" +#include "../src/Util/Util.h" using namespace std; diff --git a/scripts/parser_test.sh b/tests/parser_test.sh similarity index 53% rename from scripts/parser_test.sh rename to tests/parser_test.sh index 44806ffe..e69462ba 100644 --- a/scripts/parser_test.sh +++ b/tests/parser_test.sh @@ -1,8 +1,8 @@ #!/bin/bash -query_num=47 +query_num=50 # query_num=1 -data_dir="scripts/parser_test/" +data_dir="data/parser_test/" gbuild='bin/gbuild ' gquery='bin/gquery ' gdrop='bin/gdrop ' @@ -19,27 +19,30 @@ do "grep" "." $result | "sort" > "result_a.txt" - $gbuild -db parser_test -f $data > tmp.txt - $gquery -db parser_test -q $query | grep -A 10000 "final result is :" > tmp.txt + $gbuild -db parser_test -f $data > /dev/null + $gquery -db parser_test -q $query | grep -A 10000 "Finish Database \`parser_test\` Load" > all.txt + grep -A 10000 "final result is :" all.txt > tmp.txt "sed" "-i" "1d" "tmp.txt" "sed" "-i" "\$d" "tmp.txt" # "sed" "-i" "\$d" "tmp.txt" "grep" "." "tmp.txt" | "sort" > "result_b.txt" - "diff" "result_a.txt" "result_b.txt" > "equal.txt" + "diff" -w "result_a.txt" "result_b.txt" > "equal.txt" if [ -s "equal.txt" ]; then - echo "parser test #"$i" failed" + cat "equal.txt" + echo -e "\033[43;35m parser test #"$i" failed \033[0m" all_passed=false + exit 1 # exit with error code else echo "parser test #"$i" passed" fi - $gdrop -db parser_test > tmp.txt - "rm" "result_a.txt" "result_b.txt" "tmp.txt" "equal.txt" + $gdrop -db parser_test > /dev/null + "rm" "result_a.txt" "result_b.txt" "tmp.txt" "equal.txt" "all.txt" done if [ $all_passed = true ]; then echo "All parser test cases passed!" 
else - echo "Some parser test cases failed!" + echo -e "\033[43;35m Some parser test cases failed! \033[0m" fi diff --git a/scripts/run.sh b/tests/run.sh similarity index 100% rename from scripts/run.sh rename to tests/run.sh diff --git a/scripts/run1.sh b/tests/run1.sh similarity index 100% rename from scripts/run1.sh rename to tests/run1.sh diff --git a/scripts/run2.sh b/tests/run2.sh similarity index 100% rename from scripts/run2.sh rename to tests/run2.sh diff --git a/scripts/run3.sh b/tests/run3.sh similarity index 100% rename from scripts/run3.sh rename to tests/run3.sh diff --git a/scripts/run4.sh b/tests/run4.sh similarity index 100% rename from scripts/run4.sh rename to tests/run4.sh diff --git a/scripts/run_dbpedia.sh b/tests/run_dbpedia.sh similarity index 100% rename from scripts/run_dbpedia.sh rename to tests/run_dbpedia.sh diff --git a/scripts/run_lubm.sh b/tests/run_lubm.sh similarity index 100% rename from scripts/run_lubm.sh rename to tests/run_lubm.sh diff --git a/scripts/run_transaction.cpp b/tests/run_transaction.cpp similarity index 97% rename from scripts/run_transaction.cpp rename to tests/run_transaction.cpp index 12c92358..0ca8a7fc 100644 --- a/scripts/run_transaction.cpp +++ b/tests/run_transaction.cpp @@ -1,6 +1,7 @@ -#include "../api/http/cpp/src/GstoreConnector.h" #include -#include "../Util/Util.h" + +#include "../src/Util/Util.h" +#include "../src/Connector/GstoreConnector.h" using namespace std; using namespace rapidjson; diff --git a/scripts/run_watdiv.sh b/tests/run_watdiv.sh similarity index 100% rename from scripts/run_watdiv.sh rename to tests/run_watdiv.sh diff --git a/scripts/test.py b/tests/test.py similarity index 100% rename from scripts/test.py rename to tests/test.py diff --git a/scripts/transaction_test.cpp b/tests/transaction_test.cpp similarity index 97% rename from scripts/transaction_test.cpp rename to tests/transaction_test.cpp index 9542b64d..563c27a4 100644 --- a/scripts/transaction_test.cpp +++ b/tests/transaction_test.cpp @@ -1,6 +1,7 @@ -锘#include "../Database/Txn_manager.h" -#include "../Util/Util.h" -#include "../Database/Database.h" +#include "../src/Database/Txn_manager.h" +#include "../src/Util/Util.h" +#include "../src/Database/Database.h" + bool do_query(Database* db, string sparql) { int ret_val; diff --git a/scripts/update_test.cpp b/tests/update_test.cpp similarity index 91% rename from scripts/update_test.cpp rename to tests/update_test.cpp index 43aea3af..8e49ee68 100644 --- a/scripts/update_test.cpp +++ b/tests/update_test.cpp @@ -6,8 +6,8 @@ # Description: used to test the correctness of update triples =============================================================================*/ -#include "../Util/Util.h" -#include "../Database/Database.h" +#include "../src/Util/Util.h" +#include "../src/Database/Database.h" using namespace std; @@ -113,6 +113,8 @@ int main(int argc, char * argv[]) int test_value_region = 10; string db_name = "update_test"; string db_path = "data/update_test.nt"; + string _db_home = util.getConfigureValue("db_home"); + string _db_suffix = util.getConfigureValue("db_suffix"); if(argc == 2) { string s = argv[1]; @@ -183,20 +185,20 @@ int main(int argc, char * argv[]) //build database db = new Database(db_name); bool flag = db->build(db_path); + string _db_path = _db_home + "/" + db_name + _db_suffix; if (flag) { - string msg = db_name + ".db is built done."; + string msg = db_name + _db_suffix + " is built done."; cerr << msg << endl; ofstream f; - f.open("./" + db_name + ".db/success.txt"); + 
f.open(_db_path + "/success.txt"); f.close(); } else //if fails, drop database and return { - string msg = db_name + ".db is built failed."; + string msg = db_name + _db_suffix + " is built failed."; cerr << msg << endl; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); delete db; db = NULL; return 0; @@ -216,6 +218,7 @@ int main(int argc, char * argv[]) update_triples.insert(temp); for (int i = 0; i < test_group_num; i++) { + std::cerr << "test_group " << i << " of " << test_group_num << std::endl; int a = rand() % test_group_size + 1; int b = rand() % test_group_size + 1; for (int j = 0; j < a; j++) @@ -268,8 +271,7 @@ int main(int argc, char * argv[]) cerr << "Update triples exist errors." << endl; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } std::set::iterator it1; @@ -283,17 +285,14 @@ int main(int argc, char * argv[]) cerr << "Update triples exist errors." << endl; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } } } - delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); } else { @@ -342,8 +341,7 @@ int main(int argc, char * argv[]) update = NULL; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } TYPE_TRIPLE_NUM temp_num = triple_num - update_num; @@ -361,8 +359,7 @@ int main(int argc, char * argv[]) update = NULL; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } } @@ -381,8 +378,7 @@ int main(int argc, char * argv[]) update = NULL; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } query = "ASK WHERE{"; @@ -400,8 +396,7 @@ int main(int argc, char * argv[]) update = NULL; delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); return 0; } delete[] update; @@ -409,8 +404,7 @@ int main(int argc, char * argv[]) } delete db; db = NULL; - string cmd = "rm -r " + db_name + ".db"; - system(cmd.c_str()); + util.remove_path(_db_path); } cerr << "Test passed!" << endl; return 0; diff --git a/scripts/workload.cpp b/tests/workload.cpp similarity index 98% rename from scripts/workload.cpp rename to tests/workload.cpp index 96d69ed5..9c476efb 100644 --- a/scripts/workload.cpp +++ b/tests/workload.cpp @@ -9,7 +9,7 @@ #include #include #include -#include "../Database/Txn_manager.h" +#include "../src/Database/Txn_manager.h" using namespace std; @@ -683,8 +683,8 @@ void create_versions(Txn_manager& txn_m) void no_txn_update(Database &db) { - const string insert_filename = "./scripts/insert.nt"; - const string delete_filename = "./scripts/delete.nt"; + const string insert_filename = "./tests/insert.nt"; + const string delete_filename = "./tests/delete.nt"; fstream in; string line, sparql, res; @@ -713,7 +713,7 @@ bool single_txn(int threads_num, Txn_manager& txn_m) txn_id_t TID = txn_m.Begin(static_cast(3)); ifstream in; string line, sparql, res; - in.open("./scripts/insert.nt", ios::in); + in.open("./tests/insert.nt", ios::in); int num = 0; while(getline(in, line)) { @@ -729,7 +729,7 @@ bool single_txn(int threads_num, Txn_manager& txn_m) //if(ret != 0) cerr << "wrong answer!" 
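Condensing the update_test.cpp changes above: the database directory is now resolved from the configured db_home and db_suffix values, the success marker is written inside that directory, and cleanup uses Util::remove_path instead of system("rm -r ..."). A compact sketch of that pattern; the helper function and its name are illustrative:

    #include <fstream>
    #include <string>
    #include "../src/Util/Util.h"

    // Resolve the on-disk path of the test database, mark it built, and drop it
    // again without a shell call.
    void build_marker_and_cleanup(Util &util)
    {
        std::string db_home   = util.getConfigureValue("db_home");
        std::string db_suffix = util.getConfigureValue("db_suffix");
        std::string db_dir    = db_home + "/update_test" + db_suffix;

        std::ofstream(db_dir + "/success.txt").close();  // success marker inside the db dir
        util.remove_path(db_dir);                        // replaces system("rm -r ...")
    }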
<< endl; } in.close(); - in.open("./scripts/delete.nt", ios::in); + in.open("./tests/delete.nt", ios::in); while(getline(in, line)) { sparql = "delete data{" + line + "}"; @@ -748,7 +748,7 @@ void check_results(int threads_num, Txn_manager& txn_m) { txn_id_t TID = txn_m.Begin(); ifstream in; - in.open("./scripts/insert.nt", ios::in); + in.open("./tests/insert.nt", ios::in); for(int i = 0; i < threads_num; i++) { string line, sparql, res; @@ -764,7 +764,7 @@ void check_results(int threads_num, Txn_manager& txn_m) cout << res << endl; } in.close(); - in.open("./scripts/delete.nt", ios::in); + in.open("./tests/delete.nt", ios::in); for(int i = 0; i < threads_num; i++) { string line, sparql, query, res; @@ -812,7 +812,7 @@ int main(int argc, char* argv[]) Txn_manager txn_m(&_db, string("lubm_1M")); - TID = txn_m.Begin(2); + auto TID = txn_m.Begin(IsolationLevelType::SNAPSHOT); string res; int ret = txn_m.Query(TID, "select ?x where { ?x .}", res); @@ -829,7 +829,7 @@ int main(int argc, char* argv[]) insert data { .} select ?x where { ?x .} */ - return; + return 0; //threads_num = thread::hardware_concurrency()-1; vector pool(threads_num); diff --git a/tools/.gitignore b/tools/.gitignore deleted file mode 100644 index 36cbf1d3..00000000 --- a/tools/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -* -!.gitignore -!libantlr3c-3.4.tar.gz -!rapidjson.tar.gz -!workflow-0.10.3.tar.gz -!log4cplus-2.0.8.tar.gz -!zlib-1.3.tar.gz \ No newline at end of file diff --git a/tools/indicators.tar b/tools/indicators.tar deleted file mode 100644 index 5d74833c..00000000 Binary files a/tools/indicators.tar and /dev/null differ diff --git a/tools/log4cplus-2.0.8.tar.gz b/tools/log4cplus-2.0.8.tar.gz deleted file mode 100644 index 5fbd2252..00000000 Binary files a/tools/log4cplus-2.0.8.tar.gz and /dev/null differ diff --git a/tools/rapidjson.tar.gz b/tools/rapidjson.tar.gz deleted file mode 100644 index a8057c79..00000000 Binary files a/tools/rapidjson.tar.gz and /dev/null differ diff --git a/tools/sparql.tar.gz b/tools/sparql.tar.gz deleted file mode 100644 index 24bb4b04..00000000 Binary files a/tools/sparql.tar.gz and /dev/null differ diff --git a/tools/zlib-1.3.tar.gz b/tools/zlib-1.3.tar.gz deleted file mode 100644 index 193e628d..00000000 Binary files a/tools/zlib-1.3.tar.gz and /dev/null differ
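Finally, the workload.cpp hunks switch Txn_manager::Begin from a bare integer to the IsolationLevelType enum, deduce the transaction id with auto, and give main a proper return value. A sketch of the resulting call pattern, limited to what the hunks show; the SPARQL string is a placeholder, and commit/abort handling is not visible in this diff and therefore omitted:

    #include <iostream>
    #include <string>
    #include "../src/Database/Txn_manager.h"

    void snapshot_query_demo(Database &db)
    {
        Txn_manager txn_m(&db, std::string("lubm_1M"));

        // Begin() takes the isolation level enum instead of a magic number.
        auto TID = txn_m.Begin(IsolationLevelType::SNAPSHOT);

        std::string res;
        int ret = txn_m.Query(TID, "select ?x where { ?x ?p ?o . }", res);
        if (ret != 0)
            std::cerr << "query inside the transaction failed: " << ret << std::endl;
    }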