diff --git a/.bazelrc b/.bazelrc index acfc7bdda..0088def11 100644 --- a/.bazelrc +++ b/.bazelrc @@ -15,6 +15,7 @@ # This macro is needed in order for mlmd to build with ZetaSQL which can only # be compiled upon c++17 or higher. build --cxxopt="-std=c++17" +build --host_cxxopt="-std=c++17" # Needed to avoid zetasql proto error. build --protocopt=--experimental_allow_proto3_optional @@ -23,5 +24,31 @@ build --protocopt=--experimental_allow_proto3_optional # parameter 'user_link_flags' is deprecated and will be removed soon. # It may be temporarily re-enabled by setting --incompatible_require_linker_input_cc_api=false build --incompatible_require_linker_input_cc_api=false + build:macos --apple_platform_type=macos build:macos_arm64 --cpu=darwin_arm64 +build:macos_arm64 --linkopt=-Wl,-undefined,dynamic_lookup +build:macos_arm64 --host_linkopt=-Wl,-undefined,dynamic_lookup +build --conlyopt=-std=c11 +build --host_conlyopt=-std=c11 +build --cxxopt=-std=c++17 +build --host_cxxopt=-std=c++17 +build --copt=-Wno-error +build --cxxopt=-Wno-error +build --cxxopt=-fpermissive +build --conlyopt=-Wno-array-parameter +build --conlyopt=-Wno-implicit-function-declaration +build --host_copt=-Wno-error +build --host_cxxopt=-Wno-error +build --host_cxxopt=-fpermissive +build --host_conlyopt=-Wno-array-parameter +build --host_conlyopt=-Wno-implicit-function-declaration +# Linux-only linker flags (not supported on macOS) +build:linux --linkopt=-Wl,--no-as-needed +build:linux --host_linkopt=-Wl,--no-as-needed +build:macos_arm64 --copt=-Wno-error=c23-extensions +build:macos_arm64 --host_copt=-Wno-error=c23-extensions +build:macos_arm64 --copt=-Wno-c23-extensions +build:macos_arm64 --host_copt=-Wno-c23-extensions +build:macos_arm64 --action_env=CONDA_PREFIX +build:macos_arm64 --action_env=CMAKE_ICONV_FLAG="-DCMAKE_DISABLE_FIND_PACKAGE_Iconv=ON -DICONV_LIBRARIES=$CONDA_PREFIX/lib/libiconv.dylib -DICONV_INCLUDE_DIR=$CONDA_PREFIX/include" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index e68283ecc..000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Build ml-metadata - -on: - push: - branches: - - master - pull_request: - branches: - - master - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9", "3.10", "3.11"] - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Build ml-metadata - id: build-ml-metadata - uses: ./.github/reusable-build - with: - python-version: ${{ matrix.python-version }} - - upload_to_pypi: - name: Upload to PyPI - runs-on: ubuntu-latest - if: (github.event_name == 'release' && startsWith(github.ref, 'refs/tags')) || (github.event_name == 'workflow_dispatch') - needs: [build] - environment: - name: pypi - url: https://pypi.org/p/ml-metadata/ - permissions: - id-token: write - steps: - - name: Retrieve wheels - uses: actions/download-artifact@v4.1.8 - with: - merge-multiple: true - path: wheels - - - name: List the build artifacts - run: | - ls -lAs wheels/ - - - name: Upload to PyPI - uses: pypa/gh-action-pypi-publish@release/v1.9 - with: - packages_dir: wheels/ diff --git a/.github/workflows/conda-build.yml b/.github/workflows/conda-build.yml new file mode 100644 index 000000000..3efd7fd84 --- /dev/null +++ b/.github/workflows/conda-build.yml @@ -0,0 +1,97 @@ +name: Build ml-metadata with Conda + +on: + push: + branches: + - master + pull_request: + branches: + - master + workflow_dispatch: + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + python-version: ["3.9", "3.10", "3.11", "3.13"] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Micromamba + uses: mamba-org/setup-micromamba@v1 + with: + environment-file: ${{ matrix.os == 'macos-latest' && 'ci/environment-macos.yml' || 'ci/environment.yml' }} + cache-environment: true + create-args: >- + python=${{ matrix.python-version }} + + - name: Display environment info + shell: bash -l {0} + run: | + micromamba info + micromamba list + + - name: Install Bazel + shell: bash -l {0} + run: | + # Install Bazelisk (manages Bazel versions) + if [ "$RUNNER_OS" == "Linux" ]; then + curl -Lo /tmp/bazelisk https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 + elif [ "$RUNNER_OS" == "macOS" ]; then + curl -Lo /tmp/bazelisk https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-darwin-amd64 + fi + chmod +x /tmp/bazelisk + sudo mv /tmp/bazelisk /usr/local/bin/bazel + echo "USE_BAZEL_VERSION=6.1.0" >> $GITHUB_ENV + bazel --version + + - name: Build the package + shell: bash -l {0} + run: | + python setup.py bdist_wheel + + - name: Repair wheel (Linux) + if: runner.os == 'Linux' + shell: bash -l {0} + run: | + WHEEL_PATH="$(ls dist/*.whl)" + WHEEL_DIR=$(dirname "${WHEEL_PATH}") + auditwheel repair --plat manylinux2014_x86_64 -w "${WHEEL_DIR}" "${WHEEL_PATH}" + rm "${WHEEL_PATH}" + + - name: Upload wheel artifact + uses: actions/upload-artifact@v4.4.0 + with: + name: ml-metadata-wheel-${{ matrix.os }}-py${{ matrix.python-version }} + path: dist/*.whl + + upload_to_pypi: + name: Upload to PyPI + runs-on: ubuntu-latest + if: (github.event_name == 'release' && startsWith(github.ref, 'refs/tags')) || (github.event_name == 'workflow_dispatch') + needs: [build] + environment: + name: pypi + url: https://pypi.org/p/ml-metadata/ + permissions: + id-token: write + steps: + - name: Retrieve wheels + uses: actions/download-artifact@v4.1.8 + with: + merge-multiple: true + path: wheels + + - name: List the build artifacts + run: | + ls -lAs wheels/ + + - name: Upload to PyPI + uses: pypa/gh-action-pypi-publish@release/v1.9 + with: + packages_dir: wheels/ diff --git a/.github/workflows/conda-test.yml b/.github/workflows/conda-test.yml new file mode 100644 index 000000000..86facba63 --- /dev/null +++ b/.github/workflows/conda-test.yml @@ -0,0 +1,76 @@ +name: Test ml-metadata with Conda + +on: + push: + branches: + - master + pull_request: + branches: + - master + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + python-version: ["3.9", "3.10", "3.11", "3.13"] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Micromamba + uses: mamba-org/setup-micromamba@v1 + with: + environment-file: ${{ matrix.os == 'macos-latest' && 'ci/environment-macos.yml' || 'ci/environment.yml' }} + cache-environment: true + create-args: >- + python=${{ matrix.python-version }} + + - name: Display environment info + shell: bash -l {0} + run: | + micromamba info + micromamba list + + - name: Install Bazel + shell: bash -l {0} + run: | + # Install Bazelisk (manages Bazel versions) + if [ "$RUNNER_OS" == "Linux" ]; then + curl -Lo /tmp/bazelisk https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 + elif [ "$RUNNER_OS" == "macOS" ]; then + curl -Lo /tmp/bazelisk https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-darwin-amd64 + fi + chmod +x /tmp/bazelisk + sudo mv /tmp/bazelisk /usr/local/bin/bazel + echo "USE_BAZEL_VERSION=6.1.0" >> $GITHUB_ENV + bazel --version + + - name: Build the package + shell: bash -l {0} + run: | + python setup.py bdist_wheel + + - name: Install built wheel (Linux/macOS) + if: runner.os != 'Windows' + shell: bash -l {0} + run: | + pip install dist/*.whl + + - name: Install built wheel (Windows) + if: runner.os == 'Windows' + shell: bash -l {0} + run: | + pip install (Get-ChildItem dist/*.whl | Select-Object -First 1).FullName + + - name: Run tests + shell: bash -l {0} + run: | + # cleanup (interferes with tests) + rm -rf bazel-* + # run tests + pytest -vv diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 5d7a58420..000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Test ml-metadata - -on: - push: - branches: - - master - pull_request: - branches: - - master - workflow_dispatch: - -jobs: - test: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9", "3.10", "3.11"] - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Build ml-metadata - id: build-ml-metadata - uses: ./.github/reusable-build - with: - python-version: ${{ matrix.python-version }} - - - name: Test - run: | - # cleanup (interferes with tests) - rm -rf bazel-* - # run tests - pytest -vv diff --git a/WORKSPACE b/WORKSPACE index 246515185..939998db7 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -24,18 +24,13 @@ http_archive( sha256 = "5308fc1d8865406a49427ba24a9ab53087f17f5266a7aabbfc28823f3916e1ca", ) -# Install version 0.9.0 of rules_foreign_cc, as default version causes an -# invalid escape sequence error to be raised, which can't be avoided with -# the --incompatible_restrict_string_escapes=false flag (flag was removed in -# Bazel 5.0). -RULES_FOREIGN_CC_VERSION = "0.9.0" +# Install version 0.12.0 of rules_foreign_cc +RULES_FOREIGN_CC_VERSION = "0.12.0" http_archive( name = "rules_foreign_cc", - sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51", + sha256 = "a2e6fb56e649c1ee79703e99aa0c9d13c6cc53c8d7a0cbb8797ab2888bbc99a3", strip_prefix = "rules_foreign_cc-%s" % RULES_FOREIGN_CC_VERSION, url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/%s.tar.gz" % RULES_FOREIGN_CC_VERSION, - patch_tool = "patch", - patches = ["//ml_metadata/third_party:rules_foreign_cc.patch",], ) load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies") @@ -43,9 +38,9 @@ rules_foreign_cc_dependencies() http_archive( name = "com_google_absl", - urls = ["https://github.com/abseil/abseil-cpp/archive/940c06c25d2953f44310b68eb8aab6114dba11fb.zip"], - strip_prefix = "abseil-cpp-940c06c25d2953f44310b68eb8aab6114dba11fb", - sha256 = "0e800799aa64d0b4d354f3ff317bbd5fbf42f3a522ab0456bb749fc8d3b67415", + urls = ["https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.1.tar.gz"], + strip_prefix = "abseil-cpp-20230802.1", + sha256 = "987ce98f02eefbaf930d6e38ab16aa05737234d7afbab2d5c4ea7adbe50c28ed", ) http_archive( @@ -60,18 +55,18 @@ http_archive( http_archive( name = "org_sqlite", build_file = clean_dep("//ml_metadata/third_party:sqlite.BUILD"), - sha256 = "87775784f8b22d0d0f1d7811870d39feaa7896319c7c20b849a4181c5a50609b", - strip_prefix = "sqlite-amalgamation-3390200", + sha256 = "aa73d8748095808471deaa8e6f34aa700e37f2f787f4425744f53fdd15a89c40", + strip_prefix = "sqlite-amalgamation-3470200", urls = [ - "https://www.sqlite.org/2022/sqlite-amalgamation-3390200.zip", + "https://www.sqlite.org/2024/sqlite-amalgamation-3470200.zip", ], ) http_archive( name = "com_google_googletest", - sha256 = "81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2", - strip_prefix = "googletest-release-1.12.1", - urls = ["https://github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz"], + sha256 = "7b42dc4b2106035276f8f0a5019c929a77d9c606ab43b8e0e1c4b7cc27c8e5ce", + strip_prefix = "googletest-release-1.15.2", + urls = ["https://github.com/google/googletest/archive/refs/tags/release-1.15.2.tar.gz"], ) http_archive( @@ -84,13 +79,13 @@ http_archive( sha256 = "6281aa4eeecb9e932d7091f99872e7b26fa6aacece49c15ce5b14af2b7ec050f", ) -# 1.5.0 +# 1.7.1 http_archive( name = "bazel_skylib", - sha256 = "cd55a062e763b9349921f0f5db8c3933288dc8ba4f76dd9416aac68acee3cb94", + sha256 = "bc283cdfcd526a52c3201279cda4bc298652efa898b10b4db0837dc51652756f", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz", - "https://github.com/bazelbuild/bazel-skylib/releases/download/1.5.0/bazel-skylib-1.5.0.tar.gz", + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.7.1/bazel-skylib-1.7.1.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.7.1/bazel-skylib-1.7.1.tar.gz", ], ) @@ -117,13 +112,26 @@ load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") protobuf_deps() +# Override upb from protobuf_deps() to apply our patch +http_archive( + name = "upb", + sha256 = "017a7e8e4e842d01dba5dc8aa316323eee080cd1b75986a7d1f94d87220e6502", + strip_prefix = "upb-e4635f223e7d36dfbea3b722a4ca4807a7e882e2", + urls = [ + "https://storage.googleapis.com/grpc-bazel-mirror/github.com/protocolbuffers/upb/archive/e4635f223e7d36dfbea3b722a4ca4807a7e882e2.tar.gz", + "https://github.com/protocolbuffers/upb/archive/e4635f223e7d36dfbea3b722a4ca4807a7e882e2.tar.gz", + ], + patches = ["//ml_metadata/third_party:upb.patch"], + patch_args = ["-p0"], +) + # Needed by Protobuf. http_archive( name = "zlib", build_file = "@com_google_protobuf//:third_party/zlib.BUILD", - sha256 = "d8688496ea40fb61787500e863cc63c9afcbc524468cedeb478068924eb54932", - strip_prefix = "zlib-1.2.12", - urls = ["https://github.com/madler/zlib/archive/v1.2.12.tar.gz"], + sha256 = "17e88863f3600672ab49182f217281b6fc4d3c762bde361935e436a95214d05c", + strip_prefix = "zlib-1.3.1", + urls = ["https://github.com/madler/zlib/archive/v1.3.1.tar.gz"], ) http_archive( @@ -136,9 +144,10 @@ http_archive( http_archive( name = "pybind11", urls = [ - "https://github.com/pybind/pybind11/archive/v2.10.1.tar.gz", + "https://github.com/pybind/pybind11/archive/v2.13.6.tar.gz", ], - strip_prefix = "pybind11-2.10.1", + sha256 = "e08cb87f4773da97fa7b5f035de8763abc656d87d5773e62f6da0587d1f0ec20", + strip_prefix = "pybind11-2.13.6", build_file = "@pybind11_bazel//:pybind11.BUILD", ) @@ -247,33 +256,7 @@ http_archive( url = "https://github.com/gflags/gflags/archive/a738fdf9338412f83ab3f26f31ac11ed3f3ec4bd.zip", ) -ZETASQL_COMMIT = "ac37cf5c0d80b5605176fc0f29e87b12f00be693" # 08/10/2022 -http_archive( - name = "com_google_zetasql", - urls = ["https://github.com/google/zetasql/archive/%s.zip" % ZETASQL_COMMIT], - strip_prefix = "zetasql-%s" % ZETASQL_COMMIT, - #patches = ["//ml_metadata/third_party:zetasql.patch"], - sha256 = '651a768cd51627f58aa6de7039aba9ddab22f4b0450521169800555269447840' -) - -load("@com_google_zetasql//bazel:zetasql_deps_step_1.bzl", "zetasql_deps_step_1") -zetasql_deps_step_1() -load("@com_google_zetasql//bazel:zetasql_deps_step_2.bzl", "zetasql_deps_step_2") -zetasql_deps_step_2( - analyzer_deps = True, - evaluator_deps = True, - tools_deps = False, - java_deps = False, - testing_deps = False) - -# This is part of what zetasql_deps_step_3() does. -load("@com_google_googleapis//:repository_rules.bzl", "switched_rules_by_language") -switched_rules_by_language( - name = "com_google_googleapis_imports", - cc = True, -) - - +# ZetaSQL removed - not needed for core functionality # Please add all new ML Metadata dependencies in workspace.bzl. load("//ml_metadata:workspace.bzl", "ml_metadata_workspace") diff --git a/ci/environment-macos.yml b/ci/environment-macos.yml new file mode 100644 index 000000000..39d363994 --- /dev/null +++ b/ci/environment-macos.yml @@ -0,0 +1,22 @@ +# Conda environment for building and testing ml-metadata on macOS +name: mlmd-dev +channels: + - conda-forge +dependencies: + # Note: Bazel is installed separately via official installer (conda package is unreliable) + - setuptools + - wheel + - pip + - numpy + - pytest + - pytest-cov + - cmake=3.29 # Required for building libmysqlclient (CMake 4.x is incompatible) + - six # Required by ZetaSQL build tools + - libiconv # Required by libmysqlclient for character encoding conversions + + # C/C++ compilers + - clang + - clangxx + + - pip: + - delocate # For macOS wheel repair diff --git a/ci/environment.yml b/ci/environment.yml new file mode 100644 index 000000000..7509c12c6 --- /dev/null +++ b/ci/environment.yml @@ -0,0 +1,22 @@ +# Conda environment for building and testing ml-metadata on Linux +name: mlmd-dev +channels: + - conda-forge +dependencies: + # Note: Bazel is installed separately via official installer (conda package is unreliable) + - setuptools + - wheel + - pip + - numpy + - pytest + - pytest-cov + - patchelf # For wheel repair on Linux + - cmake=3.29 + + # C/C++ compilers - GCC 8.x to match manylinux2014 devtoolset-8 + - gcc_linux-64 + - gxx_linux-64 + - sysroot_linux-64=2.17 # CentOS 7/manylinux2014 compatible glibc headers + + - pip: + - auditwheel # For manylinux wheel compliance diff --git a/mkdocs.yml b/mkdocs.yml index f9a4ba2c0..d11bfb0f8 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -68,8 +68,8 @@ plugins: - "!_test$" extensions: - griffe_inherited_docstrings - import: - - https://docs.python.org/3/objects.inv + import: + - https://docs.python.org/3/objects.inv - caption: figure: ignore_alt: true diff --git a/ml_metadata/libmysqlclient.BUILD b/ml_metadata/libmysqlclient.BUILD index 6b2b00d8c..73cc65646 100644 --- a/ml_metadata/libmysqlclient.BUILD +++ b/ml_metadata/libmysqlclient.BUILD @@ -39,6 +39,12 @@ config_setting( visibility = ["//visibility:public"], ) +config_setting( + name = "darwin_arm64", + values = {"cpu": "darwin_arm64"}, + visibility = ["//visibility:public"], +) + cc_library( name = "libmysqlclient", srcs = configure_out_srcs + [ @@ -91,7 +97,19 @@ cc_library( "-DLIBICONV_PLUG", "-DHAVE_OPENSSL", "-DHAVE_TLS", - ], + # Fix implicit function declarations + "-D_GNU_SOURCE", + ] + select({ + # On Linux/GCC 13 / Ubuntu 24.04: define legacy MySQL types using stdint.h types + # On macOS: these types are already correct size, don't redefine to avoid conflicts + ":darwin": [], + ":darwin_arm64": [], + "//conditions:default": [ + "-Duint=uint32_t", + "-Dushort=uint16_t", + "-Dulong=uint64_t", + ], + }), includes = [ "build/include/", "include/", @@ -103,6 +121,7 @@ cc_library( "-lm", ] + select({ ":darwin": ["-liconv"], + ":darwin_arm64": ["-liconv"], "//conditions:default": [], }), visibility = ["//visibility:public"], diff --git a/ml_metadata/metadata_store/BUILD b/ml_metadata/metadata_store/BUILD index 84fdef837..1ed7d6bfb 100644 --- a/ml_metadata/metadata_store/BUILD +++ b/ml_metadata/metadata_store/BUILD @@ -271,8 +271,9 @@ cc_library( "@com_google_absl//absl/strings", "//ml_metadata/proto:metadata_source_proto", "//ml_metadata/proto:metadata_store_proto", - "//ml_metadata/query:filter_query_ast_resolver", - "//ml_metadata/query:filter_query_builder", + # ZetaSQL filter query dependencies removed + # "//ml_metadata/query:filter_query_ast_resolver", + # "//ml_metadata/query:filter_query_builder", "//ml_metadata/util:return_utils", "//ml_metadata/util:struct_utils", "@com_google_glog//:glog", @@ -999,8 +1000,9 @@ cc_library( "@com_google_absl//absl/strings", "//ml_metadata/proto:metadata_source_proto", "//ml_metadata/proto:metadata_store_proto", - "//ml_metadata/query:filter_query_ast_resolver", - "//ml_metadata/query:filter_query_builder", + # ZetaSQL filter query dependencies removed + # "//ml_metadata/query:filter_query_ast_resolver", + # "//ml_metadata/query:filter_query_builder", "//ml_metadata/util:return_utils", "//ml_metadata/util:struct_utils", "@com_google_glog//:glog", diff --git a/ml_metadata/metadata_store/metadata_store_test.py b/ml_metadata/metadata_store/metadata_store_test.py index 85cf8f78a..920066f36 100644 --- a/ml_metadata/metadata_store/metadata_store_test.py +++ b/ml_metadata/metadata_store/metadata_store_test.py @@ -1781,51 +1781,6 @@ def test_get_contexts_by_order_by_field(self): for i in range(103): self.assertEqual(got_contexts[i].id, context_ids[199 - i]) - @parameterized.parameters( - (_create_example_artifact_type, mlmd.MetadataStore.put_artifact_type, - metadata_store_pb2.Artifact, mlmd.MetadataStore.put_artifacts, - mlmd.MetadataStore.get_artifacts), - (_create_example_execution_type, mlmd.MetadataStore.put_execution_type, - metadata_store_pb2.Execution, mlmd.MetadataStore.put_executions, - mlmd.MetadataStore.get_executions), - (_create_example_context_type, mlmd.MetadataStore.put_context_type, - metadata_store_pb2.Context, mlmd.MetadataStore.put_contexts, - mlmd.MetadataStore.get_contexts)) - def test_get_nodes_by_filter_query(self, create_type_fn, put_type_fn, - node_cls, put_nodes_fn, get_nodes_fn): - store = _get_metadata_store(self.cli_args) - node_type = create_type_fn(self._get_test_type_name()) - type_id = put_type_fn(store, node_type) - - nodes = [] - for i in range(200): - nodes.append(node_cls(name=f"node_{i}", type_id=type_id)) - nodes[i].custom_properties["p"].int_value = i - node_ids = put_nodes_fn(store, nodes) - - got_nodes = get_nodes_fn( - store, - list_options=mlmd.ListOptions( - order_by=mlmd.OrderByField.ID, - is_asc=True, - filter_query=("custom_properties.p.int_value < 21 AND " - "name LIKE 'node_2%'") - )) - self.assertLen(got_nodes, 2) - self.assertEqual(got_nodes[0].id, node_ids[2]) - self.assertEqual(got_nodes[0].name, "node_2") - self.assertEqual(got_nodes[1].id, node_ids[20]) - self.assertEqual(got_nodes[1].name, "node_20") - - @parameterized.parameters((mlmd.MetadataStore.get_artifacts), - (mlmd.MetadataStore.get_executions), - (mlmd.MetadataStore.get_contexts)) - def test_get_nodes_by_filter_query_syntax_errors(self, get_nodes_fn): - store = _get_metadata_store(self.cli_args) - with self.assertRaises(errors.InvalidArgumentError): - _ = get_nodes_fn( - store, list_options=mlmd.ListOptions(filter_query="invalid syntax")) - def test_put_contexts_get_context_by_type_and_name(self): # Prepare test data. store = _get_metadata_store(self.cli_args) @@ -1947,420 +1902,6 @@ def test_update_context_get_context(self): [context_result] = store.get_contexts_by_id([context_id_3]) self.assertEqual(context_result.type_id, type_id) - def test_put_lineage_subgraph_get_lineage_subgraph(self): - store = _get_metadata_store(self.cli_args) - execution_type = _create_example_execution_type(self._get_test_type_name()) - execution_type_id = store.put_execution_type(execution_type) - artifact_type = _create_example_artifact_type(self._get_test_type_name()) - artifact_type_id = store.put_artifact_type(artifact_type) - context_type = _create_example_context_type(self._get_test_type_name()) - context_type_id = store.put_context_type(context_type) - - existing_context = metadata_store_pb2.Context( - type_id=context_type_id, name="existing_context") - [existing_context_id] = store.put_contexts([existing_context]) - new_context = metadata_store_pb2.Context( - type_id=context_type_id, name="new_context") - request_contexts = [existing_context, new_context] - - existing_execution = metadata_store_pb2.Execution( - type_id=execution_type_id, name="existing_execution") - [existing_execution_id] = store.put_executions([existing_execution]) - existing_execution.id = existing_execution_id - new_execution = metadata_store_pb2.Execution( - type_id=execution_type_id, name="new_execution") - request_executions = [existing_execution, new_execution] - - input_artifact = metadata_store_pb2.Artifact( - type_id=artifact_type_id, uri="testuri") - [input_artifact_id] = store.put_artifacts([input_artifact]) - input_artifact.id = input_artifact_id - output_artifact = metadata_store_pb2.Artifact( - type_id=artifact_type_id, uri="output_artifact") - request_artifacts = [input_artifact, output_artifact] - - input_event_for_existing_execution = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.INPUT, - execution_id=existing_execution_id, - artifact_id=input_artifact_id) - input_event_for_new_execution = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.INPUT, artifact_id=input_artifact_id) - output_event_for_existing_execution = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.OUTPUT, - execution_id=existing_execution_id) - output_event_for_new_execution = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.OUTPUT) - request_event_edges = [(0, 0, input_event_for_existing_execution), - (None, 1, output_event_for_existing_execution), - (1, None, input_event_for_new_execution), - (1, 1, output_event_for_new_execution)] - - # Request should fail since existing_context already inserted - with self.assertRaises(errors.AlreadyExistsError): - store.put_lineage_subgraph(request_executions, request_artifacts, - request_contexts, request_event_edges) - - # Request should succeed with `reuse_context_if_already_exist` set - execution_ids, artifact_ids, context_ids = store.put_lineage_subgraph( - request_executions, - request_artifacts, - request_contexts, - request_event_edges, - reuse_context_if_already_exist=True) - for execution, execution_id in zip(request_executions, execution_ids): - execution.id = execution_id - for artifact, artifact_id in zip(request_artifacts, artifact_ids): - artifact.id = artifact_id - for context, context_id in zip(request_contexts, context_ids): - context.id = context_id - - # Verify inserted items - self.assertLen(execution_ids, 2) - self.assertEqual(execution_ids[0], existing_execution_id) - self.assertLen(artifact_ids, 2) - self.assertEqual(artifact_ids[0], input_artifact_id) - self.assertLen(context_ids, 2) - self.assertEqual(context_ids[0], existing_context_id) - - get_contexts_results = store.get_contexts_by_type( - type_name=context_type.name) - self.assertLen(get_contexts_results, 2) - get_contexts_results = { - context.id: context for context in get_contexts_results - } - self.assertIn(existing_context.id, get_contexts_results) - self.assertEqual(get_contexts_results[existing_context.id].name, - existing_context.name) - self.assertEqual(get_contexts_results[existing_context.id].type_id, - existing_context.type_id) - self.assertIn(new_context.id, get_contexts_results) - self.assertEqual(get_contexts_results[new_context.id].name, - new_context.name) - self.assertEqual(get_contexts_results[new_context.id].type_id, - new_context.type_id) - - get_artifacts_by_existing_context_result = store.get_artifacts_by_context( - existing_context.id) - get_artifacts_by_new_context_result = store.get_artifacts_by_context( - new_context.id) - self.assertEqual(get_artifacts_by_existing_context_result, - get_artifacts_by_new_context_result) - self.assertLen(get_artifacts_by_new_context_result, 2) - get_artifacts_result = { - artifact.id: artifact - for artifact in get_artifacts_by_new_context_result - } - self.assertIn(artifact_ids[0], get_artifacts_result) - self.assertEqual(get_artifacts_result[artifact_ids[0]].type_id, - input_artifact.type_id) - self.assertEqual(get_artifacts_result[artifact_ids[0]].uri, - input_artifact.uri) - self.assertIn(artifact_ids[1], get_artifacts_result) - self.assertEqual(get_artifacts_result[artifact_ids[1]].type_id, - output_artifact.type_id) - self.assertEqual(get_artifacts_result[artifact_ids[1]].uri, - output_artifact.uri) - - get_executions_by_existing_context_result = store.get_executions_by_context( - existing_context.id) - get_executions_by_new_context_result = store.get_executions_by_context( - new_context.id) - self.assertEqual(get_executions_by_existing_context_result, - get_executions_by_new_context_result) - self.assertLen(get_executions_by_new_context_result, 2) - get_executions_result = { - execution.id: execution - for execution in get_executions_by_new_context_result - } - self.assertIn(execution_ids[0], get_executions_result) - self.assertEqual(get_executions_result[execution_ids[0]].type_id, - existing_execution.type_id) - self.assertEqual(get_executions_result[execution_ids[0]].name, - existing_execution.name) - self.assertIn(execution_ids[1], get_executions_result) - self.assertEqual(get_executions_result[execution_ids[1]].type_id, - new_execution.type_id) - self.assertEqual(get_executions_result[execution_ids[1]].name, - new_execution.name) - - get_events_result = store.get_events_by_execution_ids(execution_ids) - self.assertLen(get_events_result, 4) - get_events_result = {(event.execution_id, event.artifact_id): event - for event in get_events_result} - input_event_for_existing_execution_key = (existing_execution.id, - input_artifact.id) - self.assertIn(input_event_for_existing_execution_key, get_events_result) - self.assertEqual( - get_events_result[input_event_for_existing_execution_key].type, - input_event_for_existing_execution.type) - output_event_for_existing_execution_key = (new_execution.id, - output_artifact.id) - self.assertIn(output_event_for_existing_execution_key, get_events_result) - self.assertEqual( - get_events_result[output_event_for_existing_execution_key].type, - output_event_for_existing_execution.type) - input_event_for_new_execution_key = (existing_execution.id, - input_artifact.id) - self.assertIn(input_event_for_new_execution_key, get_events_result) - self.assertEqual(get_events_result[input_event_for_new_execution_key].type, - input_event_for_new_execution.type) - output_event_for_new_execution_key = (new_execution.id, output_artifact.id) - self.assertIn(output_event_for_new_execution_key, get_events_result) - self.assertEqual(get_events_result[output_event_for_new_execution_key].type, - output_event_for_new_execution.type) - - # Test get_lineage_subgraph() with max_num_hops = 10 and field mask paths = - # ["events", "associations", "attributions"], the whole lineage subgraph - # skeleton will be returned. - query_options = metadata_store_pb2.LineageSubgraphQueryOptions( - starting_artifacts=metadata_store_pb2.LineageSubgraphQueryOptions.StartingNodes( - filter_query="uri = 'output_artifact'" - ), - max_num_hops=10, - ) - - subgraph_skeleton = store.get_lineage_subgraph( - query_options, ["events", "associations", "attributions"] - ) - self.assertEmpty(subgraph_skeleton.artifacts) - self.assertEmpty(subgraph_skeleton.executions) - self.assertEmpty(subgraph_skeleton.contexts) - self.assertEmpty(subgraph_skeleton.artifact_types) - self.assertEmpty(subgraph_skeleton.execution_types) - self.assertEmpty(subgraph_skeleton.context_types) - self.assertLen(subgraph_skeleton.events, 4) - self.assertLen(subgraph_skeleton.associations, 4) - self.assertLen(subgraph_skeleton.attributions, 4) - - # Test get_lineage_subgraph() with max_num_hops = 10 and an empty - # field_mask_paths list, the whole lineage subgraph with node details will - # be returned. - subgraph = store.get_lineage_subgraph(query_options) - self.assertLen(subgraph.artifacts, 2) - self.assertSameElements( - [subgraph.artifacts[0].uri, subgraph.artifacts[1].uri], - [input_artifact.uri, output_artifact.uri], - ) - self.assertLen(subgraph.executions, 2) - self.assertSameElements( - [subgraph.executions[0].name, subgraph.executions[1].name], - [existing_execution.name, new_execution.name], - ) - self.assertLen(subgraph.contexts, 2) - self.assertSameElements( - [subgraph.contexts[0].name, subgraph.contexts[1].name], - [existing_context.name, new_context.name], - ) - self.assertLen(subgraph.artifact_types, 1) - self.assertSameElements( - [subgraph.artifact_types[0].name], [artifact_type.name] - ) - self.assertLen(subgraph.execution_types, 1) - self.assertSameElements( - [subgraph.execution_types[0].name], [execution_type.name] - ) - self.assertLen(subgraph.context_types, 1) - self.assertSameElements( - [subgraph.context_types[0].name], [context_type.name] - ) - self.assertLen(subgraph_skeleton.events, 4) - self.assertLen(subgraph_skeleton.associations, 4) - self.assertLen(subgraph_skeleton.attributions, 4) - - # Test get_lineage_subgraph() with max_num_hops = 0 from starting executions - # filtered by context name. All the executions will be returned. - query_options = metadata_store_pb2.LineageSubgraphQueryOptions( - starting_executions=metadata_store_pb2.LineageSubgraphQueryOptions.StartingNodes( - filter_query="contexts_a.name='existing_context'" - ), - max_num_hops=0, - ) - subgraph = store.get_lineage_subgraph(query_options) - self.assertEmpty(subgraph.artifacts) - self.assertLen(subgraph.executions, 2) - self.assertSameElements( - [subgraph.executions[0].name, subgraph.executions[1].name], - [existing_execution.name, new_execution.name], - ) - self.assertLen(subgraph.contexts, 2) - self.assertSameElements( - [subgraph.contexts[0].name, subgraph.contexts[1].name], - [existing_context.name, new_context.name], - ) - self.assertEmpty(subgraph.artifact_types) - self.assertLen(subgraph.execution_types, 1) - self.assertLen(subgraph.context_types, 1) - self.assertEmpty(subgraph.events) - self.assertLen(subgraph.associations, 4) - self.assertEmpty(subgraph.attributions) - - # Test get_lineage_subgraph() with various field mask paths. - query_options = metadata_store_pb2.LineageSubgraphQueryOptions( - starting_artifacts=metadata_store_pb2.LineageSubgraphQueryOptions.StartingNodes( - filter_query="uri = 'output_artifact'" - ), - max_num_hops=10, - ) - - subgraph = store.get_lineage_subgraph( - query_options, ["artifact_types", "execution_types", "context_types"] - ) - self.assertEmpty(subgraph.artifacts) - self.assertEmpty(subgraph.executions) - self.assertEmpty(subgraph.contexts) - self.assertLen(subgraph.artifact_types, 1) - self.assertLen(subgraph.execution_types, 1) - self.assertLen(subgraph.context_types, 1) - self.assertEmpty(subgraph.events) - self.assertEmpty(subgraph.associations) - self.assertEmpty(subgraph.attributions) - - subgraph = store.get_lineage_subgraph( - query_options, ["artifacts", "executions", "contexts"] - ) - self.assertLen(subgraph.artifacts, 2) - self.assertSameElements( - [subgraph.artifacts[0].uri, subgraph.artifacts[1].uri], - [input_artifact.uri, output_artifact.uri], - ) - self.assertLen(subgraph.executions, 2) - self.assertSameElements( - [subgraph.executions[0].name, subgraph.executions[1].name], - [existing_execution.name, new_execution.name], - ) - self.assertLen(subgraph.contexts, 2) - self.assertSameElements( - [subgraph.contexts[0].name, subgraph.contexts[1].name], - [existing_context.name, new_context.name], - ) - self.assertEmpty(subgraph.artifact_types) - self.assertEmpty(subgraph.execution_types) - self.assertEmpty(subgraph.context_types) - self.assertEmpty(subgraph.events) - self.assertEmpty(subgraph.associations) - self.assertEmpty(subgraph.attributions) - - def test_put_lineage_subgraph_get_lineage_subgraph_with_direction(self): - # Test with a simple lineage graph: - # input_artifact -> execution -> output_artifact. - store = _get_metadata_store(self.cli_args) - execution_type = _create_example_execution_type(self._get_test_type_name()) - execution_type_id = store.put_execution_type(execution_type) - artifact_type = _create_example_artifact_type(self._get_test_type_name()) - artifact_type_id = store.put_artifact_type(artifact_type) - context_type = _create_example_context_type(self._get_test_type_name()) - context_type_id = store.put_context_type(context_type) - - context = metadata_store_pb2.Context( - type_id=context_type_id, name="test_context" - ) - [context.id] = store.put_contexts([context]) - - input_artifact = metadata_store_pb2.Artifact( - type_id=artifact_type_id, uri="input_artifact_uri" - ) - output_artifact = metadata_store_pb2.Artifact( - type_id=artifact_type_id, uri="output_artifact_uri" - ) - [input_artifact.id, output_artifact.id] = store.put_artifacts( - [input_artifact, output_artifact] - ) - - execution = metadata_store_pb2.Execution( - type_id=execution_type_id, name="test_execution" - ) - [execution.id] = store.put_executions([execution]) - - input_event = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.INPUT, - execution_id=execution.id, - artifact_id=input_artifact.id, - ) - output_event = metadata_store_pb2.Event( - type=metadata_store_pb2.Event.OUTPUT, - execution_id=execution.id, - artifact_id=output_artifact.id, - ) - - request_event_edges = [(0, 0, input_event), (0, 1, output_event)] - store.put_lineage_subgraph( - [execution], - [input_artifact, output_artifact], - [context], - request_event_edges, - ) - - # Test get_lineage_subgraph() with direction. - query_options = metadata_store_pb2.LineageSubgraphQueryOptions( - starting_executions=metadata_store_pb2.LineageSubgraphQueryOptions.StartingNodes( - filter_query="name = 'test_execution'" - ), - max_num_hops=2, - direction=metadata_store_pb2.LineageSubgraphQueryOptions.Direction.DOWNSTREAM, - ) - subgraph = store.get_lineage_subgraph(query_options) - self.assertLen(subgraph.artifacts, 1) - self.assertLen(subgraph.executions, 1) - self.assertSameElements( - [subgraph.artifacts[0].uri], - [output_artifact.uri], - ) - self.assertSameElements( - [subgraph.executions[0].name], - [execution.name], - ) - self.assertLen(subgraph.contexts, 1) - self.assertSameElements( - [subgraph.contexts[0].name], - [context.name], - ) - self.assertLen(subgraph.events, 1) - self.assertLen(subgraph.artifact_types, 1) - self.assertSameElements( - [subgraph.artifact_types[0].name], [artifact_type.name] - ) - self.assertLen(subgraph.execution_types, 1) - self.assertSameElements( - [subgraph.execution_types[0].name], [execution_type.name] - ) - self.assertLen(subgraph.context_types, 1) - self.assertSameElements( - [subgraph.context_types[0].name], [context_type.name] - ) - - query_options.direction = ( - metadata_store_pb2.LineageSubgraphQueryOptions.Direction.UPSTREAM - ) - subgraph = store.get_lineage_subgraph(query_options) - self.assertLen(subgraph.artifacts, 1) - self.assertLen(subgraph.executions, 1) - self.assertSameElements( - [subgraph.artifacts[0].uri], - [input_artifact.uri], - ) - self.assertSameElements( - [subgraph.executions[0].name], - [execution.name], - ) - self.assertLen(subgraph.contexts, 1) - self.assertSameElements( - [subgraph.contexts[0].name], - [context.name], - ) - self.assertLen(subgraph.events, 1) - self.assertLen(subgraph.artifact_types, 1) - self.assertSameElements( - [subgraph.artifact_types[0].name], [artifact_type.name] - ) - self.assertLen(subgraph.execution_types, 1) - self.assertSameElements( - [subgraph.execution_types[0].name], [execution_type.name] - ) - self.assertLen(subgraph.context_types, 1) - self.assertSameElements( - [subgraph.context_types[0].name], [context_type.name] - ) - def test_put_and_use_attributions_and_associations(self): store = _get_metadata_store(self.cli_args) context_type = _create_example_context_type(self._get_test_type_name()) diff --git a/ml_metadata/metadata_store/postgresql_query_executor.cc b/ml_metadata/metadata_store/postgresql_query_executor.cc index 501a9251b..705b76836 100644 --- a/ml_metadata/metadata_store/postgresql_query_executor.cc +++ b/ml_metadata/metadata_store/postgresql_query_executor.cc @@ -32,8 +32,9 @@ limitations under the License. #include "ml_metadata/metadata_store/query_executor.h" #include "ml_metadata/proto/metadata_source.pb.h" #include "ml_metadata/proto/metadata_store.pb.h" -#include "ml_metadata/query/filter_query_ast_resolver.h" -#include "ml_metadata/query/filter_query_builder.h" +// ZetaSQL filter query removed - feature disabled +// #include "ml_metadata/query/filter_query_ast_resolver.h" +// #include "ml_metadata/query/filter_query_builder.h" #include "ml_metadata/util/return_utils.h" #include "ml_metadata/util/struct_utils.h" @@ -884,31 +885,10 @@ absl::Status PostgreSQLQueryExecutor::ListNodeIDsUsingOptions( "Invalid Node passed to ListNodeIDsUsingOptions"); } + // ZetaSQL filter_query feature removed - not supported if (options.has_filter_query() && !options.filter_query().empty()) { - node_table_alias = ml_metadata::FilterQueryBuilder::kBaseTableAlias; - ml_metadata::FilterQueryAstResolver ast_resolver( - options.filter_query()); - const absl::Status ast_gen_status = ast_resolver.Resolve(); - if (!ast_gen_status.ok()) { - return absl::InvalidArgumentError( - absl::StrCat("Invalid `filter_query`: ", ast_gen_status.message())); - } - // Generate SQL - ml_metadata::FilterQueryBuilder query_builder; - const absl::Status sql_gen_status = - ast_resolver.GetAst()->Accept(&query_builder); - if (!sql_gen_status.ok()) { - return absl::InvalidArgumentError( - absl::StrCat("Failed to construct valid SQL from `filter_query`: ", - sql_gen_status.message())); - } - sql_query = absl::Substitute( - "SELECT distinct $0.id, $0.create_time_since_epoch FROM $1 WHERE $2 " - "AND ", - *node_table_alias, - // TODO(b/257334039): remove query_version-conditional logic - query_builder.GetFromClause(query_version), - query_builder.GetWhereClause()); + return absl::UnimplementedError( + "filter_query is not supported - ZetaSQL dependency removed"); } if (candidate_ids) { diff --git a/ml_metadata/metadata_store/query_config_executor.cc b/ml_metadata/metadata_store/query_config_executor.cc index d369a511d..cf4e512cb 100644 --- a/ml_metadata/metadata_store/query_config_executor.cc +++ b/ml_metadata/metadata_store/query_config_executor.cc @@ -33,8 +33,9 @@ limitations under the License. #include "ml_metadata/metadata_store/list_operation_query_helper.h" #include "ml_metadata/proto/metadata_source.pb.h" #include "ml_metadata/proto/metadata_store.pb.h" -#include "ml_metadata/query/filter_query_ast_resolver.h" -#include "ml_metadata/query/filter_query_builder.h" +// ZetaSQL filter query removed - feature disabled +// #include "ml_metadata/query/filter_query_ast_resolver.h" +// #include "ml_metadata/query/filter_query_builder.h" #include "ml_metadata/util/return_utils.h" #include "ml_metadata/util/struct_utils.h" @@ -825,29 +826,10 @@ absl::Status QueryConfigExecutor::ListNodeIDsUsingOptions( "Invalid Node passed to ListNodeIDsUsingOptions"); } + // ZetaSQL filter_query feature removed - not supported if (options.has_filter_query() && !options.filter_query().empty()) { - node_table_alias = ml_metadata::FilterQueryBuilder::kBaseTableAlias; - ml_metadata::FilterQueryAstResolver ast_resolver( - options.filter_query()); - const absl::Status ast_gen_status = ast_resolver.Resolve(); - if (!ast_gen_status.ok()) { - return absl::InvalidArgumentError( - absl::StrCat("Invalid `filter_query`: ", ast_gen_status.message())); - } - // Generate SQL - ml_metadata::FilterQueryBuilder query_builder; - const absl::Status sql_gen_status = - ast_resolver.GetAst()->Accept(&query_builder); - if (!sql_gen_status.ok()) { - return absl::InvalidArgumentError( - absl::StrCat("Failed to construct valid SQL from `filter_query`: ", - sql_gen_status.message())); - } - sql_query = absl::Substitute( - "SELECT distinct $0.`id` FROM $1 WHERE $2 AND ", *node_table_alias, - // TODO(b/257334039): remove query_version-conditional logic - query_builder.GetFromClause(query_version), - query_builder.GetWhereClause()); + return absl::UnimplementedError( + "filter_query is not supported - ZetaSQL dependency removed"); } if (candidate_ids) { diff --git a/ml_metadata/ml_metadata.bzl b/ml_metadata/ml_metadata.bzl index 8a6e685a6..a22aff948 100644 --- a/ml_metadata/ml_metadata.bzl +++ b/ml_metadata/ml_metadata.bzl @@ -214,7 +214,16 @@ def ml_metadata_pybind_extension( prefix = name[:p + 1] so_file = "%s%s.so" % (prefix, sname) pyd_file = "%s%s.pyd" % (prefix, sname) - exported_symbols = [ + + # For macOS, only export PyInit_* (Python 3) + # macOS linker requires all exported symbols to exist + exported_symbols_macos = [ + "PyInit_%s" % sname, + ] + + # For Linux, include Python 2 symbols for compatibility + # (version script allows undefined symbols) + exported_symbols_linux = [ "init%s" % sname, "init_%s" % sname, "PyInit_%s" % sname, @@ -223,8 +232,8 @@ def ml_metadata_pybind_extension( exported_symbols_file = "%s-exported-symbols.lds" % name version_script_file = "%s-version-script.lds" % name - exported_symbols_output = "\n".join(["_%s" % symbol for symbol in exported_symbols]) - version_script_output = "\n".join([" %s;" % symbol for symbol in exported_symbols]) + exported_symbols_output = "\n".join(["_%s" % symbol for symbol in exported_symbols_macos]) + version_script_output = "\n".join([" %s;" % symbol for symbol in exported_symbols_linux]) native.genrule( name = name + "_exported_symbols", diff --git a/ml_metadata/postgresql.BUILD b/ml_metadata/postgresql.BUILD index 42e2f811b..503d24c20 100644 --- a/ml_metadata/postgresql.BUILD +++ b/ml_metadata/postgresql.BUILD @@ -94,7 +94,9 @@ cc_library( "config/pg_config_paths.h", "config/pg_config_types.h", ], - copts = [], + copts = [ + "-Wno-implicit-function-declaration", + ], defines = [ "FRONTEND", ] + select({ @@ -103,6 +105,7 @@ cc_library( "HAVE_STRLCPY=1", "HAVE_STRUCT_SOCKADDR_STORAGE_SS_LEN=1", "HAVE_SYS_UCRED_H=1", + "HAVE_STRCHRNUL=1", ], "//conditions:default": [ "_GNU_SOURCE", @@ -899,7 +902,7 @@ genrule( "#define HAVE_STDLIB_H 1", "", "/* Define to 1 if you have the `strchrnul' function. */", - "/* #undef HAVE_STRCHRNUL */", + "#define HAVE_STRCHRNUL 1", "", "/* Define to 1 if you have the `strerror_r' function. */", "#define HAVE_STRERROR_R 1", @@ -1146,7 +1149,7 @@ genrule( "/* #undef HAVE__CPUID */", "", "/* Define to 1 if you have __get_cpuid. */", - "#define HAVE__GET_CPUID 1", + "/* #undef HAVE__GET_CPUID */", "", "/* Define to 1 if your compiler understands _Static_assert. */", "#define HAVE__STATIC_ASSERT 1", diff --git a/ml_metadata/query/BUILD b/ml_metadata/query/BUILD index 6c2f42c4b..d82e9b105 100644 --- a/ml_metadata/query/BUILD +++ b/ml_metadata/query/BUILD @@ -19,64 +19,65 @@ load( licenses(["notice"]) -cc_library( - name = "filter_query_ast_resolver", - srcs = ["filter_query_ast_resolver.cc"], - hdrs = ["filter_query_ast_resolver.h"], - visibility = ["//ml_metadata:__subpackages__"], - deps = [ - "@com_google_absl//absl/container:flat_hash_set", - "@com_google_absl//absl/status", - "@com_google_absl//absl/status:statusor", - "@com_google_absl//absl/strings", - "//ml_metadata/proto:metadata_store_proto", - "//ml_metadata/util:return_utils", - "@com_googlesource_code_re2//:re2", - "@com_google_zetasql//zetasql/public:analyzer", - "@com_google_zetasql//zetasql/public:simple_catalog", - ], -) +# ZetaSQL-dependent libraries removed - these require ZetaSQL for SQL parsing +# cc_library( +# name = "filter_query_ast_resolver", +# srcs = ["filter_query_ast_resolver.cc"], +# hdrs = ["filter_query_ast_resolver.h"], +# visibility = ["//ml_metadata:__subpackages__"], +# deps = [ +# "@com_google_absl//absl/container:flat_hash_set", +# "@com_google_absl//absl/status", +# "@com_google_absl//absl/status:statusor", +# "@com_google_absl//absl/strings", +# "//ml_metadata/proto:metadata_store_proto", +# "//ml_metadata/util:return_utils", +# "@com_googlesource_code_re2//:re2", +# "@com_google_zetasql//zetasql/public:analyzer", +# "@com_google_zetasql//zetasql/public:simple_catalog", +# ], +# ) -ml_metadata_cc_test( - name = "filter_query_ast_resolver_test", - size = "small", - srcs = ["filter_query_ast_resolver_test.cc"], - deps = [ - ":filter_query_ast_resolver", - "@com_google_googletest//:gtest_main", - "@com_google_absl//absl/status", - "//ml_metadata/metadata_store:test_util", - "//ml_metadata/proto:metadata_store_proto", - ], -) +# ml_metadata_cc_test( +# name = "filter_query_ast_resolver_test", +# size = "small", +# srcs = ["filter_query_ast_resolver_test.cc"], +# deps = [ +# ":filter_query_ast_resolver", +# "@com_google_googletest//:gtest_main", +# "@com_google_absl//absl/status", +# "//ml_metadata/metadata_store:test_util", +# "//ml_metadata/proto:metadata_store_proto", +# ], +# ) -cc_library( - name = "filter_query_builder", - srcs = ["filter_query_builder.cc"], - hdrs = ["filter_query_builder.h"], - visibility = ["//ml_metadata:__subpackages__"], - deps = [ - "@com_google_absl//absl/container:btree", - "@com_google_absl//absl/status", - "@com_google_absl//absl/strings", - "//ml_metadata/metadata_store:constants", - "//ml_metadata/proto:metadata_store_proto", - "@com_google_glog//:glog", - "@com_google_zetasql//zetasql/public:strings", - "@com_google_zetasql//zetasql/resolved_ast:sql_builder", - ], -) +# cc_library( +# name = "filter_query_builder", +# srcs = ["filter_query_builder.cc"], +# hdrs = ["filter_query_builder.h"], +# visibility = ["//ml_metadata:__subpackages__"], +# deps = [ +# "@com_google_absl//absl/container:btree", +# "@com_google_absl//absl/status", +# "@com_google_absl//absl/strings", +# "//ml_metadata/metadata_store:constants", +# "//ml_metadata/proto:metadata_store_proto", +# "@com_google_glog//:glog", +# "@com_google_zetasql//zetasql/public:strings", +# "@com_google_zetasql//zetasql/resolved_ast:sql_builder", +# ], +# ) -ml_metadata_cc_test( - name = "filter_query_builder_test", - size = "small", - srcs = ["filter_query_builder_test.cc"], - deps = [ - ":filter_query_ast_resolver", - ":filter_query_builder", - "@com_google_googletest//:gtest_main", - "@com_google_absl//absl/status", - "//ml_metadata/metadata_store:test_util", - "//ml_metadata/proto:metadata_store_proto", - ], -) +# ml_metadata_cc_test( +# name = "filter_query_builder_test", +# size = "small", +# srcs = ["filter_query_builder_test.cc"], +# deps = [ +# ":filter_query_ast_resolver", +# ":filter_query_builder", +# "@com_google_googletest//:gtest_main", +# "@com_google_absl//absl/status", +# "//ml_metadata/metadata_store:test_util", +# "//ml_metadata/proto:metadata_store_proto", +# ], +# ) diff --git a/ml_metadata/third_party/upb.patch b/ml_metadata/third_party/upb.patch new file mode 100644 index 000000000..18e451941 --- /dev/null +++ b/ml_metadata/third_party/upb.patch @@ -0,0 +1,10 @@ +--- bazel/build_defs.bzl.orig ++++ bazel/build_defs.bzl +@@ -40,7 +40,6 @@ + _DEFAULT_COPTS.extend([ + "-std=c99", + "-pedantic", +- "-Werror=pedantic", + "-Wall", + "-Wstrict-prototypes", + # GCC (at least) emits spurious warnings for this that cannot be fixed