Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3121,10 +3121,11 @@ class BeamModulePlugin implements Plugin<Project> {
// TODO: https://github.com/apache/beam/issues/29022
// pip 23.3 is failing due to Hash mismatch between expected SHA of the packaged and actual SHA.
// until it is resolved on pip's side, don't use pip's cache.
// pip 25.1 causes :sdks:python:installGcpTest to get stuck. Pin to 25.0.1 for now.
// Use pip 26.0.1 for improved resolution performance and bug fixes. See #34798.
args '-c', ". ${project.ext.envdir}/bin/activate && " +
"pip install --pre --retries 10 --upgrade pip==25.0.1 --no-cache-dir && " +
"pip install --pre --retries 10 --upgrade tox --no-cache-dir"
"pip install --pre --retries 10 --upgrade pip==26.0.1 --no-cache-dir && " +
"pip install --pre --retries 10 --upgrade tox --no-cache-dir && " +
"pip install uv"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am wary of using uv in our tests, unless it were so common that our users were doing the same, and docs instructed users to do so.

}
}
// Gradle will delete outputs whenever it thinks they are stale. Putting a
Expand Down Expand Up @@ -3169,9 +3170,19 @@ class BeamModulePlugin implements Plugin<Project> {
packages += ",${extra}"
}

def pythonSdkDir = project.project(":sdks:python").projectDir
def constraintsPath = "${pythonSdkDir}/constraints.txt"
def constraintFile = project.file(constraintsPath)
def constraintFlag = constraintFile.exists() ? "--constraint ${constraintsPath}" : ""

// Use uv instead of pip - pip was hitting resolution-too-deep on tensorflow->keras->namex/optree.
// Include namex/optree as explicit deps to constrain resolution.
// --prerelease allow: envoy-data-plane depends on betterproto==2.0.0b6 (beta).
def anchorPkgs = "namex==0.0.9 optree==0.16.0"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think these deps are specific to a certain test suite, and thus configuring them is a matter of the test suite that needs them; BeamModulePlugin should concern with global configuration matters for the project; adding these deps doesn't feel like the right direction to me

def installCmd = ". ${project.ext.envdir}/bin/activate && uv pip install --prerelease allow ${constraintFlag} ${anchorPkgs} ${distTarBall}[${packages}]".replaceAll(/ +/, ' ').trim()
project.exec {
executable 'sh'
args '-c', ". ${project.ext.envdir}/bin/activate && pip install --pre --retries 10 ${distTarBall}[${packages}]"
args '-c', installCmd
}
}
}
Expand Down
24 changes: 24 additions & 0 deletions sdks/python/constraints.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Pins for installGcpTest. numpy handled via setup.py python_version markers.
# Use google-api-core 2.29.0 for py313 compat (2.16.2 lacks py313 support).
googleapis-common-protos==1.72.0
grpc-google-iam-v1>=0.12.4,<1.0.0
google-api-core==2.29.0
optree==0.16.0
namex==0.0.9
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py310/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ multidict==6.7.1
namex==0.1.0
networkx==3.4.2
nltk==3.9.2
numpy==2.2.6
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py310/gpu_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ networkx==3.4.2
ninja==1.13.0
nltk==3.9.2
numba==0.61.2
numpy==2.2.6
numpy==1.26.4
nvidia-cublas-cu12==12.6.4.1
nvidia-cuda-cupti-cu12==12.6.80
nvidia-cuda-nvrtc-cu12==12.6.77
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py311/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ multidict==6.7.1
namex==0.1.0
networkx==3.6.1
nltk==3.9.2
numpy==2.4.2
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py311/gpu_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ networkx==3.6.1
ninja==1.13.0
nltk==3.9.2
numba==0.61.2
numpy==2.2.6
numpy==1.26.4
nvidia-cublas-cu12==12.6.4.1
nvidia-cuda-cupti-cu12==12.6.80
nvidia-cuda-nvrtc-cu12==12.6.77
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py312/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ multidict==6.7.1
namex==0.1.0
networkx==3.6.1
nltk==3.9.2
numpy==2.4.2
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/ml/py312/gpu_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ networkx==3.6.1
ninja==1.13.0
nltk==3.9.2
numba==0.61.2
numpy==2.2.6
numpy==1.26.4
nvidia-cublas-cu12==12.6.4.1
nvidia-cuda-cupti-cu12==12.6.80
nvidia-cuda-nvrtc-cu12==12.6.77
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/py310/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ mock==5.2.0
more-itertools==10.8.0
multidict==6.7.1
nltk==3.9.2
numpy==2.2.6
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/py311/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ mock==5.2.0
more-itertools==10.8.0
multidict==6.7.1
nltk==3.9.2
numpy==2.4.2
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
2 changes: 1 addition & 1 deletion sdks/python/container/py312/base_image_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ mock==5.2.0
more-itertools==10.8.0
multidict==6.7.1
nltk==3.9.2
numpy==2.4.2
numpy==1.26.4
oauth2client==4.1.3
objsize==0.7.1
opentelemetry-api==1.39.1
Expand Down
5 changes: 3 additions & 2 deletions sdks/python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,9 @@ requires = [
"mypy-protobuf==3.5.0",
# Avoid https://github.com/pypa/virtualenv/issues/2006
"distlib==0.3.9",
# Numpy headers
"numpy>=1.14.3,<2.5.0", # Update setup.py as well.
# Numpy headers. py313 requires 2.1+; py<3.13 use 1.x for pandas ABI compat.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't follow why we need to cap numpy to < 2 for Python 3.10-3.12.

<2.5.0 seems like a wide enough bound , and i'd expect each python version to pick the right version that is compatible

"numpy>=1.26.0,<2.0.0; python_version < '3.13'",
"numpy>=2.1.0; python_version >= '3.13'",
# having cython here will create wheels that are platform dependent.
"cython>=3.0,<4",
## deps for generating external transform wrappers:
Expand Down
17 changes: 13 additions & 4 deletions sdks/python/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -384,9 +384,10 @@ def get_portability_package_data():
'grpcio>=1.67.0; python_version >= "3.13"',
'httplib2>=0.8,<0.32.0',
'jsonpickle>=3.0.0,<4.0.0',
# numpy can have breaking changes in minor versions.
# Use a strict upper bound.
'numpy>=1.14.3,<2.5.0', # Update pyproject.toml as well.
# numpy: py310-312 use 1.x; py313 needs 2.x (1.x is unsupported on py313
# and would cause a pandas ABI mismatch).
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

are there some details on this mismatch?

'numpy>=1.26.0,<2.0.0; python_version < "3.13"',
'numpy>=2.1.0; python_version >= "3.13"',
'objsize>=0.6.1,<0.8.0',
'packaging>=22.0',
'pillow',
Expand Down Expand Up @@ -541,6 +542,9 @@ def get_portability_package_data():
# tensorflow-transform requires dill, but doesn't set dill as a
# hard requirement in setup.py.
'dill',
# namex/optree: pin to avoid resolver issues (they lack version bounds).
'namex==0.0.9',
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not an opinion, just wondering - should this be defined in ml_base instead?

'optree==0.16.0',
'tensorflow-transform',
# Comment out xgboost as it is breaking presubmit python ml
# tests due to tag check introduced since pip 24.2
Expand All @@ -549,8 +553,13 @@ def get_portability_package_data():
] + ml_base,
'p312_ml_test': [
'datatable',
'namex==0.0.9',
'optree==0.16.0',
] + ml_base,
'p313_ml_test': [
'namex==0.0.9',
'optree==0.16.0',
] + ml_base,
'p313_ml_test': ml_base,
'aws': ['boto3>=1.9,<2'],
'azure': [
'azure-storage-blob>=12.3.2,<13',
Expand Down
Loading