Skip to content

Commit

Permalink
Build PyArrow with AzureFS Support (#126)
Browse files Browse the repository at this point in the history
automerged PR by conda-forge/automerge-action
  • Loading branch information
github-actions[bot] authored Jun 27, 2024
2 parents a67ef2b + 9f90bd0 commit d66af3e
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 6 deletions.
1 change: 1 addition & 0 deletions recipe/build-pyarrow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export PARQUET_HOME=$PREFIX
export SETUPTOOLS_SCM_PRETEND_VERSION=$PKG_VERSION
export PYARROW_BUILD_TYPE=release
export PYARROW_WITH_ACERO=1
export PYARROW_WITH_AZURE=1
export PYARROW_WITH_DATASET=1
export PYARROW_WITH_FLIGHT=1
export PYARROW_WITH_GANDIVA=1
Expand Down
13 changes: 7 additions & 6 deletions recipe/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,10 @@ source:
folder: testing

build:
number: 3
number: 4
# for cuda support, building with one version is enough to be compatible with
# all later versions, since arrow is only using libcuda, and not libcudart.
skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)]
# arrow promises API- & ABI-compatibility along SemVer, see #1096

outputs:
- name: pyarrow-core
Expand Down Expand Up @@ -95,15 +94,18 @@ outputs:
# running for pyarrow-core, where the aim is a low storage footprint.
- libarrow {{ version }}.*=*{{ build_ext }}
- python
# orc>=2.0.1 will look in $CONDA_PREFIX/share/zoneinfo
- tzdata
# this is redundant with libarrow, but we want smithy to pick up that
# cuda_compiler_version_min is present, to populate the CI configs
- __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"]
run_constrained:
- apache-arrow-proc =*={{ build_ext }}
# need new enough orc for using our own tzdb
- orc >=2.0.1
# need libarrow builds that have azure bindings built, i.e.
# https://github.com/conda-forge/arrow-cpp-feedstock/pull/1431 or later;
# to ensure a new enough library is selected, enforce the most recently done
# migration as a run-constraint (can be dropped for v17)
- aws-crt-cpp >=0.26.12

test:
imports:
Expand Down Expand Up @@ -266,8 +268,7 @@ outputs:
- pandas
- s3fs >=2023
- scipy
# disable until numba is rebuilt for numpy 2.0
# - sparse
- sparse
# these are generally (far) behind on migrating abseil/grpc/protobuf,
# and using them as test dependencies blocks the migrator unnecessarily
# - pytorch
Expand Down

0 comments on commit d66af3e

Please sign in to comment.