
Fix support for numpy-2 #9109

Merged
merged 13 commits on Mar 4, 2025

19 changes: 10 additions & 9 deletions .github/workflows/nightly.yml
@@ -270,15 +270,16 @@ jobs:
WHEEL_ARTIFACT_NAME: macos-arm64-wheel
secrets: inherit

test-wheel-macos-x64:
needs: [checks, build-wheel-macos-x64]
name: "macos-x64: Test Wheels"
uses: ./.github/workflows/reusable_test_wheels.yml
with:
CONCURRENCY: nightly-macos-x64
PLATFORM: macos-x64
WHEEL_ARTIFACT_NAME: macos-x64-wheel
secrets: inherit
# TODO(#9108): Test macos wheels
Member:

I'm afraid that will backfire, but I don't see a way around it, and this PR answers an actual, pressing need. So yeah, let's take the plunge.

# test-wheel-macos-x64:
# needs: [checks, build-wheel-macos-x64]
# name: "macos-x64: Test Wheels"
# uses: ./.github/workflows/reusable_test_wheels.yml
# with:
# CONCURRENCY: nightly-macos-x64
# PLATFORM: macos-x64
# WHEEL_ARTIFACT_NAME: macos-x64-wheel
# secrets: inherit

test-wheel-windows-x64:
needs: [checks, build-wheel-windows-x64]
8 changes: 4 additions & 4 deletions crates/build/re_types_builder/src/codegen/python/mod.rs
@@ -1397,9 +1397,9 @@ fn quote_array_method_from_obj(
let field_name = &obj.fields[0].name;
unindent(&format!(
"
def __array__(self, dtype: npt.DTypeLike=None) -> npt.NDArray[Any]:
def __array__(self, dtype: npt.DTypeLike=None, copy: bool|None=None) -> npt.NDArray[Any]:
# You can define your own __array__ function as a member of {} in {}
return np.asarray(self.{field_name}, dtype=dtype)
return np.asarray(self.{field_name}, dtype=dtype, copy=copy)
",
ext_class.name, ext_class.file_name
))
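For reference, a minimal sketch (not generated output) of what the Python emitted by `quote_array_method_from_obj` amounts to under numpy 2, using a hypothetical single-field `Position2D` component; the class and field names are placeholders:

from __future__ import annotations

from typing import Any

import numpy as np
import numpy.typing as npt


class Position2D:
    """Hypothetical single-field component mirroring the generated classes."""

    def __init__(self, xy: npt.ArrayLike) -> None:
        self.xy = xy

    def __array__(self, dtype: npt.DTypeLike = None, copy: bool | None = None) -> npt.NDArray[Any]:
        # numpy 2 passes `copy` through the __array__ protocol; forwarding it to
        # np.asarray (which accepts `copy` as of numpy 2.0) avoids the deprecation
        # warning emitted for the old single-argument signature.
        return np.asarray(self.xy, dtype=dtype, copy=copy)


# np.asarray(Position2D([1.0, 2.0]), dtype=np.float32) dispatches through __array__.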
@@ -2651,8 +2651,8 @@ fn quote_columnar_methods(reporter: &Reporter, obj: &Object, objects: &Objects)
param = kwargs[batch.component_descriptor().archetype_field_name] # type: ignore[index]
shape = np.shape(param) # type: ignore[arg-type]

batch_length = shape[1] if len(shape) > 1 else 1
num_rows = shape[0] if len(shape) >= 1 else 1
batch_length = shape[1] if len(shape) > 1 else 1 # type: ignore[redundant-expr,misc]
num_rows = shape[0] if len(shape) >= 1 else 1 # type: ignore[redundant-expr,misc]
sizes = batch_length * np.ones(num_rows)
else:
# For non-primitive types, default to partitioning each element separately.
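As a sanity check on the partition sizing emitted by `quote_columnar_methods`, here is the same rule as a small standalone Python helper (the function name is invented for illustration):

import numpy as np


def partition_sizes(param) -> np.ndarray:
    # Primitive batches: the first axis is the row count, the second axis (if
    # present) is the per-row batch length; scalars fall back to a single row.
    shape = np.shape(param)
    batch_length = shape[1] if len(shape) > 1 else 1
    num_rows = shape[0] if len(shape) >= 1 else 1
    return batch_length * np.ones(num_rows)


print(partition_sizes(np.zeros((5, 3))))  # [3. 3. 3. 3. 3.] -> 5 rows of 3 elements
print(partition_sizes([1.0, 2.0]))        # [1. 1.]          -> 2 rows of 1 element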
2 changes: 1 addition & 1 deletion examples/python/controlnet/pyproject.toml
@@ -9,7 +9,7 @@ dependencies = [
"pillow",
"diffusers==0.27.2",
"numpy",
"torch==2.2.2", # This must match the version captured in pixi.toml
"torch==2.3.1", # This must match the version captured in pixi.toml
"transformers",
"rerun-sdk",
]

@@ -13,7 +13,7 @@ dependencies = [
"requests>=2.31,<3",
"rerun-sdk",
"scipy",
"torch==2.2.2", # This must match the version captured in pixi.toml
"torch==2.3.1", # This must match the version captured in pixi.toml
"transformers>=4.26.0",
]

2 changes: 1 addition & 1 deletion examples/python/detect_and_track_objects/pyproject.toml
@@ -10,7 +10,7 @@ dependencies = [
"requests>=2.31,<3",
"rerun-sdk",
"timm==0.9.11",
"torch==2.2.2", # This must match the version captured in pixi.toml
"torch==2.3.1", # This must match the version captured in pixi.toml
"transformers",
]

2 changes: 1 addition & 1 deletion examples/python/segment_anything_model/pyproject.toml
@@ -8,7 +8,7 @@ dependencies = [
"opencv-python",
"requests>=2.31,<3",
"rerun-sdk",
"torch==2.2.2", # This must match the version captured in pixi.toml
"torch==2.3.1", # This must match the version captured in pixi.toml
"torchvision",
"tqdm",
]
15,556 changes: 5,308 additions & 10,248 deletions pixi.lock

Large diffs are not rendered by default.

41 changes: 15 additions & 26 deletions pixi.toml
@@ -464,7 +464,7 @@ jinja2 = ">=3.1.3,<3.2" # For `build_screenshot_compare.py` and other
mypy = "1.14.1.*"
nasm = ">=2.16" # Required by https://github.com/memorysafety/rav1d for native video support
ninja = "1.11.1.*"
numpy = ">=1.23,<2"
numpy = ">=2"
prettier = "3.2.5.*"
pyarrow = "18.0.0.*"
pytest = ">=7"
@@ -537,7 +537,7 @@ platforms = ["linux-64", "linux-aarch64", "osx-arm64", "osx-64", "win-64"]
#
# This is also redundantly defined in `python-pypi`
opencv = ">4.6"
numpy = "<2" # Rerun still needs numpy <2. Enforce this outside of the pypi dep tree so we pick up the conda version.
numpy = ">=2" # Rerun still needs numpy <2. Enforce this outside of the pypi dep tree so we pick up the conda version.


[feature.python-dev.activation]
Expand Down Expand Up @@ -570,11 +570,14 @@ mike = "==1.1.2"
setuptools = ">75"
sphobjinv = "==2.3.1"

[feature.py-test-deps]
platforms = ["linux-64", "linux-aarch64", "osx-arm64", "win-64"]

[feature.py-test-deps.pypi-dependencies]
# Torch is needed to run the rerun unit tests.
# This dependency is here to get torch into the pixi cache because it's large and installing it
# during the wheel-test task itself is otherwise painful.
torch = "==2.2.2" # This must match the torch version in any package we install during `wheel-test`
torch = "==2.3.1" # This must match the torch version in any package we install during `wheel-test`

[feature.wheel-test.dependencies]
pip = ">=23"
@@ -586,18 +589,17 @@ platforms = ["linux-64", "linux-aarch64", "osx-arm64", "osx-64", "win-64"]
[feature.python-pypi.dependencies]
# Note these are the same as in `python-dev` but we need to repeat them here because the two are mutually exclusive.
opencv = ">4.6"
numpy = "<2" # Rerun still needs numpy <2. Enforce this outside of the pypi dep tree so we pick up the conda version.
numpy = ">=2" # Rerun still needs numpy <2. Enforce this outside of the pypi dep tree so we pick up the conda version.


[feature.python-pypi.pypi-dependencies]
rerun-sdk = "==0.17.0"
rerun-notebook = "==0.17.0"
rerun-sdk = "==0.22.1"
rerun-notebook = "==0.22.1"

# EXAMPLES ENVIRONMENT
[feature.examples-common]
# Ideally we would could remove`linux-aarch64` from here, but it breaks other things.
# See: https://github.com/prefix-dev/pixi/issues/1051
platforms = ["linux-64", "linux-aarch64", "osx-arm64", "osx-64", "win-64"]
# TODO(#9180): Split `examples` into high-dep and low-dep so we can run some examples on `linux-aarch64` and `osx-64`.
platforms = ["linux-64", "osx-arm64", "win-64"]


[feature.examples-common.system-requirements]
@@ -609,6 +611,9 @@ python = "=3.11"
# TODO(ab): otherwise pixi will try to build from source and fail, not sure why (used by air_traffic_data)
pyproj = ">=3.7.0"

# Need to use the conda version of umap-learn to be compatible with numpy-2
umap-learn = "==0.5.7"

[feature.examples-common.pypi-dependencies]
# External deps
jupyter = ">=1.0"
@@ -634,6 +639,7 @@ incremental_logging = { path = "examples/python/incremental_logging", editable =
lidar = { path = "examples/python/lidar", editable = true }
live_camera_edge_detection = { path = "examples/python/live_camera_edge_detection", editable = true }
live_scrolling_plot = { path = "examples/python/live_scrolling_plot", editable = true }
# This depends on umap-learn which is incompatible with numpy-2 if it is installed from pypi.
llm_embedding_ner = { path = "examples/python/llm_embedding_ner", editable = true }
log_file = { path = "examples/python/log_file", editable = true }
minimal = { path = "examples/python/minimal", editable = true }
@@ -657,23 +663,6 @@ signed_distance_fields = { path = "examples/python/signed_distance_fields", edit
stdio = { path = "examples/python/stdio", editable = true }
structure_from_motion = { path = "examples/python/structure_from_motion", editable = true }

# TODO(jleibs): Do this with less duplication
[feature.examples-common.target.linux-64.pypi-dependencies]
face_tracking = { path = "examples/python/face_tracking", editable = true }
gesture_detection = { path = "examples/python/gesture_detection", editable = true }
human_pose_tracking = { path = "examples/python/human_pose_tracking", editable = true }

[feature.examples-common.target.osx-arm64.pypi-dependencies]
face_tracking = { path = "examples/python/face_tracking", editable = true }
gesture_detection = { path = "examples/python/gesture_detection", editable = true }
human_pose_tracking = { path = "examples/python/human_pose_tracking", editable = true }

[feature.examples-common.target.osx-64.pypi-dependencies]
face_tracking = { path = "examples/python/face_tracking", editable = true }
gesture_detection = { path = "examples/python/gesture_detection", editable = true }
human_pose_tracking = { path = "examples/python/human_pose_tracking", editable = true }

[feature.examples-common.target.win-64.pypi-dependencies]
face_tracking = { path = "examples/python/face_tracking", editable = true }
gesture_detection = { path = "examples/python/gesture_detection", editable = true }
human_pose_tracking = { path = "examples/python/human_pose_tracking", editable = true }
11 changes: 7 additions & 4 deletions rerun_py/rerun_sdk/rerun/_baseclasses.py
@@ -398,17 +398,20 @@ def __init__(self, component_batch: ComponentBatchLike, *, lengths: npt.ArrayLik
if "Indicator" in component_batch.component_descriptor().component_name:
if lengths is None:
# Indicator component, no lengths -> zero-sized batches by default
self.lengths = np.zeros(len(component_batch.as_arrow_array()))
self.lengths = np.zeros(len(component_batch.as_arrow_array()), dtype=np.int32)
else:
# Normal component, lengths specified -> respect outer length, but enforce zero-sized batches still
self.lengths = np.zeros(len(np.array(lengths)))
self.lengths = np.zeros(len(np.array(lengths)), dtype=np.int32)
else:
if lengths is None:
# Normal component, no lengths -> unit-sized batches by default
self.lengths = np.ones(len(component_batch.as_arrow_array()))
self.lengths = np.ones(len(component_batch.as_arrow_array()), dtype=np.int32)
else:
# Normal component, lengths specified -> follow instructions
self.lengths = np.array(lengths)
lengths = np.array(lengths)
if lengths.ndim != 1:
raise ValueError("Lengths must be a 1D array.")
self.lengths = lengths.flatten().astype(np.int32)

def component_descriptor(self) -> ComponentDescriptor:
"""
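A standalone sketch of the stricter `lengths` handling introduced in the `__init__` above, pulled out into a free function for illustration (the helper name is hypothetical):

import numpy as np
import numpy.typing as npt


def normalize_lengths(lengths: npt.ArrayLike) -> npt.NDArray[np.int32]:
    # Mirrors the checks above: lengths must be one-dimensional, and the result
    # is forced to int32 so the column never carries float counts (np.zeros and
    # np.ones default to float64).
    arr = np.array(lengths)
    if arr.ndim != 1:
        raise ValueError("Lengths must be a 1D array.")
    return arr.flatten().astype(np.int32)


print(normalize_lengths([2, 2, 3]))  # [2 2 3], dtype=int32
# normalize_lengths([[2, 2, 3]]) would raise ValueError: Lengths must be a 1D array.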
4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/any_value.py
@@ -80,14 +80,14 @@ def __init__(self, descriptor: str | ComponentDescriptor, value: Any, drop_untyp
if np_type is not None:
if value is None:
value = []
np_value = np.atleast_1d(np.array(value, copy=False, dtype=np_type))
np_value = np.atleast_1d(np.asarray(value, dtype=np_type))
self.pa_array = pa.array(np_value, type=pa_type)
else:
if value is None:
if not drop_untyped_nones:
raise ValueError("Cannot convert None to arrow array. Type is unknown.")
else:
np_value = np.atleast_1d(np.array(value, copy=False))
np_value = np.atleast_1d(np.asarray(value))
self.pa_array = pa.array(np_value)
ANY_VALUE_TYPE_REGISTRY[descriptor] = (np_value.dtype, self.pa_array.type)

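The `np.array(..., copy=False)` to `np.asarray(...)` swaps here, and in the extension modules further down, follow from a numpy 2 behavior change; a small illustration, assuming numpy >= 2 is installed:

import numpy as np

values = np.arange(3, dtype=np.float64)

# numpy 1.x: copy=False meant "avoid a copy when possible".
# numpy 2.x: copy=False means "never copy" and raises ValueError when a copy is
# unavoidable (e.g. a dtype conversion), so np.asarray is the drop-in way to
# keep the old copy-only-if-needed behavior.
same_buffer = np.asarray(values)                  # no copy: returns `values` itself
converted = np.asarray(values, dtype=np.float32)  # copies, without raising
assert same_buffer is values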
4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/annotation_context.py

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/arrows2d.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/arrows3d.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/asset3d.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/asset_video.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/bar_chart.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/boxes2d.py


2 changes: 1 addition & 1 deletion rerun_py/rerun_sdk/rerun/archetypes/boxes2d_ext.py
@@ -108,7 +108,7 @@ def __init__(
raise ValueError("Cannot specify both `array` and `centers` at the same time.")

if type(array) is not np.ndarray:
array = np.array(array)
array = np.asarray(array)

if np.any(array):
if array.ndim == 1:
4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/boxes3d.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/capsules3d.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/clear.py


4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/depth_image.py


2 changes: 1 addition & 1 deletion rerun_py/rerun_sdk/rerun/archetypes/depth_image_ext.py
@@ -35,7 +35,7 @@ def _to_numpy(tensor: ImageLike) -> npt.NDArray[Any]:
# Make available to the cpu
return tensor.numpy(force=True) # type: ignore[union-attr]
except AttributeError:
return np.array(tensor, copy=False)
return np.asarray(tensor)


class DepthImageExt:
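For context, the conversion fallback touched above boils down to roughly this standalone sketch (torch is optional at runtime; the helper name only loosely mirrors the private `_to_numpy`):

from typing import Any

import numpy as np
import numpy.typing as npt


def to_numpy(tensor: Any) -> npt.NDArray[Any]:
    try:
        # torch tensors: numpy(force=True) detaches and copies to CPU if needed.
        return tensor.numpy(force=True)
    except AttributeError:
        # Lists, tuples, numpy arrays, ...: np.asarray replaces the numpy-1-only
        # np.array(..., copy=False) idiom and copies only when it has to.
        return np.asarray(tensor)


to_numpy([[1.0, 2.0], [3.0, 4.0]])  # nested lists become a (2, 2) float array
to_numpy(np.ones((2, 2)))           # ndarrays pass through without copying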
4 changes: 2 additions & 2 deletions rerun_py/rerun_sdk/rerun/archetypes/ellipsoids3d.py

