API Reference

bhut: an array-agnostic Barnes-Hut N-body accelerator.

This package provides efficient N-body force calculations using the Barnes-Hut algorithm, with support for both NumPy and Dask arrays.
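
A minimal usage sketch (values and shapes are illustrative; it assumes accelerations is exported at the package top level, as this reference suggests, otherwise import it from bhut.api):

```python
import numpy as np
import bhut

rng = np.random.default_rng(0)
positions = rng.standard_normal((1_000, 3))  # (N, dim)
masses = np.ones(1_000)                      # (N,)

# NumPy path: backend="auto" dispatches on the array type.
acc = bhut.accelerations(positions, masses, theta=0.5, softening=1e-3)
print(acc.shape)  # (1000, 3)

# Dask path: the same call also accepts Dask arrays.
# import dask.array as da
# acc_dask = bhut.accelerations(da.from_array(positions, chunks=(250, 3)),
#                               da.from_array(masses, chunks=250))
```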

Tree

Barnes-Hut tree for efficient N-body force calculations.

This class provides an object-oriented interface to the Barnes-Hut algorithm, allowing for tree construction, refitting, and force evaluation.

Parameters:

| Name      | Type       | Description                                | Default  |
| --------- | ---------- | ------------------------------------------ | -------- |
| positions | array_like | Initial particle positions, shape (N, dim) | required |
| masses    | array_like | Initial particle masses, shape (N,)        | required |
| leaf_size | int        | Maximum particles per leaf node            | 32       |
| backend   | str        | Array backend ("numpy", "dask", or "auto") | 'auto'   |
| dim       | int        | Spatial dimensions (2 or 3)                | 3        |

Attributes:

| Name      | Type             | Description                                      |
| --------- | ---------------- | ------------------------------------------------ |
| positions | ndarray          | Particle positions, shape (N, dim)               |
| masses    | ndarray          | Particle masses, shape (N,)                      |
| dim       | int              | Spatial dimensions                               |
| leaf_size | int              | Maximum particles per leaf node                  |
| backend   | str              | Array backend being used                         |
| _tree     | TreeData or None | Internal tree data structure (None until built)  |
| _is_built | bool             | Whether tree has been built                      |
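
A minimal construction sketch (illustrative values; assumes Tree is importable from the package top level, otherwise from bhut.api):

```python
import numpy as np
from bhut import Tree

rng = np.random.default_rng(42)
pos = rng.uniform(-1.0, 1.0, size=(500, 3))   # (N, dim)
m = np.full(500, 1.0 / 500)                   # (N,)

tree = Tree(pos, m, leaf_size=32, backend="auto", dim=3)
tree.build()                                          # construct the tree structure
acc = tree.accelerations(theta=0.5, softening=1e-3)   # self-evaluation, shape (500, 3)
```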

Source code in bhut/api.py
class Tree:
    """
    Barnes-Hut tree for efficient N-body force calculations.

    This class provides an object-oriented interface to the Barnes-Hut algorithm,
    allowing for tree construction, refitting, and force evaluation.

    Parameters
    ----------
    positions : array_like
        Initial particle positions, shape (N, dim)
    masses : array_like
        Initial particle masses, shape (N,)
    leaf_size : int, optional
        Maximum particles per leaf node. Default: 32
    backend : str, optional
        Array backend ("numpy", "dask", or "auto"). Default: "auto"
    dim : int, optional
        Spatial dimensions (2 or 3). Default: 3

    Attributes
    ----------
    positions : ndarray
        Particle positions, shape (N, dim)
    masses : ndarray  
        Particle masses, shape (N,)
    dim : int
        Spatial dimensions
    leaf_size : int
        Maximum particles per leaf node
    backend : str
        Array backend being used
    _tree : TreeData | None
        Internal tree data structure (None until built)
    _is_built : bool
        Whether tree has been built
    """

    def __init__(
        self,
        positions: ArrayLike,
        masses: ArrayLike,
        *,
        leaf_size: int = 32,
        backend: str = "auto",
        dim: int = 3,
    ) -> None:
        # Validate and store parameters
        _validate_dimensions(dim)

        # Convert and validate inputs
        self.positions, self.masses, _ = _validate_accelerations_inputs(
            positions, masses,
            dim=dim, softening=0.0, theta=0.5, G=1.0, leaf_size=leaf_size
        )

        self.dim = dim
        self.leaf_size = leaf_size
        self.backend = backend
        self._tree: TreeData | None = None
        self._is_built = False

        # Get namespace for this backend
        self._xp = get_namespace(self.positions, backend)

    def build(self) -> "Tree":
        """
        Build the Barnes-Hut tree structure.

        Returns
        -------
        self : Tree
            Returns self for method chaining
        """
        self._tree = build_tree(
            self.positions, self.masses, 
            leaf_size=self.leaf_size, backend=self.backend, dim=self.dim
        )
        self._is_built = True
        return self

    def refit(self, new_positions: ArrayLike, new_masses: Optional[ArrayLike] = None) -> "Tree":
        """
        Refit the tree with new positions, keeping the same topology.

        This is faster than rebuilding when particles move slightly.
        Uses efficient O(M) algorithm where M is number of tree nodes.

        Parameters
        ----------
        new_positions : array_like
            New particle positions, shape (N, dim)
        new_masses : array_like, optional
            New particle masses, shape (N,). If None, keeps existing masses.

        Returns
        -------
        self : Tree
            Returns self for method chaining

        Raises
        ------
        ValueError
            If arrays have incompatible shapes or tree is not built
        """
        if not self._is_built or self._tree is None:
            raise ValueError("Tree must be built before refitting. Call build() first.")

        # Use existing masses if not provided
        if new_masses is None:
            new_masses = self.masses

        # Validate inputs
        new_positions, new_masses, _ = _validate_accelerations_inputs(
            new_positions, new_masses,
            dim=self.dim, softening=0.0, theta=0.5, G=1.0, leaf_size=self.leaf_size
        )

        # Check if refit is recommended vs rebuild
        if not should_refit_vs_rebuild(self._tree, new_positions):
            # Fall back to rebuild for major changes
            return self.rebuild(new_positions, new_masses)

        # Perform efficient refit
        self._tree = refit_tree(self._tree, new_positions, new_masses, self._xp)

        # Update stored arrays
        self.positions = new_positions
        self.masses = new_masses

        return self

    def rebuild(
        self, new_positions: ArrayLike, new_masses: Optional[ArrayLike] = None
    ) -> "Tree":
        """
        Rebuild the tree with new positions and optionally new masses.

        Parameters
        ----------
        new_positions : array_like
            New particle positions, shape (N, dim)
        new_masses : array_like, optional
            New particle masses, shape (N,). If None, keeps existing masses.

        Returns
        -------
        self : Tree
            Returns self for method chaining

        Raises
        ------
        ValueError
            If new arrays have incompatible shapes
        """
        # Use existing masses if not provided
        if new_masses is None:
            new_masses = self.masses

        # Validate new inputs
        new_positions, new_masses, _ = _validate_accelerations_inputs(
            new_positions, new_masses,
            dim=self.dim, softening=0.0, theta=0.5, G=1.0, leaf_size=self.leaf_size
        )

        # Update stored arrays
        self.positions = new_positions
        self.masses = new_masses

        # Mark as needing rebuild
        self._is_built = False
        self._tree = None

        # Rebuild tree
        return self.build()

    def accelerations(
        self,
        targets: Optional[ArrayLike] = None,
        *,
        theta: float = 0.5,
        softening: Union[float, ArrayLike] = 0.0,
        G: float = 1.0,
    ) -> NDArray[np.floating[Any]]:
        """
        Compute gravitational accelerations for target positions.

        Parameters
        ----------
        targets : array_like, optional
            Target positions, shape (M, dim). If None, evaluates accelerations
            for the particles used to build the tree (self-evaluation).
        theta : float, optional
            Opening angle criterion. Default: 0.5
        softening : float or array_like, optional
            Plummer softening length. Default: 0.0
        G : float, optional
            Gravitational constant. Default: 1.0

        Returns
        -------
        accelerations : ndarray
            Gravitational accelerations, shape (M, dim)

        Raises
        ------
        ValueError
            If tree is not built or targets have wrong shape
        """
        if not self._is_built:
            raise ValueError("Tree must be built before evaluating accelerations")

        # Handle self-evaluation case
        if targets is None:
            return self.evaluate_accelerations(softening, theta, G)

        # Check if targets is a Dask array and handle accordingly
        if _is_dask_array(targets):
            # Use Dask implementation for targets
            return self._accelerations_dask_targets(
                targets, theta=theta, softening=softening, G=G
            )

        # NumPy path - validate targets
        targets = np.asarray(targets, dtype=np.float64)
        if len(targets.shape) != 2:
            raise ValueError(f"targets must be 2D array, got shape {targets.shape}")
        if targets.shape[1] != self.dim:
            raise ValueError(
                f"targets has {targets.shape[1]} dimensions but tree has {self.dim}"
            )

        # Validate softening for target array size
        if not np.isscalar(softening):
            softening = np.asarray(softening, dtype=np.float64)
            M = targets.shape[0]
            if softening.shape != (M,):
                raise ValueError(
                    f"softening array shape {softening.shape} does not match "
                    f"targets shape ({M},)"
                )

        # Validate other parameters
        if theta < 0:
            raise ValueError(f"theta must be non-negative, got {theta}")

        # Evaluate accelerations using tree  
        # For targets, we need to compute the acceleration at each target point
        # Use the specialized function for external targets
        from .traverse.bh import barnes_hut_accelerations_targets

        # Compute accelerations at target positions
        acc = barnes_hut_accelerations_targets(
            self._tree, self.positions, self.masses, targets, softening, theta, G
        )

        return acc

    def _accelerations_dask_targets(
        self,
        targets: ArrayLike,
        *,
        theta: float = 0.5,
        softening: Union[float, ArrayLike] = 0.0,
        G: float = 1.0,
    ) -> ArrayLike:
        """Compute accelerations for Dask target arrays."""
        # Import Dask array operations
        try:
            import dask.array as da
        except ImportError as e:
            raise RuntimeError("Dask not available") from e

        # Get the Dask namespace
        from .backends.dask_ import get_dask_namespace
        xp = get_dask_namespace()

        # Define chunk function for map_blocks
        def _compute_targets_chunk_accelerations(targets_chunk, *, tree_data, source_pos, source_masses, 
                                               theta_val, softening_val, G_val):
            """Compute accelerations for a chunk of target positions."""
            from .traverse.bh import barnes_hut_accelerations_targets

            # Compute accelerations for this chunk
            return barnes_hut_accelerations_targets(
                tree_data, source_pos, source_masses, targets_chunk, 
                softening_val, theta_val, G_val
            )

        # Use map_blocks to compute accelerations preserving chunking
        return xp.map_blocks_accelerations(
            _compute_targets_chunk_accelerations,
            targets,  # targets (Dask array)
            self._tree,       # tree data
            self.positions,  # source positions (NumPy)
            self.masses,     # source masses (NumPy)
            theta_val=theta,
            softening_val=softening,
            G_val=G
        )

    def evaluate_accelerations(
        self, 
        softening: Union[float, NDArray[np.floating[Any]]], 
        theta: float = 0.5,
        G: float = 1.0
    ) -> NDArray[np.floating[Any]]:
        """
        Evaluate gravitational accelerations for all particles in the tree.

        Parameters
        ----------
        softening : float or array_like
            Gravitational softening length(s). If scalar, same softening is used
            for all particles. If array, must have shape (N,) for per-particle
            softening.
        theta : float, default 0.5
            Opening angle criterion for Barnes-Hut approximation. Smaller values
            give higher accuracy but slower computation. Typical range: 0.1-1.0.
        G : float, default 1.0
            Gravitational constant

        Returns
        -------
        accelerations : array_like, shape (N, D)
            Gravitational accelerations for each particle

        Raises
        ------
        RuntimeError
            If tree has not been built (call `build()` first)
        ValueError
            If input parameters have invalid shapes or values
        """
        if self._tree is None:
            raise RuntimeError("Tree not built. Call build() first.")

        # Validate inputs
        if not isinstance(softening, (int, float)):
            raise ValueError("Only scalar softening is currently supported")

        if softening < 0:
            raise ValueError("Softening must be non-negative")

        if theta < 0:
            raise ValueError("Opening angle theta must be non-negative")

        # Use the standalone function to compute accelerations
        accelerations_np = evaluate_accelerations(
            self._tree, self.positions, self.masses, theta, softening, G
        )

        # Convert back to original array type  
        xp = get_namespace(self.positions, self.backend)
        return xp.asarray(accelerations_np)

accelerations

accelerations(targets=None, *, theta=0.5, softening=0.0, G=1.0)

Compute gravitational accelerations for target positions.

Parameters:

| Name      | Type                | Description                                                                                                                     | Default |
| --------- | ------------------- | ------------------------------------------------------------------------------------------------------------------------------- | ------- |
| targets   | array_like          | Target positions, shape (M, dim). If None, evaluates accelerations for the particles used to build the tree (self-evaluation).   | None    |
| theta     | float               | Opening angle criterion                                                                                                           | 0.5     |
| softening | float or array_like | Plummer softening length                                                                                                          | 0.0     |
| G         | float               | Gravitational constant                                                                                                            | 1.0     |

Returns:

| Name          | Type    | Description                                 |
| ------------- | ------- | ------------------------------------------- |
| accelerations | ndarray | Gravitational accelerations, shape (M, dim) |

Raises:

| Type       | Description                                      |
| ---------- | ------------------------------------------------ |
| ValueError | If tree is not built or targets have wrong shape |
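
A short sketch of both call modes (setup values are illustrative): passing targets evaluates the field of the tree's particles at external points, while targets=None evaluates the particles themselves.

```python
import numpy as np
from bhut import Tree

pos = np.random.default_rng(0).normal(size=(400, 3))
tree = Tree(pos, np.ones(400)).build()

targets = np.array([[0.0, 0.0, 0.0],
                    [0.5, 0.5, 0.5]])                  # (M, dim); dim must match the tree
acc_targets = tree.accelerations(targets, theta=0.5, softening=1e-3)  # shape (2, 3)
acc_self = tree.accelerations()                        # shape (400, 3), self-evaluation
```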

Source code in bhut/api.py
def accelerations(
    self,
    targets: Optional[ArrayLike] = None,
    *,
    theta: float = 0.5,
    softening: Union[float, ArrayLike] = 0.0,
    G: float = 1.0,
) -> NDArray[np.floating[Any]]:
    """
    Compute gravitational accelerations for target positions.

    Parameters
    ----------
    targets : array_like, optional
        Target positions, shape (M, dim). If None, evaluates accelerations
        for the particles used to build the tree (self-evaluation).
    theta : float, optional
        Opening angle criterion. Default: 0.5
    softening : float or array_like, optional
        Plummer softening length. Default: 0.0
    G : float, optional
        Gravitational constant. Default: 1.0

    Returns
    -------
    accelerations : ndarray
        Gravitational accelerations, shape (M, dim)

    Raises
    ------
    ValueError
        If tree is not built or targets have wrong shape
    """
    if not self._is_built:
        raise ValueError("Tree must be built before evaluating accelerations")

    # Handle self-evaluation case
    if targets is None:
        return self.evaluate_accelerations(softening, theta, G)

    # Check if targets is a Dask array and handle accordingly
    if _is_dask_array(targets):
        # Use Dask implementation for targets
        return self._accelerations_dask_targets(
            targets, theta=theta, softening=softening, G=G
        )

    # NumPy path - validate targets
    targets = np.asarray(targets, dtype=np.float64)
    if len(targets.shape) != 2:
        raise ValueError(f"targets must be 2D array, got shape {targets.shape}")
    if targets.shape[1] != self.dim:
        raise ValueError(
            f"targets has {targets.shape[1]} dimensions but tree has {self.dim}"
        )

    # Validate softening for target array size
    if not np.isscalar(softening):
        softening = np.asarray(softening, dtype=np.float64)
        M = targets.shape[0]
        if softening.shape != (M,):
            raise ValueError(
                f"softening array shape {softening.shape} does not match "
                f"targets shape ({M},)"
            )

    # Validate other parameters
    if theta < 0:
        raise ValueError(f"theta must be non-negative, got {theta}")

    # Evaluate accelerations using tree  
    # For targets, we need to compute the acceleration at each target point
    # Use the specialized function for external targets
    from .traverse.bh import barnes_hut_accelerations_targets

    # Compute accelerations at target positions
    acc = barnes_hut_accelerations_targets(
        self._tree, self.positions, self.masses, targets, softening, theta, G
    )

    return acc

build

build()

Build the Barnes-Hut tree structure.

Returns:

| Name | Type | Description                      |
| ---- | ---- | -------------------------------- |
| self | Tree | Returns self for method chaining |
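
Because build() returns the Tree itself, construction and building chain naturally (a sketch with illustrative values):

```python
import numpy as np
from bhut import Tree

pos = np.random.default_rng(1).normal(size=(200, 3))
tree = Tree(pos, np.ones(200)).build()   # chained: build() returns self
acc = tree.accelerations(theta=0.5)
```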

Source code in bhut/api.py
def build(self) -> "Tree":
    """
    Build the Barnes-Hut tree structure.

    Returns
    -------
    self : Tree
        Returns self for method chaining
    """
    self._tree = build_tree(
        self.positions, self.masses, 
        leaf_size=self.leaf_size, backend=self.backend, dim=self.dim
    )
    self._is_built = True
    return self

evaluate_accelerations

evaluate_accelerations(softening, theta=0.5, G=1.0)

Evaluate gravitational accelerations for all particles in the tree.

Parameters:

| Name      | Type                | Description                                                                                                                                              | Default  |
| --------- | ------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- |
| softening | float or array_like | Gravitational softening length(s). If scalar, same softening is used for all particles. If array, must have shape (N,) for per-particle softening.        | required |
| theta     | float               | Opening angle criterion for Barnes-Hut approximation. Smaller values give higher accuracy but slower computation. Typical range: 0.1-1.0.                  | 0.5      |
| G         | float               | Gravitational constant                                                                                                                                     | 1.0      |

Returns:

| Name          | Type                     | Description                                   |
| ------------- | ------------------------ | --------------------------------------------- |
| accelerations | array_like, shape (N, D) | Gravitational accelerations for each particle |

Raises:

| Type         | Description                                       |
| ------------ | ------------------------------------------------- |
| RuntimeError | If tree has not been built (call build() first)   |
| ValueError   | If input parameters have invalid shapes or values |
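
A sketch of direct self-evaluation (illustrative values). Note that the implementation shown below currently accepts only scalar softening and raises ValueError for arrays, despite the array option listed above.

```python
import numpy as np
from bhut import Tree

pos = np.random.default_rng(3).normal(size=(100, 3))
tree = Tree(pos, np.ones(100)).build()

acc = tree.evaluate_accelerations(softening=1e-2, theta=0.5, G=1.0)
print(acc.shape)  # (100, 3)
```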

Source code in bhut/api.py
def evaluate_accelerations(
    self, 
    softening: Union[float, NDArray[np.floating[Any]]], 
    theta: float = 0.5,
    G: float = 1.0
) -> NDArray[np.floating[Any]]:
    """
    Evaluate gravitational accelerations for all particles in the tree.

    Parameters
    ----------
    softening : float or array_like
        Gravitational softening length(s). If scalar, same softening is used
        for all particles. If array, must have shape (N,) for per-particle
        softening.
    theta : float, default 0.5
        Opening angle criterion for Barnes-Hut approximation. Smaller values
        give higher accuracy but slower computation. Typical range: 0.1-1.0.
    G : float, default 1.0
        Gravitational constant

    Returns
    -------
    accelerations : array_like, shape (N, D)
        Gravitational accelerations for each particle

    Raises
    ------
    RuntimeError
        If tree has not been built (call `build()` first)
    ValueError
        If input parameters have invalid shapes or values
    """
    if self._tree is None:
        raise RuntimeError("Tree not built. Call build() first.")

    # Validate inputs
    if not isinstance(softening, (int, float)):
        raise ValueError("Only scalar softening is currently supported")

    if softening < 0:
        raise ValueError("Softening must be non-negative")

    if theta < 0:
        raise ValueError("Opening angle theta must be non-negative")

    # Use the standalone function to compute accelerations
    accelerations_np = evaluate_accelerations(
        self._tree, self.positions, self.masses, theta, softening, G
    )

    # Convert back to original array type  
    xp = get_namespace(self.positions, self.backend)
    return xp.asarray(accelerations_np)

rebuild

rebuild(new_positions, new_masses=None)

Rebuild the tree with new positions and optionally new masses.

Parameters:

| Name          | Type       | Description                                                      | Default  |
| ------------- | ---------- | ---------------------------------------------------------------- | -------- |
| new_positions | array_like | New particle positions, shape (N, dim)                           | required |
| new_masses    | array_like | New particle masses, shape (N,). If None, keeps existing masses. | None     |

Returns:

| Name | Type | Description                      |
| ---- | ---- | -------------------------------- |
| self | Tree | Returns self for method chaining |

Raises:

| Type       | Description                            |
| ---------- | -------------------------------------- |
| ValueError | If new arrays have incompatible shapes |
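
A sketch of rebuilding after a large rearrangement of particles, where reusing the old topology would no longer be appropriate (values are illustrative):

```python
import numpy as np
from bhut import Tree

rng = np.random.default_rng(7)
pos = rng.normal(size=(300, 3))
tree = Tree(pos, np.ones(300)).build()

new_pos = rng.normal(size=(300, 3)) * 5.0   # particles have moved far
tree.rebuild(new_pos)                       # masses are kept when new_masses is None
acc = tree.accelerations()
```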

Source code in bhut/api.py
def rebuild(
    self, new_positions: ArrayLike, new_masses: Optional[ArrayLike] = None
) -> "Tree":
    """
    Rebuild the tree with new positions and optionally new masses.

    Parameters
    ----------
    new_positions : array_like
        New particle positions, shape (N, dim)
    new_masses : array_like, optional
        New particle masses, shape (N,). If None, keeps existing masses.

    Returns
    -------
    self : Tree
        Returns self for method chaining

    Raises
    ------
    ValueError
        If new arrays have incompatible shapes
    """
    # Use existing masses if not provided
    if new_masses is None:
        new_masses = self.masses

    # Validate new inputs
    new_positions, new_masses, _ = _validate_accelerations_inputs(
        new_positions, new_masses,
        dim=self.dim, softening=0.0, theta=0.5, G=1.0, leaf_size=self.leaf_size
    )

    # Update stored arrays
    self.positions = new_positions
    self.masses = new_masses

    # Mark as needing rebuild
    self._is_built = False
    self._tree = None

    # Rebuild tree
    return self.build()

refit

refit(new_positions, new_masses=None)

Refit the tree with new positions, keeping the same topology.

This is faster than rebuilding when particles move only slightly. It uses an efficient O(M) algorithm, where M is the number of tree nodes.

Parameters:

| Name          | Type       | Description                                                      | Default  |
| ------------- | ---------- | ---------------------------------------------------------------- | -------- |
| new_positions | array_like | New particle positions, shape (N, dim)                           | required |
| new_masses    | array_like | New particle masses, shape (N,). If None, keeps existing masses. | None     |

Returns:

| Name | Type | Description                      |
| ---- | ---- | -------------------------------- |
| self | Tree | Returns self for method chaining |

Raises:

| Type       | Description                                             |
| ---------- | ------------------------------------------------------- |
| ValueError | If arrays have incompatible shapes or tree is not built |
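
A typical use is a time-stepping loop in which particles move only slightly between steps; per the source below, refit() falls back to rebuild() when should_refit_vs_rebuild recommends it. A sketch with an illustrative Euler-style update:

```python
import numpy as np
from bhut import Tree

rng = np.random.default_rng(0)
pos = rng.normal(size=(1_000, 3))
vel = np.zeros_like(pos)
dt = 1e-3

tree = Tree(pos, np.ones(1_000)).build()
for _ in range(10):
    acc = tree.accelerations(theta=0.5, softening=1e-2)
    vel += acc * dt
    pos = pos + vel * dt
    tree.refit(pos)   # reuse the topology; falls back to rebuild for large moves
```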

Source code in bhut/api.py
def refit(self, new_positions: ArrayLike, new_masses: Optional[ArrayLike] = None) -> "Tree":
    """
    Refit the tree with new positions, keeping the same topology.

    This is faster than rebuilding when particles move slightly.
    Uses efficient O(M) algorithm where M is number of tree nodes.

    Parameters
    ----------
    new_positions : array_like
        New particle positions, shape (N, dim)
    new_masses : array_like, optional
        New particle masses, shape (N,). If None, keeps existing masses.

    Returns
    -------
    self : Tree
        Returns self for method chaining

    Raises
    ------
    ValueError
        If arrays have incompatible shapes or tree is not built
    """
    if not self._is_built or self._tree is None:
        raise ValueError("Tree must be built before refitting. Call build() first.")

    # Use existing masses if not provided
    if new_masses is None:
        new_masses = self.masses

    # Validate inputs
    new_positions, new_masses, _ = _validate_accelerations_inputs(
        new_positions, new_masses,
        dim=self.dim, softening=0.0, theta=0.5, G=1.0, leaf_size=self.leaf_size
    )

    # Check if refit is recommended vs rebuild
    if not should_refit_vs_rebuild(self._tree, new_positions):
        # Fall back to rebuild for major changes
        return self.rebuild(new_positions, new_masses)

    # Perform efficient refit
    self._tree = refit_tree(self._tree, new_positions, new_masses, self._xp)

    # Update stored arrays
    self.positions = new_positions
    self.masses = new_masses

    return self

accelerations

accelerations(positions, masses, *, theta=0.5, softening=0.0, G=1.0, dim=3, backend='auto', leaf_size=32, criterion='bh', multipole='mono')

Compute gravitational accelerations using the Barnes-Hut algorithm.

Parameters:

| Name      | Type                | Description                                                                            | Default  |
| --------- | ------------------- | --------------------------------------------------------------------------------------- | -------- |
| positions | array_like          | Particle positions, shape (N, dim)                                                      | required |
| masses    | array_like          | Particle masses, shape (N,)                                                             | required |
| theta     | float               | Opening angle criterion. 0.0 gives direct sum, larger values are more approximate.      | 0.5      |
| softening | float or array_like | Plummer softening length to avoid singularities. Can be scalar or array of shape (N,).  | 0.0      |
| G         | float               | Gravitational constant                                                                  | 1.0      |
| dim       | int                 | Spatial dimensions (2 or 3)                                                             | 3        |
| backend   | str                 | Array backend ("numpy", "dask", or "auto")                                              | 'auto'   |
| leaf_size | int                 | Maximum particles per leaf node                                                         | 32       |
| criterion | str                 | Tree opening criterion ("bh" for Barnes-Hut)                                            | 'bh'     |
| multipole | str                 | Multipole expansion order ("mono" for monopole)                                         | 'mono'   |

Returns:

| Name          | Type    | Description                                 |
| ------------- | ------- | ------------------------------------------- |
| accelerations | ndarray | Gravitational accelerations, shape (N, dim) |

Raises:

| Type       | Description                                                |
| ---------- | ---------------------------------------------------------- |
| ValueError | If input shapes are incompatible or parameters are invalid |
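
A one-shot functional call (a sketch with illustrative values; assumes accelerations is exported at the package top level, otherwise import it from bhut.api):

```python
import numpy as np
import bhut

rng = np.random.default_rng(11)
pos = rng.uniform(-1.0, 1.0, size=(2_000, 3))
m = rng.uniform(0.5, 1.5, size=2_000)

acc = bhut.accelerations(
    pos, m,
    theta=0.6,        # looser opening angle: faster, less accurate
    softening=1e-3,   # Plummer softening avoids close-encounter singularities
    G=1.0,
    leaf_size=32,
)
print(acc.shape)      # (2000, 3)
```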

Source code in bhut/api.py
def accelerations(
    positions: ArrayLike,
    masses: ArrayLike,
    *,
    theta: float = 0.5,
    softening: Union[float, ArrayLike] = 0.0,
    G: float = 1.0,
    dim: int = 3,
    backend: str = "auto",
    leaf_size: int = 32,
    criterion: str = "bh",
    multipole: str = "mono",
) -> NDArray[np.floating[Any]]:
    """
    Compute gravitational accelerations using the Barnes-Hut algorithm.

    Parameters
    ----------
    positions : array_like
        Particle positions, shape (N, dim)
    masses : array_like
        Particle masses, shape (N,)
    theta : float, optional
        Opening angle criterion. 0.0 gives direct sum, larger values are more approximate.
        Default: 0.5
    softening : float or array_like, optional
        Plummer softening length to avoid singularities. Can be scalar or array of shape (N,).
        Default: 0.0
    G : float, optional
        Gravitational constant. Default: 1.0
    dim : int, optional
        Spatial dimensions (2 or 3). Default: 3
    backend : str, optional
        Array backend ("numpy", "dask", or "auto"). Default: "auto"
    leaf_size : int, optional
        Maximum particles per leaf node. Default: 32
    criterion : str, optional
        Tree opening criterion ("bh" for Barnes-Hut). Default: "bh"
    multipole : str, optional
        Multipole expansion order ("mono" for monopole). Default: "mono"

    Returns
    -------
    accelerations : ndarray
        Gravitational accelerations, shape (N, dim)

    Raises
    ------
    ValueError
        If input shapes are incompatible or parameters are invalid
    """
    # Validate criterion and multipole parameters
    if criterion != "bh":
        raise ValueError(f"criterion must be 'bh', got '{criterion}'")
    if multipole != "mono":
        raise ValueError(f"multipole must be 'mono', got '{multipole}'")

    # Get array namespace for backend
    xp = get_namespace(positions, backend)

    # Check if we're using Dask backend for special handling
    is_dask = backend == "dask" or (backend == "auto" and _is_dask_array(positions))

    if is_dask:
        # For Dask arrays, do basic validation without converting to NumPy
        # Validate dimensions parameter
        _validate_dimensions(dim)

        # Validate other parameters
        if theta < 0:
            raise ValueError(f"theta must be non-negative, got {theta}")
        if isinstance(softening, (int, float)) and softening < 0:
            raise ValueError(f"softening must be non-negative, got {softening}")
        if leaf_size <= 0:
            raise ValueError(f"leaf_size must be positive, got {leaf_size}")

        # Basic shape validation for Dask arrays
        if len(positions.shape) != 2:
            raise ValueError(f"positions must be 2D array, got shape {positions.shape}")
        if len(masses.shape) != 1:
            raise ValueError(f"masses must be 1D array, got shape {masses.shape}")

        N, actual_dim = positions.shape
        if actual_dim != dim:
            raise ValueError(f"positions has {actual_dim} dimensions but dim={dim}")

        if masses.shape[0] != N:
            raise ValueError(f"positions has {N} particles but masses has {masses.shape[0]} elements")

        # Skip numpy conversion and use specialized computation path
        return _accelerations_dask(
            positions, masses, theta=theta, softening=softening, G=G,
            dim=dim, leaf_size=leaf_size, xp=xp
        )
    else:
        # Standard computation path for NumPy arrays
        # Validate and convert inputs
        positions, masses, softening = _validate_accelerations_inputs(
            positions, masses, 
            dim=dim, softening=softening, theta=theta, G=G, leaf_size=leaf_size
        )

        # Build tree structure 
        tree = build_tree(positions, masses, leaf_size=leaf_size, backend=backend, dim=dim)

        # Evaluate accelerations
        acc = evaluate_accelerations(
            tree, positions, masses, theta=theta, softening=softening, G=G
        )

        return acc