Skip to content

Reference for ultralytics/nn/backends/rknn.py

Improvements

This page is sourced from https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/backends/rknn.py. Have an improvement or example to add? Open a Pull Request — thank you! 🙏


class ultralytics.nn.backends.rknn.RKNNBackend

RKNNBackend()

Bases: BaseBackend

Rockchip RKNN inference backend for Rockchip NPU hardware.

Loads and runs inference with RKNN models (.rknn files) using the RKNN-Toolkit-Lite2 runtime. Only supported on Rockchip devices with NPU hardware (e.g., RK3588, RK3566).

Methods

Name — Description
`forward` — Run inference on the Rockchip NPU.
`load_model` — Load a Rockchip RKNN model from a .rknn file or model directory.
Source code in ultralytics/nn/backends/rknn.pyView on GitHub
class RKNNBackend(BaseBackend):


method ultralytics.nn.backends.rknn.RKNNBackend.forward

def forward(self, im: torch.Tensor) -> list

Run inference on the Rockchip NPU.

Args

Name — Type — Description — Default
`im` (`torch.Tensor`): Input image tensor in BCHW format, normalized to [0, 1]. Required.

Returns

Type — Description
`list`: Model predictions as a list of output arrays.
Source code in ultralytics/nn/backends/rknn.pyView on GitHub
def forward(self, im: torch.Tensor) -> list:
    """Run inference on the Rockchip NPU.

    Args:
        im (torch.Tensor): Input image tensor in BCHW format, normalized to [0, 1].

    Returns:
        (list): Model predictions as a list of output arrays.
    """
    # RKNN runtimes consume quantized uint8 input; rescale [0, 1] floats to [0, 255].
    im = (im.cpu().numpy() * 255).astype("uint8")
    # `im` is always a single ndarray at this point (the original list/tuple check
    # right after the conversion could never be true), so wrap it unconditionally
    # in the list that RKNNLite.inference expects.
    return self.model.inference(inputs=[im])


method ultralytics.nn.backends.rknn.RKNNBackend.load_model

def load_model(self, weight: str | Path) -> None

Load a Rockchip RKNN model from a .rknn file or model directory.

Args

Name — Type — Description — Default
`weight` (`str | Path`): Path to the .rknn file or directory containing the model. Required.

Raises

Type — Description
`OSError`: If not running on a Rockchip device.
`RuntimeError`: If model loading or runtime initialization fails.
Source code in ultralytics/nn/backends/rknn.pyView on GitHub
def load_model(self, weight: str | Path) -> None:
    """Load a Rockchip RKNN model from a .rknn file or model directory.

    Args:
        weight (str | Path): Path to the .rknn file or directory containing the model.

    Raises:
        OSError: If not running on a Rockchip device.
        RuntimeError: If model loading or runtime initialization fails.
    """
    # RKNN-Toolkit-Lite2 only runs on Rockchip NPU hardware; fail fast elsewhere.
    if not is_rockchip():
        raise OSError("RKNN inference is only supported on Rockchip devices.")

    LOGGER.info(f"Loading {weight} for RKNN inference...")
    check_requirements("rknn-toolkit-lite2")
    from rknnlite.api import RKNNLite

    # Accept either a direct .rknn file or a directory containing one.
    model_path = Path(weight)
    if not model_path.is_file():
        model_path = next(model_path.rglob("*.rknn"))

    self.model = RKNNLite()
    if (ret := self.model.load_rknn(str(model_path))) != 0:
        raise RuntimeError(f"Failed to load RKNN model: {ret}")
    if (ret := self.model.init_runtime()) != 0:
        raise RuntimeError(f"Failed to init RKNN runtime: {ret}")

    # Apply exported metadata (e.g. class names, stride) when it sits beside the model.
    metadata_file = model_path.parent / "metadata.yaml"
    if metadata_file.exists():
        from ultralytics.utils import YAML

        self.apply_metadata(YAML.load(metadata_file))





📅 Created 0 days ago ✏️ Updated 0 days ago
Laughing-q