# Burn Depth Pro — model inference
| input | metric depth |
|---|---|
use burn::prelude::*;
use burn_depth::{InferenceBackend, model::depth_pro::DepthPro};
// NdArray backend (alternatively: burn::backend::Cuda, burn::backend::Cpu)
let device = <InferenceBackend as Backend>::Device::default();
// Load the pretrained Depth Pro weights from the MessagePack (.mpk) checkpoint
// produced by the import step. `load` is fallible, hence the `?`.
let model = DepthPro::<InferenceBackend>::load(&device, "assets/model/depth_pro.mpk")?;
// Image tensor with shape [1, 3, H, W] (batch, channels, height, width)
let input: Tensor<InferenceBackend, 4> = Tensor::zeros([1, 3, 512, 512], &device);
// NOTE(review): the second argument appears to be an optional focal-length-in-pixels
// hint (`None` = let the model estimate it, cf. `result.focallength_px` below) —
// confirm against the `infer` signature.
let result = model.infer(input, None);
// result.depth: Tensor<InferenceBackend, 3> with shape [1, H, W]
// result.focallength_px: Tensor<InferenceBackend, 1> with shape [1]
Download `depth_pro.pt` to `assets/model/`, convert the checkpoint with `cargo run --bin import --features import`, then run the example with `cargo run --example inference`.