## Depth Pro model inference with Burn
*Example: input image and the corresponding metric depth prediction.*
```rust
use burn::prelude::*;
use burn_depth::{InferenceBackend, model::depth_pro::DepthPro};

let device = <InferenceBackend as Backend>::Device::default();
let model = DepthPro::<InferenceBackend>::load(&device, "assets/model/depth_pro.mpk")?;

// Image tensor with shape [1, 3, H, W] (batch, channels, height, width).
let input: Tensor<InferenceBackend, 4> = Tensor::zeros([1, 3, 512, 512], &device);
let result = model.infer(input, None);

// result.depth: Tensor<InferenceBackend, 3> with shape [1, H, W]
// result.focallength_px: Tensor<InferenceBackend, 1> with shape [1]
```
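The zero tensor above is only a placeholder. As a minimal sketch, a real image could be turned into the expected `[1, 3, H, W]` tensor as shown below, assuming the `image` crate is available and that a plain `[0, 1]` scaling is acceptable (the model may expect a different normalization, and the crate may already ship its own preprocessing helpers):

```rust
use burn::tensor::TensorData;

// Sketch only: decode an RGB image and reorder its interleaved HWC bytes
// into a planar CHW float tensor scaled to [0, 1].
let img = image::open("assets/image/test.jpg")?.to_rgb8();
let (w, h) = (img.width() as usize, img.height() as usize);

let mut chw = vec![0.0f32; 3 * h * w];
for (x, y, pixel) in img.enumerate_pixels() {
    let (x, y) = (x as usize, y as usize);
    for c in 0..3 {
        chw[c * h * w + y * w + x] = pixel[c] as f32 / 255.0;
    }
}

let input: Tensor<InferenceBackend, 4> =
    Tensor::from_data(TensorData::new(chw, [1, 3, h, w]), &device);
let result = model.infer(input, None);
```

On the output side, `result.depth.into_data()` exposes the raw depth values if they need to be saved or visualized.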
The same inference can be run from the command line via the `inference` example:

```sh
cargo run --example inference -- \
    --model depth-pro \
    --checkpoint assets/model/depth_pro.mpk \
    --image assets/image/test.jpg
```
The example also runs Depth Anything 3:

```sh
cargo run --example inference -- \
    --model depth-anything3 \
    --checkpoint assets/model/da3_metric_large.mpk \
    --image assets/image/test.jpg
```
To convert the original checkpoints into the `.mpk` files used above:

- Download `depth_pro.pt` to `assets/model/`
- `cargo run --bin import_depth_pro --features import`
- Download `da3_metric_large.safetensors` to `assets/model/`
- `cargo run --bin import_da3 --features import`
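The import binaries are not reproduced here. As a rough illustration only, a PyTorch-to-`.mpk` conversion with `burn-import` typically looks like the sketch below; the `DepthProRecord` type name and module path, the precision settings, and the absence of key remapping are assumptions, and the actual `import_depth_pro` binary may differ:

```rust
use burn::prelude::*;
use burn::record::{FullPrecisionSettings, NamedMpkFileRecorder, Recorder};
use burn_import::pytorch::{LoadArgs, PyTorchFileRecorder};
use burn_depth::InferenceBackend;
// Assumed: the record type generated by `#[derive(Module)]` on `DepthPro`.
use burn_depth::model::depth_pro::DepthProRecord;

// Illustration only: read the original PyTorch checkpoint into the model's
// record type, then re-save it in Burn's named MessagePack format.
let device = <InferenceBackend as Backend>::Device::default();

let record: DepthProRecord<InferenceBackend> =
    PyTorchFileRecorder::<FullPrecisionSettings>::default()
        .load(LoadArgs::new("assets/model/depth_pro.pt".into()), &device)?;

NamedMpkFileRecorder::<FullPrecisionSettings>::default()
    .record(record, "assets/model/depth_pro.mpk".into())?;
```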
List all options of the `inference` example:

```sh
cargo run --example inference -- --help
```