benches/batchnorm2d.rs (6 additions, 1 deletion)

@@ -26,6 +26,11 @@ fn main() {
     loop {
         let img: Tensor<InputShape, Dtype, _> = dev.sample_normal();
 
+        let start = Instant::now();
+        let _ = m.forward(img.clone());
+        dev.synchronize();
+        let infer_dur = start.elapsed();
+
         let start = Instant::now();
         let out = m.forward_mut(img.traced(grads));
         let loss = out.square().mean();
@@ -36,6 +41,6 @@ fn main() {
         grads = loss.backward();
         dev.synchronize();
         let bwd_dur = start.elapsed();
-        println!("fwd={:?} bwd={:?}", fwd_dur, bwd_dur);
+        println!("infer={infer_dur:?}, fwd={fwd_dur:?} bwd={bwd_dur:?}");
     }
 }
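Note on the pattern added here (the conv2d, softmax, and sum benches below get the same treatment): `forward` on an untraced tensor exercises the inference path, `forward_mut` on a traced tensor the training forward pass, and each timed region ends with `dev.synchronize()` so asynchronously launched kernels (e.g. on CUDA) have actually finished before the clock is read. A minimal sketch of that measurement discipline, with a no-op closure standing in for `dev.synchronize()` (placeholder, not dfdx's API):

```rust
use std::time::{Duration, Instant};

/// Times `f`, calling `sync` before reading the clock so that any
/// asynchronously queued work is included in the measurement.
fn timed<R>(sync: impl Fn(), f: impl FnOnce() -> R) -> (R, Duration) {
    let start = Instant::now();
    let out = f();
    sync(); // stand-in for dev.synchronize()
    (out, start.elapsed())
}

fn main() {
    let (_out, infer_dur) = timed(|| {}, || (0..1_000_000u64).sum::<u64>());
    println!("infer={infer_dur:?}");
}
```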
benches/conv2d.rs (7 additions, 1 deletion)

@@ -28,6 +28,11 @@ fn main() {
     loop {
         let img: Tensor<InputShape, Dtype, _> = dev.sample_normal();
 
+        let start = Instant::now();
+        let _ = m.forward(img.clone());
+        dev.synchronize();
+        let infer_dur = start.elapsed();
+
         let start = Instant::now();
         let out = m.forward_mut(img.leaky_traced());
         let loss = out.square().mean();
@@ -38,7 +43,8 @@ fn main() {
         let _ = loss.backward();
         dev.synchronize();
         let bwd_dur = start.elapsed();
-        println!("fwd={:?} bwd={:?}", fwd_dur, bwd_dur);
+
+        println!("infer={infer_dur:?}, fwd={fwd_dur:?} bwd={bwd_dur:?}");
     }
 }
 
benches/softmax.rs (10 additions, 3 deletions)

@@ -23,17 +23,24 @@ fn main() {
 
     loop {
         let img: Tensor<InputShape, Dtype, _> = dev.sample_normal();
-        let grads = Gradients::leaky();
 
         let start = Instant::now();
-        let y = img.traced(grads).softmax::<Ax>();
+        let _ = img.softmax::<Ax>();
         dev.synchronize();
+        let infer_dur = start.elapsed();
+
+        let img: Tensor<InputShape, Dtype, _> = dev.sample_normal();
+
+        let start = Instant::now();
+        let y = img.leaky_traced().softmax::<Ax>();
+        dev.synchronize();
         let fwd_dur = start.elapsed();
 
         let start = Instant::now();
         let _ = y.sum().backward();
         dev.synchronize();
         let bwd_dur = start.elapsed();
-        println!("fwd={:?} bwd={:?}", fwd_dur, bwd_dur);
+
+        println!("infer={infer_dur:?}, fwd={fwd_dur:?} bwd={bwd_dur:?}");
     }
 }
benches/sum.rs (7 additions, 1 deletion)

@@ -24,6 +24,11 @@ fn main() {
         let img: Tensor<InputShape, Dtype, _> = dev.sample_normal();
         let grads = Gradients::leaky();
 
+        let start = Instant::now();
+        let _ = img.clone().sum::<(), _>();
+        dev.synchronize();
+        let infer_dur = start.elapsed();
+
         let start = Instant::now();
         let y = img.traced(grads).sum();
         dev.synchronize();
@@ -33,6 +38,7 @@ fn main() {
         let _ = y.backward();
         dev.synchronize();
         let bwd_dur = start.elapsed();
-        println!("fwd={:?} bwd={:?}", fwd_dur, bwd_dur);
+
+        println!("infer={infer_dur:?}, fwd={fwd_dur:?} bwd={bwd_dur:?}");
     }
 }
src/nn/batchnorm2d.rs (4 additions, 3 deletions)

@@ -79,12 +79,13 @@ where
     let shape = *x.shape();
 
     // statistics for normalizing
-    let std = (var.clone().try_add(epsilon)?).try_sqrt()?;
+    let std = var.clone().try_add(epsilon)?.try_sqrt()?;
 
+    let scale = scale.clone().try_div(std)?.try_broadcast_like(&shape)?;
+
     // normalize & affine
     let x = x.try_sub(mean.clone().try_broadcast_like(&shape)?)?;
-    let x = x.try_div(std.try_broadcast_like(&shape)?)?;
-    let x = x.try_mul(scale.clone().try_broadcast_like(&shape)?)?;
+    let x = x.try_mul(scale)?;
     x.try_add(bias.clone().try_broadcast_like(&shape)?)
 }
 
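The rewrite above leans on a one-line identity: (x - mean) / std * scale + bias == (x - mean) * (scale / std) + bias. Folding the division into the channel-sized `scale` before broadcasting replaces two full-tensor ops (a broadcasted divide and a broadcasted multiply) with one, since the old code broadcast both `std` and `scale` to the input's shape. A scalar sanity check of the identity (arbitrary values, just a sketch):

```rust
fn main() {
    // (x - mean) / std * scale + bias == (x - mean) * (scale / std) + bias
    let (x, mean, std, scale, bias) = (1.5f32, 0.2, 0.9, 1.1, -0.3);
    let two_ops = (x - mean) / std * scale + bias; // old: divide, then multiply
    let one_op = (x - mean) * (scale / std) + bias; // new: pre-fold scale / std
    assert!((two_ops - one_op).abs() < 1e-6);
}
```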
src/tensor_ops/utilities/ops.rs (2 additions, 2 deletions)

@@ -53,7 +53,7 @@ pub(crate) fn try_unary_op<
     let (inp, mut tape) = inp.split_tape();
     let inp_ghost = inp.ghost();
     let dev = inp.device.clone();
-    if D::BACKWARD_WITHOUT_DATA {
+    if !T::OWNS_TAPE || D::BACKWARD_WITHOUT_DATA {
         let out = inp_ghost.dev.forward(op.clone(), Err(inp))?;
         let out_ghost = out.ghost();
         tape.add_backward_op(move |grads| {
@@ -105,7 +105,7 @@ pub(crate) fn try_binary_op<
     let lhs_ghost = lhs.ghost();
     let rhs_ghost = rhs.ghost();
     let mut tape = ltape.merge(rtape);
-    if D::BACKWARD_WITHOUT_DATA {
+    if !LhsTape::OWNS_TAPE || D::BACKWARD_WITHOUT_DATA {
         let out = lhs_ghost.dev.forward(op, Err(lhs), Err(rhs))?;
         let out_ghost = out.ghost();
         tape.add_backward_op(move |grads| {
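The ops.rs change extends the data-free branch to inference: `Err(inp)` hands the kernel the input by value, which is only safe when nothing will read that tensor later. Previously that required `D::BACKWARD_WITHOUT_DATA` (a backward kernel that never touches the forward data); now a tape that doesn't own gradients (`!T::OWNS_TAPE`, i.e. `NoneTape` during a plain `forward`) qualifies too, since no backward pass will run at all. A toy sketch of the dispatch, mirroring the Ok-borrowed / Err-owned convention in ops.rs (hypothetical helper, not dfdx's actual signature):

```rust
/// Hypothetical helper: may the kernel consume its input buffer?
/// `Ok(inp)`  -> keep it alive, the backward op will read its data.
/// `Err(inp)` -> take ownership, nobody needs it afterwards.
fn input_for_kernel<T>(inp: T, owns_tape: bool, backward_without_data: bool) -> Result<T, T> {
    if !owns_tape || backward_without_data {
        Err(inp)
    } else {
        Ok(inp)
    }
}

fn main() {
    // Inference (NoneTape): the input may be consumed or reused in place.
    assert!(input_for_kernel(vec![1.0f32; 4], false, false).is_err());
    // Training with a data-dependent backward: the input must stay alive.
    assert!(input_for_kernel(vec![1.0f32; 4], true, false).is_ok());
}
```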