Commit

feat: Add test case for segfault
uttarayan21 committed Sep 26, 2024
1 parent 9406fd2 commit 8564b09
Showing 11 changed files with 108 additions and 46 deletions.
2 changes: 1 addition & 1 deletion .gitmodules
@@ -1,3 +1,3 @@
[submodule "mnn-sys/vendor"]
path = mnn-sys/vendor
- url = git@github.com:alibaba/mnn
+ url = https://github.com/alibaba/mnn
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -35,7 +35,7 @@ mnn-threadpool = ["mnn-sys/mnn-threadpool"]
tracing = ["dep:tracing"]
profile = ["tracing"]

- default = ["mnn-threadpool"]
+ default = ["mnn-threadpool", "opencl"]


[dev-dependencies]
15 changes: 8 additions & 7 deletions examples/inspect.rs
@@ -49,13 +49,14 @@ pub fn main() -> anyhow::Result<()> {
println!("{}: {:?}", x.name(), tensor.shape());
tensor.fill(1.0f32);
});
time!(interpreter.run_session_with_callback(&session, |_, name| {
println!("Before Callback: {:?}", name);
true
},|_ , name| {
println!("After Callback: {:?}", name);
true
} , true)?;"run session");
// time!(interpreter.run_session_with_callback(&session, |_, name| {
// println!("Before Callback: {:?}", name);
// true
// },|_ , name| {
// println!("After Callback: {:?}", name);
// true
// } , true)?;"run session");
interpreter.run_session(&session)?;
let outputs = interpreter.outputs(&session);
outputs.iter().for_each(|x| {
let tensor = x.tensor::<f32>().expect("No tensor");
7 changes: 4 additions & 3 deletions flake.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion flake.nix
@@ -22,7 +22,7 @@
flake = false;
};
mnn-src = {
url = "github:alibaba/MNN";
url = "github:alibaba/MNN/2.9.5";
flake = false;
};
};
1 change: 1 addition & 0 deletions mnn-sys/build.rs
@@ -43,6 +43,7 @@ fn ensure_vendor_exists(vendor: impl AsRef<Path>) -> Result<()> {

fn main() -> Result<()> {
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-env-changed=MNN_SRC");
let out_dir = PathBuf::from(std::env::var("OUT_DIR")?);
let source = PathBuf::from(
std::env::var("MNN_SRC")
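Note: the added rerun-if-env-changed line makes Cargo re-run this build script whenever the MNN_SRC override changes, not only when build.rs itself is edited. A minimal sketch of the pattern, assuming the script falls back to the vendored mnn-sys/vendor checkout when MNN_SRC is unset (the real fallback is truncated above):

    use std::path::PathBuf;

    fn main() {
        // Re-run when either the script or the MNN_SRC override changes.
        println!("cargo:rerun-if-changed=build.rs");
        println!("cargo:rerun-if-env-changed=MNN_SRC");

        // Prefer an external MNN checkout; otherwise use the vendored submodule
        // (the fallback path here is an assumption for illustration).
        let source = std::env::var("MNN_SRC")
            .map(PathBuf::from)
            .unwrap_or_else(|_| PathBuf::from("vendor"));
        println!("cargo:warning=building MNN from {}", source.display());
    }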
16 changes: 9 additions & 7 deletions src/interpreter.rs
@@ -381,12 +381,12 @@ impl Interpreter {
Ok(())
}

/// Wait for all output tensors to be ready after computation
pub fn wait(&self, session: &crate::session::Session) {
self.outputs(session).iter().for_each(|tinfo| {
tinfo.raw_tensor().wait_read(true);
});
}
// /// Wait for all output tensors to be ready after computation
// pub fn wait(&self, session: &crate::session::Session) {
// self.outputs(session).iter().for_each(|tinfo| {
// tinfo.raw_tensor().wait_read(true);
// });
// }
}

#[repr(transparent)]
@@ -397,9 +397,11 @@ pub struct TensorInfo<'t, 'tl> {

impl core::fmt::Debug for TensorInfo<'_, '_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
// let tensor = self.raw_tensor();
// let shape = tensor.shape().clone();
f.debug_struct("TensorInfo")
.field("name", &self.name())
.field("tensor", &self.raw_tensor().shape())
// .field("tensor", &shape)
.finish()
}
}
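Note: with Interpreter::wait commented out, callers that still need to block until every output is ready can run the same loop at the call site using the unified RawTensor::wait added in src/tensor.rs below. A minimal sketch, assuming net and session as in the updated tests:

    // Per-tensor replacement for the removed Interpreter::wait.
    net.outputs(&session).iter().for_each(|tinfo| {
        // Block until the backend has finished writing this output tensor.
        tinfo.raw_tensor().wait(mnn::ffi::MapType::MAP_TENSOR_READ, true);
    });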
11 changes: 3 additions & 8 deletions src/tensor.rs
@@ -270,6 +270,7 @@ where
}

pub fn get_dimension_type(&self) -> DimensionType {
debug_assert!(!self.tensor.is_null());
From::from(unsafe { Tensor_getDimensionType(self.tensor) })
}

@@ -720,15 +721,9 @@ impl<'r> RawTensor<'r> {
self.shape().as_ref().contains(&-1)
}

pub fn wait_read(&self, finish: bool) {
unsafe {
Tensor_wait(self.inner, MapType::MAP_TENSOR_READ, finish as i32);
}
}

pub fn wait_write(&self, finish: bool) {
pub fn wait(&self, map_type: MapType, finish: bool) {
unsafe {
Tensor_wait(self.inner, MapType::MAP_TENSOR_WRITE, finish as i32);
Tensor_wait(self.inner, map_type, finish as i32);
}
}

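Note: callers of the removed wait_read/wait_write helpers now pass the map type explicitly to the single wait method. A minimal migration sketch, assuming a RawTensor value named raw is in scope and MapType is the mnn::ffi re-export used in the tests:

    raw.wait(mnn::ffi::MapType::MAP_TENSOR_READ, true);  // formerly raw.wait_read(true)
    raw.wait(mnn::ffi::MapType::MAP_TENSOR_WRITE, true); // formerly raw.wait_write(true)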
51 changes: 44 additions & 7 deletions tests/basic.rs
@@ -14,9 +14,46 @@ fn test_basic_metal() {
}
#[cfg(feature = "opencl")]
#[test]
fn test_basic_opencl() {
test_basic(ForwardType::OpenCL).unwrap();
fn test_basic_opencl() -> Result<(), Box<dyn std::error::Error>> {
let backend = ForwardType::OpenCL;
let realesr = std::path::Path::new("tests/assets/realesr.mnn");
use mnn::BackendConfig;

let mut net = mnn::Interpreter::from_file(realesr)?;
net.set_cache_file(realesr.with_extension("cache"), 128)?;
let mut config = ScheduleConfig::new();
config.set_type(backend);
let mut session = net.create_session(config)?;
net.update_cache_file(&mut session);

net.inputs(&session).iter().for_each(|x| {
let mut tensor = x.tensor::<f32>().expect("No tensor");
println!("{}: {:?}", x.name(), tensor.shape());
tensor.fill(1.0f32);
});
net.run_session(&session)?;
let outputs = net.outputs(&session);
outputs.iter().for_each(|x| {
let tensor = x.tensor::<f32>().expect("No tensor");
tensor.wait(ffi::MapType::MAP_TENSOR_READ, true);
println!("Waiting for tensor: {}", x.name());
println!("{}: {:?}", x.name(), tensor.shape());
// let _ = tensor.create_host_tensor_from_device(true);
});

// drop(outputs);
// drop(session);
// drop(net);
Ok(())
}
// net.run_session(&session)?;
// let outputs = net.outputs(&session);
// for output in outputs.iter() {
// println!("output: {:?}", output);
// let tensor = output.tensor::<f32>()?;
// let shape = tensor.shape();
// assert_eq!(shape.as_ref(), [1, 3, 2048, 2048]);
// }
#[cfg(feature = "coreml")]
#[test]
fn test_basic_coreml() {
@@ -34,8 +71,8 @@ fn test_multi_path_cpu_cpu() {
test_multipath_session(ForwardType::CPU, ForwardType::CPU).unwrap();
}

#[cfg(feature = "opencl")]
#[test]
fn test_multi_path_opencl_cpu() {
test_multipath_session(ForwardType::OpenCL, ForwardType::CPU).unwrap();
}
// #[cfg(feature = "opencl")]
// #[test]
// fn test_multi_path_opencl_cpu() {
// test_multipath_session(ForwardType::OpenCL, ForwardType::CPU).unwrap();
// }
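Note: the commented-out create_host_tensor_from_device call above hints at how the OpenCL outputs would actually be read back after the wait. A minimal sketch, assuming net and session as in the test and that the method copies the device tensor into host memory, as its name suggests:

    let outputs = net.outputs(&session);
    outputs.iter().for_each(|x| {
        let tensor = x.tensor::<f32>().expect("No tensor");
        // Block until the OpenCL backend has finished writing this output.
        tensor.wait(ffi::MapType::MAP_TENSOR_READ, true);
        // Copy the device-side tensor into host memory before touching its data.
        let _host = tensor.create_host_tensor_from_device(true);
        println!("{}: {:?}", x.name(), tensor.shape());
    });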
17 changes: 6 additions & 11 deletions tests/common.rs
@@ -28,20 +28,15 @@ impl AsRef<[u8]> for Model {
pub fn test_basic(backend: ForwardType) -> Result<()> {
use mnn::BackendConfig;

let mut net = mnn::Interpreter::from_bytes(Model::new())?;
let mut net = mnn::Interpreter::from_file("tests/assets/realesr.mnn")?;
let mut config = ScheduleConfig::new();
config.set_type(backend);
config.set_backup_type(backend);
let mut bc = BackendConfig::new();
bc.set_memory_mode(mnn::MemoryMode::High);
bc.set_precision_mode(mnn::PrecisionMode::High);
bc.set_power_mode(mnn::PowerMode::High);
config.set_backend_config(bc);
let session = net.create_session(config)?;
for input in &net.inputs(&session) {
println!("input: {:?}", input);
input.tensor::<f32>()?.fill(0.0);
}
net.inputs(&session).iter().for_each(|x| {
let mut tensor = x.tensor::<f32>().expect("No tensor");
println!("{}: {:?}", x.name(), tensor.shape());
tensor.fill(1.0f32);
});
net.run_session(&session)?;
let outputs = net.outputs(&session);
for output in outputs.iter() {
30 changes: 30 additions & 0 deletions tests/segfault.rs
@@ -0,0 +1,30 @@
use mnn::*;

/// This segfaults on the OpenCL backend if we print the TensorInfo
#[cfg(feature = "opencl")]
#[test]
fn test_segfault_case_1_() -> Result<(), Box<dyn std::error::Error>> {
let backend = ForwardType::OpenCL;
let realesr = std::path::Path::new("tests/assets/realesr.mnn");
use mnn::BackendConfig;

let mut net = mnn::Interpreter::from_file(realesr)?;
net.set_cache_file(realesr.with_extension("cache"), 128)?;
let mut config = ScheduleConfig::new();
config.set_type(backend);
let mut session = net.create_session(config)?;
net.update_cache_file(&mut session);

net.inputs(&session).iter().for_each(|x| {
let mut tensor = x.tensor::<f32>().expect("No tensor");
// println!("{}: {:?}", x.name(), tensor.shape());
println!("{:?}", x);
tensor.fill(1.0f32);
});
net.run_session(&session)?;
let outputs = net.outputs(&session);
drop(outputs);
drop(session);
drop(net);
Ok(())
}
