//! edge_impulse_runner/inference/model.rs
1use crate::backends::{BackendConfig, InferenceBackend, create_backend};
2use crate::error::EdgeImpulseError;
3use crate::inference::messages::InferenceResponse;
4use crate::types::{ModelParameters, SensorType, VisualAnomalyResult};
5use std::path::Path;
6
/// Main Edge Impulse model interface that abstracts over different backends
///
/// This struct provides a unified interface for running inference on Edge Impulse models,
/// regardless of whether you're using EIM binary communication or FFI direct calls.
/// The backend is automatically selected based on the constructor used.
///
/// ## Examples
///
/// ```no_run
/// use edge_impulse_runner::EdgeImpulseModel;
///
/// // EIM mode (default)
/// let mut model = EdgeImpulseModel::new("model.eim")?;
///
/// // FFI mode
/// let mut model = EdgeImpulseModel::new_ffi(false)?;
///
/// // Run inference
/// let result = model.infer(vec![0.1, 0.2, 0.3], None)?;
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
pub struct EdgeImpulseModel {
    // Trait object so EIM and FFI backends are interchangeable at runtime;
    // this is also why `Debug` is implemented manually below (the trait
    // object has no `Debug` bound).
    backend: Box<dyn InferenceBackend>,
}
31
32impl EdgeImpulseModel {
33    /// Create a new model instance using EIM backend
34    pub fn new<P: AsRef<Path>>(model_path: P) -> Result<Self, EdgeImpulseError> {
35        let config = BackendConfig::Eim {
36            path: model_path.as_ref().to_path_buf(),
37            socket_path: None,
38        };
39        let backend = create_backend(config)?;
40        Ok(Self { backend })
41    }
42
43    /// Create a new model instance using EIM backend with custom socket path
44    pub fn new_with_socket<P: AsRef<Path>>(
45        model_path: P,
46        socket_path: P,
47    ) -> Result<Self, EdgeImpulseError> {
48        let config = BackendConfig::Eim {
49            path: model_path.as_ref().to_path_buf(),
50            socket_path: Some(socket_path.as_ref().to_path_buf()),
51        };
52        let backend = create_backend(config)?;
53        Ok(Self { backend })
54    }
55
56    /// Create a new model instance using EIM backend with debug output
57    pub fn new_with_debug<P: AsRef<Path>>(
58        model_path: P,
59        debug: bool,
60    ) -> Result<Self, EdgeImpulseError> {
61        let config = BackendConfig::Eim {
62            path: model_path.as_ref().to_path_buf(),
63            socket_path: None,
64        };
65        let mut backend = create_backend(config)?;
66        if debug {
67            backend.set_debug_callback(Box::new(|msg| println!("[DEBUG] {msg}")));
68        }
69        Ok(Self { backend })
70    }
71
72    /// Create a new model instance using FFI backend
73    pub fn new_ffi(debug: bool) -> Result<Self, EdgeImpulseError> {
74        let config = BackendConfig::Ffi { debug };
75        let mut backend = create_backend(config)?;
76        if debug {
77            backend.set_debug_callback(Box::new(|msg| println!("[DEBUG] {msg}")));
78        }
79        Ok(Self { backend })
80    }
81
82    /// Run inference on the provided features
83    pub fn infer(
84        &mut self,
85        features: Vec<f32>,
86        debug: Option<bool>,
87    ) -> Result<InferenceResponse, EdgeImpulseError> {
88        self.backend.infer(features, debug)
89    }
90
91    /// Get model parameters
92    pub fn parameters(&self) -> Result<&ModelParameters, EdgeImpulseError> {
93        self.backend.parameters()
94    }
95
96    /// Get sensor type
97    pub fn sensor_type(&self) -> Result<SensorType, EdgeImpulseError> {
98        self.backend.sensor_type()
99    }
100
101    /// Get input size
102    pub fn input_size(&self) -> Result<usize, EdgeImpulseError> {
103        self.backend.input_size()
104    }
105
106    /// Normalize visual anomaly results
107    pub fn normalize_visual_anomaly(
108        &self,
109        anomaly: f32,
110        max: f32,
111        mean: f32,
112        regions: &[(f32, u32, u32, u32, u32)],
113    ) -> VisualAnomalyResult {
114        self.backend
115            .normalize_visual_anomaly(anomaly, max, mean, regions)
116    }
117}
118
119impl std::fmt::Debug for EdgeImpulseModel {
120    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
121        f.debug_struct("EdgeImpulseModel")
122            .field("backend", &"<backend>")
123            .finish()
124    }
125}