
//! Common types and parameters used throughout the Edge Impulse Runner.
//!
//! This module contains the core data structures that define model configuration,
//! project information, and performance metrics. These types are used to configure
//! the model and interpret its outputs across both EIM and FFI modes.
//!
//! ## Key Types
//!
//! - **ModelParameters**: Model configuration and capabilities
//! - **ProjectInfo**: Edge Impulse project metadata
//! - **TimingInfo**: Performance timing information
//! - **BoundingBox**: Object detection results
//! - **SensorType**: Supported sensor types

use serde::{Deserialize, Serialize};

/// Enum representing the different types of anomaly detection supported by the model
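///
/// # Example
///
/// A minimal sketch of the `From<u32>` conversion defined below; unknown
/// discriminants fall back to `None`. The `edge_impulse_runner::types` path
/// is assumed here and may differ from the crate's actual layout:
///
/// ```
/// use edge_impulse_runner::types::RunnerHelloHasAnomaly;
///
/// assert_eq!(RunnerHelloHasAnomaly::from(2), RunnerHelloHasAnomaly::GMM);
/// // Any unrecognized value maps to the `None` variant.
/// assert_eq!(RunnerHelloHasAnomaly::from(99), RunnerHelloHasAnomaly::None);
/// ```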
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RunnerHelloHasAnomaly {
    None = 0,
    KMeans = 1,
    GMM = 2,
    VisualGMM = 3,
}

impl From<u32> for RunnerHelloHasAnomaly {
    fn from(value: u32) -> Self {
        match value {
            0 => Self::None,
            1 => Self::KMeans,
            2 => Self::GMM,
            3 => Self::VisualGMM,
            _ => Self::None,
        }
    }
}

/// Parameters that define a model's configuration and capabilities.
///
/// These parameters are received from the model during initialization and describe
/// the model's input requirements, processing settings, and output characteristics.
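///
/// # Example
///
/// A minimal sketch of overriding the defaults for a hypothetical 96x96 RGB
/// image model; the concrete values are illustrative, not taken from a real
/// deployment:
///
/// ```
/// use edge_impulse_runner::types::ModelParameters;
///
/// let params = ModelParameters {
///     image_input_width: 96,
///     image_input_height: 96,
///     image_channel_count: 3,
///     model_type: String::from("object-detection"),
///     ..ModelParameters::default()
/// };
/// assert_eq!(params.image_input_width, 96);
/// ```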
#[derive(Debug, Deserialize, Clone)]
pub struct ModelParameters {
    /// Number of axes for motion/positional data (e.g., 3 for xyz accelerometer)
    pub axis_count: u32,
    /// Sampling frequency in Hz for time-series data
    pub frequency: f32,
    /// Indicates if the model supports anomaly detection
    #[serde(deserialize_with = "deserialize_anomaly_type")]
    pub has_anomaly: RunnerHelloHasAnomaly,
    /// Whether the model supports object tracking
    #[serde(default)]
    pub has_object_tracking: bool,
    /// Number of color channels in input images (1 = grayscale, 3 = RGB)
    pub image_channel_count: u32,
    /// Number of consecutive frames required for video input
    pub image_input_frames: u32,
    /// Required height of input images in pixels
    pub image_input_height: u32,
    /// Required width of input images in pixels
    pub image_input_width: u32,
    /// Method used to resize input images ("fit" or "fill")
    pub image_resize_mode: String,
    /// Type of inferencing engine (0 = TensorFlow Lite, 1 = TensorFlow.js)
    pub inferencing_engine: u32,
    /// Total number of input features expected by the model
    pub input_features_count: u32,
    /// Time interval between samples in milliseconds
    pub interval_ms: f32,
    /// Number of classification labels
    pub label_count: u32,
    /// Vector of classification labels
    pub labels: Vec<String>,
    /// Type of model ("classification", "object-detection", etc.)
    pub model_type: String,
    /// Type of input sensor (see SensorType enum)
    pub sensor: i32,
    /// Size of the processing window for time-series data
    pub slice_size: u32,
    /// Vector of thresholds for different types of detections
    #[serde(default)]
    pub thresholds: Vec<ModelThreshold>,
    /// Whether the model supports continuous mode operation
    pub use_continuous_mode: bool,
}

impl Default for ModelParameters {
    fn default() -> Self {
        Self {
            axis_count: 0,
            frequency: 0.0,
            has_anomaly: RunnerHelloHasAnomaly::None,
            has_object_tracking: false,
            image_channel_count: 0,
            image_input_frames: 1,
            image_input_height: 0,
            image_input_width: 0,
            image_resize_mode: String::from("fit"),
            inferencing_engine: 0,
            input_features_count: 0,
            interval_ms: 0.0,
            label_count: 0,
            labels: Vec::new(),
            model_type: String::from("classification"),
            sensor: -1,
            slice_size: 0,
            thresholds: Vec::new(),
            use_continuous_mode: false,
        }
    }
}

/// Deserializes the numeric `has_anomaly` field into a [`RunnerHelloHasAnomaly`] variant.
fn deserialize_anomaly_type<'de, D>(deserializer: D) -> Result<RunnerHelloHasAnomaly, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let value = u32::deserialize(deserializer)?;
    Ok(RunnerHelloHasAnomaly::from(value))
}
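
/// A per-block detection threshold, selected by the `type` tag in the model's
/// JSON metadata.
///
/// # Example
///
/// A minimal sketch of how the `type` tag picks a variant during
/// deserialization; assumes `serde_json` is available as a dependency:
///
/// ```
/// use edge_impulse_runner::types::ModelThreshold;
///
/// let json = r#"{ "type": "object_detection", "id": 4, "min_score": 0.6 }"#;
/// let threshold: ModelThreshold = serde_json::from_str(json).unwrap();
/// match threshold {
///     ModelThreshold::ObjectDetection { id, min_score } => {
///         assert_eq!(id, 4);
///         assert!((min_score - 0.6).abs() < f32::EPSILON);
///     }
///     _ => panic!("expected an object detection threshold"),
/// }
/// ```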
#[derive(Debug, Deserialize, Clone)]
#[serde(tag = "type")]
pub enum ModelThreshold {
    #[serde(rename = "object_detection")]
    ObjectDetection { id: u32, min_score: f32 },
    #[serde(rename = "anomaly_gmm")]
    AnomalyGMM { id: u32, min_anomaly_score: f32 },
    #[serde(rename = "object_tracking")]
    ObjectTracking {
        id: u32,
        keep_grace: u32,
        max_observations: u32,
        threshold: f32,
    },
    #[serde(rename = "unknown")]
    Unknown { id: u32, unknown: f32 },
}

impl Default for ModelThreshold {
    fn default() -> Self {
        Self::ObjectDetection {
            id: 0,
            min_score: 0.5,
        }
    }
}

/// Information about the Edge Impulse project that created the model.
///
/// Contains metadata about the project's origin and version.
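///
/// # Example
///
/// A minimal sketch of parsing this metadata from JSON; the field values are
/// made up, and `serde_json` is assumed to be available:
///
/// ```
/// use edge_impulse_runner::types::ProjectInfo;
///
/// let json = r#"{ "deploy_version": 7, "id": 12345, "name": "wake-word", "owner": "jane" }"#;
/// let project: ProjectInfo = serde_json::from_str(json).unwrap();
/// assert_eq!(project.deploy_version, 7);
/// assert_eq!(project.owner, "jane");
/// ```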
#[derive(Deserialize, Debug, Clone)]
pub struct ProjectInfo {
    /// Version number of the deployment
    pub deploy_version: u32,
    /// Unique project identifier
    pub id: u32,
    /// Name of the project
    pub name: String,
    /// Username of the project owner
    pub owner: String,
}

/// Performance timing information for different processing stages.
///
/// Provides detailed timing breakdowns for each step of the inference pipeline,
/// useful for performance monitoring and optimization.
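///
/// # Example
///
/// A minimal sketch of summing the per-stage timings into a total pipeline
/// time; the numbers are illustrative, not real measurements:
///
/// ```
/// use edge_impulse_runner::types::TimingInfo;
///
/// let timing = TimingInfo {
///     dsp: 1200,
///     classification: 3400,
///     anomaly: 0,
///     json: 150,
///     stdin: 80,
/// };
/// let total_us = timing.dsp + timing.classification + timing.anomaly + timing.json + timing.stdin;
/// assert_eq!(total_us, 4830);
/// ```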
#[derive(Deserialize, Debug)]
pub struct TimingInfo {
    /// Time spent on digital signal processing (DSP) in microseconds
    pub dsp: u32,
    /// Time spent on classification inference in microseconds
    pub classification: u32,
    /// Time spent on anomaly detection in microseconds
    pub anomaly: u32,
    /// Time spent on JSON serialization/deserialization in microseconds
    pub json: u32,
    /// Time spent reading from standard input in microseconds
    pub stdin: u32,
}

/// Represents a detected object's location and classification.
///
/// Used in object detection models to specify where objects were found
/// in an image and their classification details.
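///
/// # Example
///
/// A minimal sketch of reading a detection; the bottom-right corner is the
/// top-left corner offset by the box dimensions. The values are illustrative:
///
/// ```
/// use edge_impulse_runner::types::BoundingBox;
///
/// let face = BoundingBox {
///     height: 24,
///     label: String::from("face"),
///     value: 0.93,
///     width: 16,
///     x: 32,
///     y: 48,
/// };
/// let (right, bottom) = (face.x + face.width, face.y + face.height);
/// assert_eq!((right, bottom), (48, 72));
/// ```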
#[derive(Debug, Deserialize, Serialize)]
pub struct BoundingBox {
    /// Height of the bounding box in pixels
    pub height: i32,
    /// Classification label for the detected object
    pub label: String,
    /// Confidence score for the detection (0.0 to 1.0)
    pub value: f32,
    /// Width of the bounding box in pixels
    pub width: i32,
    /// X-coordinate of the top-left corner
    pub x: i32,
    /// Y-coordinate of the top-left corner
    pub y: i32,
}

/// Represents the normalized results of visual anomaly detection
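///
/// A minimal sketch of destructuring the tuple. The field order used here
/// (overall anomaly score, maximum score, mean score, then grid cells as
/// `(score, x, y, width, height)`) is an assumption, not something this
/// module defines:
///
/// ```
/// use edge_impulse_runner::types::VisualAnomalyResult;
///
/// let result: VisualAnomalyResult = (0.8, 0.9, 0.4, vec![(0.9, 0, 0, 8, 8)]);
/// let (anomaly, max, mean, regions) = result;
/// assert!(mean <= max && anomaly <= max);
/// assert_eq!(regions.len(), 1);
/// ```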
pub type VisualAnomalyResult = (f32, f32, f32, Vec<(f32, u32, u32, u32, u32)>);

/// Represents the type of sensor used for data collection.
///
/// This enum defines the supported sensor types for Edge Impulse models,
/// mapping to the numeric values used in the protocol:
/// - -1 (or any unrecognized value): Unknown
/// - 1: Microphone
/// - 2: Accelerometer
/// - 3: Camera
/// - 4: Positional
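///
/// # Example
///
/// A minimal sketch of mapping the raw `sensor` value from
/// [`ModelParameters`] onto this enum; the crate path is assumed:
///
/// ```
/// use edge_impulse_runner::types::{ModelParameters, SensorType};
///
/// let params = ModelParameters::default();
/// // `sensor` defaults to -1, which maps to `Unknown`.
/// assert_eq!(SensorType::from(params.sensor), SensorType::Unknown);
/// assert_eq!(SensorType::from(3), SensorType::Camera);
/// ```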
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SensorType {
    /// Unknown or unsupported sensor type (-1 or default)
    Unknown = -1,
    /// Microphone sensor for audio input (1)
    Microphone = 1,
    /// Accelerometer sensor for motion data (2)
    Accelerometer = 2,
    /// Camera sensor for image/video input (3)
    Camera = 3,
    /// Positional sensor for location/orientation data (4)
    Positional = 4,
}

impl From<i32> for SensorType {
    fn from(value: i32) -> Self {
        match value {
            -1 => SensorType::Unknown,
            1 => SensorType::Microphone,
            2 => SensorType::Accelerometer,
            3 => SensorType::Camera,
            4 => SensorType::Positional,
            _ => SensorType::Unknown,
        }
    }
}