
Commit 6d26482

refactor: Improved Error Handling (#194)
Signed-off-by: Anush008 <[email protected]>
1 parent 5ffa3f2 commit 6d26482

File tree

11 files changed (+66 -31 lines changed)

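Nearly every hunk below applies the same transformation: panicking calls (`unwrap()`, bare indexing, terse `expect()` messages) are replaced with `ok_or_else`/`map_err` plus `anyhow!` and the `?` operator, so failures surface as `anyhow::Result` errors instead of aborting the process. A minimal sketch of that pattern, assuming `anyhow` and `serde_json` as dependencies; the function name and the "hidden_size" field are made up for illustration and are not fastembed-rs APIs:

use anyhow::{anyhow, Result};

// Illustrative only: mirrors the error-handling shape used throughout this commit.
fn read_hidden_size(config: &serde_json::Value) -> Result<u32> {
    // Before the refactor, code like this would call `.as_u64().unwrap()` and panic
    // on malformed input. After it, the missing value becomes an error that `?`
    // propagates to the caller.
    let size = config["hidden_size"]
        .as_u64()
        .ok_or_else(|| anyhow!("hidden_size must be a valid u64"))? as u32;
    Ok(size)
}
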
.github/workflows/release.yml

Lines changed: 3 additions & 3 deletions
@@ -13,12 +13,12 @@ jobs:
        with:
          fetch-depth: 0

-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v6
        with:
-          node-version: 20
+          node-version: 24

      - name: "🔧 setup Bun"
-        uses: oven-sh/setup-bun@v1
+        uses: oven-sh/setup-bun@v2

      - name: "📦 install dependencies"
        run: bun install -D @semantic-release/git conventional-changelog-conventionalcommits semantic-release-cargo

src/image_embedding/impl.rs

Lines changed: 11 additions & 4 deletions
@@ -122,7 +122,7 @@ impl ImageEmbedding {
        ImageEmbedding::list_supported_models()
            .into_iter()
            .find(|m| &m.model == model)
-            .expect("Model not found.")
+            .expect("Model not found in supported models list. This is a bug - please report it.")
    }

    /// Method to generate image embeddings for a Vec of image bytes
@@ -217,7 +217,10 @@ impl ImageEmbedding {
        // Try to get the only output key
        // If multiple, then default to few known keys `image_embeds` and `last_hidden_state`
        let last_hidden_state_key = match outputs.len() {
-            1 => vec![outputs.keys().next().unwrap()],
+            1 => vec![outputs
+                .keys()
+                .next()
+                .ok_or_else(|| anyhow!("Expected one output but found none"))?],
            _ => vec!["image_embeds", "last_hidden_state"],
        };

@@ -252,8 +255,12 @@ impl ImageEmbedding {
                // For 2D output [batch_size, hidden_size]
                output_array
                    .outer_iter()
-                    .map(|row| normalize(row.as_slice().unwrap()))
-                    .collect()
+                    .map(|row| {
+                        row.as_slice()
+                            .ok_or_else(|| anyhow!("Failed to convert array row to slice"))
+                            .map(normalize)
+                    })
+                    .collect::<anyhow::Result<Vec<_>>>()?
            }
            _ => {
                return Err(anyhow!(

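The 2D-output hunk above leans on `Result` implementing `FromIterator`: each row maps to a `Result`, and `collect::<anyhow::Result<Vec<_>>>()` short-circuits on the first error. A self-contained sketch of the same idiom, with plain `Vec`s standing in for the ndarray rows of the real code:

use anyhow::{anyhow, Result};

fn first_elements(rows: &[Vec<f32>]) -> Result<Vec<f32>> {
    rows.iter()
        .map(|row| {
            // Each row yields a Result instead of panicking on an empty row.
            row.first()
                .copied()
                .ok_or_else(|| anyhow!("row was empty"))
        })
        // The first Err stops iteration and becomes the function's return value;
        // otherwise all values are gathered into Ok(Vec<f32>).
        .collect::<Result<Vec<_>>>()
}
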
src/image_embedding/utils.rs

Lines changed: 21 additions & 11 deletions
@@ -142,17 +142,21 @@ impl Transform for Normalize {
        let array = data.array()?;
        let mean = Array::from_vec(self.mean.clone())
            .into_shape_with_order((3, 1, 1))
-            .unwrap();
+            .map_err(|e| anyhow!("Failed to reshape mean array: {}", e))?;
        let std = Array::from_vec(self.std.clone())
            .into_shape_with_order((3, 1, 1))
-            .unwrap();
+            .map_err(|e| anyhow!("Failed to reshape std array: {}", e))?;

        let shape = array.shape().to_vec();
        match shape.as_slice() {
            [c, h, w] => {
-                let array_normalized = array
-                    .sub(mean.broadcast((*c, *h, *w)).unwrap())
-                    .div(std.broadcast((*c, *h, *w)).unwrap());
+                let mean_broadcast = mean.broadcast((*c, *h, *w)).ok_or_else(|| {
+                    anyhow!("Failed to broadcast mean array to shape {:?}", (*c, *h, *w))
+                })?;
+                let std_broadcast = std.broadcast((*c, *h, *w)).ok_or_else(|| {
+                    anyhow!("Failed to broadcast std array to shape {:?}", (*c, *h, *w))
+                })?;
+                let array_normalized = array.sub(mean_broadcast).div(std_broadcast);
                Ok(TransformData::NdArray(array_normalized))
            }
            _ => Err(anyhow!(
@@ -229,18 +233,21 @@ fn load_preprocessor(config: serde_json::Value) -> anyhow::Result<Compose> {
        if config["do_center_crop"].as_bool().unwrap_or(false) {
            let crop_size = config["crop_size"].clone();
            let (height, width) = if crop_size.is_u64() {
-                let size = crop_size.as_u64().unwrap() as u32;
+                let size = crop_size
+                    .as_u64()
+                    .ok_or_else(|| anyhow!("crop_size must be a valid u64"))?
+                    as u32;
                (size, size)
            } else if crop_size.is_object() {
                (
                    crop_size["height"]
                        .as_u64()
                        .map(|height| height as u32)
-                        .ok_or(anyhow!("crop_size height must be contained"))?,
+                        .ok_or_else(|| anyhow!("crop_size height must be contained"))?,
                    crop_size["width"]
                        .as_u64()
                        .map(|width| width as u32)
-                        .ok_or(anyhow!("crop_size width must be contained"))?,
+                        .ok_or_else(|| anyhow!("crop_size width must be contained"))?,
                )
            } else {
                return Err(anyhow!("Invalid crop size: {:?}", crop_size));
@@ -304,18 +311,21 @@ fn load_preprocessor(config: serde_json::Value) -> anyhow::Result<Compose> {
        if config["do_center_crop"].as_bool().unwrap_or(false) {
            let crop_size = config["crop_size"].clone();
            let (height, width) = if crop_size.is_u64() {
-                let size = crop_size.as_u64().unwrap() as u32;
+                let size = crop_size
+                    .as_u64()
+                    .ok_or_else(|| anyhow!("crop_size must be a valid u64"))?
+                    as u32;
                (size, size)
            } else if crop_size.is_object() {
                (
                    crop_size["height"]
                        .as_u64()
                        .map(|height| height as u32)
-                        .ok_or(anyhow!("crop_size height must be contained"))?,
+                        .ok_or_else(|| anyhow!("crop_size height must be contained"))?,
                    crop_size["width"]
                        .as_u64()
                        .map(|width| width as u32)
-                        .ok_or(anyhow!("crop_size width must be contained"))?,
+                        .ok_or_else(|| anyhow!("crop_size width must be contained"))?,
                )
            } else {
                return Err(anyhow!("Invalid crop size: {:?}", crop_size));

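One small detail in this file: the `crop_size` lookups switch from `.ok_or(anyhow!(...))` to `.ok_or_else(|| anyhow!(...))`. `ok_or` builds its error argument eagerly, even when the `Option` is `Some`; `ok_or_else` defers the `anyhow!` formatting and allocation to a closure that only runs on `None`. A minimal, self-contained illustration with made-up values:

use anyhow::anyhow;

fn main() {
    let value: Option<u32> = Some(3);

    // Eager: the anyhow::Error (message formatting and allocation) is built
    // up front and then discarded, because `value` is Some.
    let _eager = value.ok_or(anyhow!("missing value"));

    // Lazy: the closure only runs when the Option is None, so the Some path
    // pays nothing for the error message.
    let _lazy = value.ok_or_else(|| anyhow!("missing value"));
}
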
src/models/image_embedding.rs

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ impl Display for ImageEmbeddingModel {
        let model_info = models_list()
            .into_iter()
            .find(|model| model.model == *self)
-            .unwrap();
+            .ok_or(std::fmt::Error)?;
        write!(f, "{}", model_info.model_code)
    }
}

src/models/reranking.rs

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ impl Display for RerankerModel {
        let model_info = reranker_model_list()
            .into_iter()
            .find(|model| model.model == *self)
-            .expect("Model not found in supported models list.")
+            .ok_or(std::fmt::Error)?;
        write!(f, "{}", model_info.model_code)
    }
}

src/models/sparse.rs

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ impl Display for SparseModel {
        let model_info = models_list()
            .into_iter()
            .find(|model| model.model == *self)
-            .unwrap();
+            .ok_or(std::fmt::Error)?;
        write!(f, "{}", model_info.model_code)
    }
}

src/models/text_embedding.rs

Lines changed: 1 addition & 1 deletion
@@ -402,7 +402,7 @@ impl ModelTrait for EmbeddingModel {

impl Display for EmbeddingModel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let model_info = EmbeddingModel::get_model_info(self).expect("Model not found.");
+        let model_info = EmbeddingModel::get_model_info(self).ok_or(std::fmt::Error)?;
        write!(f, "{}", model_info.model_code)
    }
}

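The four `Display` changes (here and in `image_embedding.rs`, `reranking.rs`, `sparse.rs`) share one constraint: `fmt` must return `std::fmt::Result`, which carries no payload, so a failed model lookup cannot surface an `anyhow` error and is instead mapped to `std::fmt::Error` with `ok_or(...)?`. A sketch of that shape using a hypothetical enum and lookup function (the real code searches a models list):

use std::fmt;

enum DemoModel {
    Small,
}

fn model_code(model: &DemoModel) -> Option<&'static str> {
    match model {
        DemoModel::Small => Some("demo/model-small-v1"),
    }
}

impl fmt::Display for DemoModel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // fmt::Result is Result<(), fmt::Error>, so the missing-model case is
        // converted to fmt::Error and propagated with `?` instead of panicking here.
        let code = model_code(self).ok_or(fmt::Error)?;
        write!(f, "{}", code)
    }
}

fn main() {
    println!("{}", DemoModel::Small); // prints "demo/model-small-v1"
}
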
src/reranking/impl.rs

Lines changed: 5 additions & 2 deletions
@@ -42,7 +42,7 @@ impl TextRerank {
        TextRerank::list_supported_models()
            .into_iter()
            .find(|m| &m.model == model)
-            .expect("Model not found.")
+            .expect("Model not found in supported models list. This is a bug - please report it.")
    }

    pub fn list_supported_models() -> Vec<RerankerModelInfo> {
@@ -140,7 +140,10 @@ impl TextRerank {
            .encode_batch(inputs, true)
            .map_err(|e| anyhow::Error::msg(e.to_string()).context("Failed to encode batch"))?;

-        let encoding_length = encodings[0].len();
+        let encoding_length = encodings
+            .first()
+            .ok_or_else(|| anyhow::anyhow!("Tokenizer returned empty encodings"))?
+            .len();
        let batch_size = batch.len();
        let max_size = encoding_length * batch_size;

src/sparse_text_embedding/impl.rs

Lines changed: 9 additions & 3 deletions
@@ -101,7 +101,7 @@ impl SparseTextEmbedding {
        SparseTextEmbedding::list_supported_models()
            .into_iter()
            .find(|m| &m.model == model)
-            .expect("Model not found.")
+            .expect("Model not found in supported models list. This is a bug - please report it.")
    }

    /// Method to generate sentence embeddings for a Vec of texts
@@ -124,7 +124,10 @@ impl SparseTextEmbedding {
            })?;

        // Extract the encoding length and batch size
-        let encoding_length = encodings[0].len();
+        let encoding_length = encodings
+            .first()
+            .ok_or_else(|| anyhow::anyhow!("Tokenizer returned empty encodings"))?
+            .len();
        let batch_size = batch.len();

        let max_size = encoding_length * batch_size;
@@ -170,7 +173,10 @@ impl SparseTextEmbedding {
        // Try to get the only output key
        // If multiple, then default to `last_hidden_state`
        let last_hidden_state_key = match outputs.len() {
-            1 => outputs.keys().next().unwrap(),
+            1 => outputs
+                .keys()
+                .next()
+                .ok_or_else(|| anyhow::anyhow!("Expected one output but found none"))?,
            _ => "last_hidden_state",
        };

src/text_embedding/impl.rs

Lines changed: 4 additions & 1 deletion
@@ -300,7 +300,10 @@ impl TextEmbedding {
            })?;

        // Extract the encoding length and batch size
-        let encoding_length = encodings[0].len();
+        let encoding_length = encodings
+            .first()
+            .ok_or_else(|| anyhow::anyhow!("Tokenizer returned empty encodings"))?
+            .len();
        let batch_size = batch.len();

        let max_size = encoding_length * batch_size;

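Same pattern one last time: indexing with `encodings[0]` panics with a generic out-of-bounds message if the tokenizer ever returns an empty batch, while `.first()` yields an `Option` that can be turned into a descriptive error. A standalone sketch; the `Encoding` struct below is a stand-in, not the tokenizers crate type:

use anyhow::{anyhow, Result};

struct Encoding {
    ids: Vec<u32>,
}

impl Encoding {
    fn len(&self) -> usize {
        self.ids.len()
    }
}

fn encoding_length(encodings: &[Encoding]) -> Result<usize> {
    // Before: encodings[0].len() aborts on an empty slice with an index error.
    // After: the empty batch becomes a recoverable anyhow error.
    Ok(encodings
        .first()
        .ok_or_else(|| anyhow!("Tokenizer returned empty encodings"))?
        .len())
}
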