From 1b2793b12a50d2183b2f905f00ca86a2d28899d4 Mon Sep 17 00:00:00 2001 From: Malte Reimann Date: Fri, 10 Oct 2025 11:54:13 +0200 Subject: [PATCH] fix: empty inference cache --- .../core/evaluation/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/amzn_nova_prompt_optimizer/core/evaluation/__init__.py b/src/amzn_nova_prompt_optimizer/core/evaluation/__init__.py index e010b65..bc64a1a 100644 --- a/src/amzn_nova_prompt_optimizer/core/evaluation/__init__.py +++ b/src/amzn_nova_prompt_optimizer/core/evaluation/__init__.py @@ -65,7 +65,13 @@ def _get_or_run_inference(self, model_id: str) -> list: if cache_key not in self._inference_cache: self.logger.info("Cache miss - Running new inference on Dataset") - self._inference_cache[cache_key] = self.inference_runner.run(model_id) + inference_results = self.inference_runner.run(model_id) + if inference_results: + self._inference_cache[cache_key] = inference_results + else: + self.logger.warning("No inference results returned. Check the inference logs for any errors.") + return inference_results + else: self.logger.info("Using cached inference results")