diff --git a/inference/core/workflows/core_steps/models/roboflow/multi_label_classification/v2.py b/inference/core/workflows/core_steps/models/roboflow/multi_label_classification/v2.py
index 4c26f66792..c81f0b4df9 100644
--- a/inference/core/workflows/core_steps/models/roboflow/multi_label_classification/v2.py
+++ b/inference/core/workflows/core_steps/models/roboflow/multi_label_classification/v2.py
@@ -181,7 +181,7 @@ def run_locally(
             api_key=self._api_key,
         )
         predictions = self._model_manager.infer_from_request_sync(
-            model_id=model_id, request=request
+            model_id=model_id, request=request, confidence=confidence
         )
         if isinstance(predictions, list):
             predictions = [
diff --git a/inference/core/workflows/core_steps/sinks/roboflow/dataset_upload/v1.py b/inference/core/workflows/core_steps/sinks/roboflow/dataset_upload/v1.py
index 2080a21f9f..3f165596fc 100644
--- a/inference/core/workflows/core_steps/sinks/roboflow/dataset_upload/v1.py
+++ b/inference/core/workflows/core_steps/sinks/roboflow/dataset_upload/v1.py
@@ -552,7 +552,9 @@ def is_prediction_registration_forbidden(
         return True
     if isinstance(prediction, sv.Detections) and len(prediction) == 0:
         return True
-    if isinstance(prediction, dict) and "top" not in prediction:
+    if isinstance(prediction, dict) and all(
+        k not in prediction for k in ["top", "predicted_classes"]
+    ):
         return True
     return False
 
@@ -561,6 +563,8 @@ def encode_prediction(
     prediction: Union[sv.Detections, dict],
 ) -> Tuple[str, str]:
     if isinstance(prediction, dict):
+        if "predicted_classes" in prediction:
+            return ",".join(prediction["predicted_classes"]), "txt"
         return prediction["top"], "txt"
     detections_in_inference_format = serialise_sv_detections(detections=prediction)
     return json.dumps(detections_in_inference_format), "json"
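
For reference, a minimal standalone sketch of how the changed `dataset_upload` helpers treat a multi-label classification prediction, assuming the block's output dict exposes its classes under `predicted_classes`. The trimmed signatures and sample dicts below are illustrative only; the upstream functions also handle `sv.Detections` and the registration flags shown in the hunks above.

```python
# Sketch of the dict-handling path from the diff above, with the
# sv.Detections branch and registration flags stripped for brevity.
from typing import Optional, Tuple


def is_prediction_registration_forbidden(prediction: Optional[dict]) -> bool:
    if prediction is None:
        return True
    # Registration is now allowed when either "top" (single-label)
    # or "predicted_classes" (multi-label) is present.
    if isinstance(prediction, dict) and all(
        k not in prediction for k in ["top", "predicted_classes"]
    ):
        return True
    return False


def encode_prediction(prediction: dict) -> Tuple[str, str]:
    # Multi-label predictions are serialised as a comma-separated class list.
    if "predicted_classes" in prediction:
        return ",".join(prediction["predicted_classes"]), "txt"
    return prediction["top"], "txt"


# Example dicts are assumptions about the classification output shape.
multi_label = {"predicted_classes": ["cat", "dog"]}
single_label = {"top": "cat"}

assert not is_prediction_registration_forbidden(multi_label)
assert encode_prediction(multi_label) == ("cat,dog", "txt")
assert encode_prediction(single_label) == ("cat", "txt")
```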