diff --git a/coverage.xml b/coverage.xml
index 612cac46..755c321e 100644
--- a/coverage.xml
+++ b/coverage.xml
@@ -1,5 +1,5 @@
@@ -22,88 +22,90 @@
@@ -361,6 +363,10 @@
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index 3e9508d6..a0452a31 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -1,6 +1,7 @@
 import numpy as np
 from numpy.linalg import norm
 from tqdm.auto import tqdm
+from semantic_router.utils.logger import logger
 
 from semantic_router.encoders import (
     BaseEncoder,
@@ -89,7 +90,7 @@ def _query(self, text: str, top_k: int = 5):
         # convex scaling
         xq_d, xq_s = self._convex_scaling(xq_d, xq_s)
 
-        if self.index is not None:
+        if self.index is not None and self.sparse_index is not None:
             # calculate dense vec similarity
             index_norm = norm(self.index, axis=1)
             xq_d_norm = norm(xq_d.T)
@@ -107,9 +108,10 @@
             routes = self.categories[idx] if self.categories is not None else []
             return [{"route": d, "score": s.item()} for d, s in zip(routes, scores)]
         else:
+            logger.warning("No index found. Please add routes to the layer.")
             return []
 
-    def _convex_scaling(self, dense: list[float], sparse: list[float]):
+    def _convex_scaling(self, dense: np.ndarray, sparse: np.ndarray):
         # scale sparse and dense vecs
         dense = np.array(dense) * self.alpha
         sparse = np.array(sparse) * (1 - self.alpha)
diff --git a/semantic_router/utils/__init__.py b/semantic_router/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
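
For reference, below is a minimal standalone sketch of the behavior this patch enforces: convex scaling of the dense and sparse query vectors, followed by the new guard that requires both the dense and sparse index to exist before computing similarities, and the warning emitted otherwise. The function name query_sketch, the stdlib logging setup, and the alpha default are illustrative assumptions, not the semantic_router API.

import logging

import numpy as np
from numpy.linalg import norm

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def query_sketch(index, sparse_index, xq_d, xq_s, alpha: float = 0.3):
    """Illustrative stand-in for HybridRouteLayer._query (assumed names/shapes)."""
    # convex scaling: dense vector weighted by alpha, sparse by (1 - alpha)
    xq_d = np.asarray(xq_d) * alpha
    xq_s = np.asarray(xq_s) * (1 - alpha)
    # the patched guard: both indexes must be populated before querying
    if index is not None and sparse_index is not None:
        # cosine similarity between the dense query and each indexed vector
        # (the sparse-similarity half of the hybrid score is omitted here)
        return (index @ xq_d) / (norm(index, axis=1) * norm(xq_d))
    # previously this path returned [] silently; the patch logs a warning first
    logger.warning("No index found. Please add routes to the layer.")
    return []


# example usage with random vectors: 4 routes, 8-dim dense, 16-dim sparse
scores = query_sketch(np.random.rand(4, 8), np.random.rand(4, 16),
                      np.random.rand(8), np.random.rand(16))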