# pinecone.py

import logging
import os
from typing import Optional, Union

try:
    import pinecone
except ImportError:
    raise ImportError(
        "Pinecone requires extra dependencies. Install with `pip install pinecone-text pinecone-client`"
    ) from None

from pinecone_text.sparse import BM25Encoder

from embedchain.config.vectordb.pinecone import PineconeDBConfig
from embedchain.helpers.json_serializable import register_deserializable
from embedchain.utils.misc import chunks
from embedchain.vectordb.base import BaseVectorDB

logger = logging.getLogger(__name__)


@register_deserializable
class PineconeDB(BaseVectorDB):
    """
    Pinecone as vector database
    """

    def __init__(
        self,
        config: Optional[PineconeDBConfig] = None,
    ):
        """Pinecone as vector database.

        :param config: Pinecone database config, defaults to None
        :type config: PineconeDBConfig, optional
        :raises TypeError: config is not a `PineconeDBConfig` instance
        """
        if config is None:
            self.config = PineconeDBConfig()
        else:
            if not isinstance(config, PineconeDBConfig):
                raise TypeError(
                    "config is not a `PineconeDBConfig` instance. "
                    "Please make sure the type is right and that you are passing an instance."
                )
            self.config = config
        self._setup_pinecone_index()
        # Set up BM25Encoder if sparse vectors are to be used
        self.bm25_encoder = None
        if self.config.hybrid_search:
            logger.info("Initializing BM25Encoder for sparse vectors..")
            self.bm25_encoder = self.config.bm25_encoder if self.config.bm25_encoder else BM25Encoder.default()
        # Call parent init here because embedder is needed
        super().__init__(config=self.config)
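
    # Note: with `hybrid_search` enabled, upserts and queries carry both the dense
    # embedding and a BM25 sparse vector; Pinecone's hybrid search docs recommend
    # creating such indexes with the "dotproduct" metric.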

    def _initialize(self):
        """
        This method is needed because the `embedder` attribute needs to be set externally before it can be initialized.
        """
        if not self.embedder:
            raise ValueError("Embedder not set. Please set an embedder with `set_embedder` before initialization.")

    def _setup_pinecone_index(self):
        """
        Loads the Pinecone index or creates it if not present.
        """
        api_key = self.config.api_key or os.environ.get("PINECONE_API_KEY")
        if not api_key:
            raise ValueError("Please set the PINECONE_API_KEY environment variable or pass it in config.")
        self.client = pinecone.Pinecone(api_key=api_key, **self.config.extra_params)
        indexes = self.client.list_indexes().names()
        if indexes is None or self.config.index_name not in indexes:
            if self.config.pod_config:
                spec = pinecone.PodSpec(**self.config.pod_config)
            elif self.config.serverless_config:
                spec = pinecone.ServerlessSpec(**self.config.serverless_config)
            else:
                raise ValueError("No pod_config or serverless_config found.")
            self.client.create_index(
                name=self.config.index_name,
                metric=self.config.metric,
                dimension=self.config.vector_dimension,
                spec=spec,
            )
        self.pinecone_index = self.client.Index(self.config.index_name)
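
    # Illustrative spec shapes (example values, not embedchain defaults):
    # `pod_config` is unpacked into `pinecone.PodSpec`, e.g.
    # {"environment": "us-east1-gcp", "pod_type": "p1.x1"}, and
    # `serverless_config` into `pinecone.ServerlessSpec`, e.g.
    # {"cloud": "aws", "region": "us-east-1"}.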

    def get(self, ids: Optional[list[str]] = None, where: Optional[dict[str, any]] = None, limit: Optional[int] = None):
        """
        Get existing doc ids present in the vector database.

        Note: `where` and `limit` are accepted for interface compatibility but are not used by this implementation.

        :param ids: list of doc ids to check for existence
        :type ids: list[str]
        :param where: filter criteria (currently unused)
        :type where: dict[str, any]
        :return: existing ids and their metadata
        :rtype: dict[str, list]
        """
        existing_ids = list()
        metadatas = []
        if ids is not None:
            for i in range(0, len(ids), self.config.batch_size):
                result = self.pinecone_index.fetch(ids=ids[i : i + self.config.batch_size])
                vectors = result.get("vectors")
                batch_existing_ids = list(vectors.keys())
                existing_ids.extend(batch_existing_ids)
                metadatas.extend([vectors.get(doc_id).get("metadata") for doc_id in batch_existing_ids])
        return {"ids": existing_ids, "metadatas": metadatas}
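
    # Illustrative return shape: {"ids": ["doc-1", "doc-2"],
    # "metadatas": [{"text": "...", "app_id": "demo"}, ...]}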

    def add(
        self,
        documents: list[str],
        metadatas: list[object],
        ids: list[str],
        **kwargs: Optional[dict[str, any]],
    ):
        """Add data to the vector database.

        :param documents: list of texts to add
        :type documents: list[str]
        :param metadatas: list of metadata associated with docs
        :type metadatas: list[object]
        :param ids: ids of docs
        :type ids: list[str]
        """
        docs = []
        embeddings = self.embedder.embedding_fn(documents)
        for id, text, metadata, embedding in zip(ids, documents, metadatas, embeddings):
            # Insert sparse vectors as well if the user wants to do hybrid search
            sparse_vector_dict = (
                {"sparse_values": self.bm25_encoder.encode_documents(text)} if self.bm25_encoder else {}
            )
            docs.append(
                {
                    "id": id,
                    "values": embedding,
                    "metadata": {**metadata, "text": text},
                    **sparse_vector_dict,
                },
            )
        for chunk in chunks(docs, self.config.batch_size, desc="Adding chunks in batches"):
            self.pinecone_index.upsert(chunk, **kwargs)
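
    # Shape of each upserted record (illustrative values): the dense embedding goes
    # in "values", the source text is stored alongside user metadata, and when
    # hybrid search is enabled a BM25 "sparse_values" entry of the form
    # {"indices": [...], "values": [...]} (as produced by pinecone-text) is added.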

    def query(
        self,
        input_query: str,
        n_results: int,
        where: Optional[dict[str, any]] = None,
        raw_filter: Optional[dict[str, any]] = None,
        citations: bool = False,
        app_id: Optional[str] = None,
        **kwargs: Optional[dict[str, any]],
    ) -> Union[list[tuple[str, dict]], list[str]]:
        """
        Query contents from vector database based on vector similarity.

        Args:
            input_query (str): query string.
            n_results (int): Number of similar documents to fetch from the database.
            where (dict[str, any], optional): Filter criteria for the search.
            raw_filter (dict[str, any], optional): Advanced raw filter criteria for the search.
            citations (bool, optional): Flag to return context along with metadata. Defaults to False.
            app_id (str, optional): Application ID to be passed to Pinecone.

        Returns:
            Union[list[tuple[str, dict]], list[str]]: List of document contexts, optionally with metadata.
        """
        query_filter = raw_filter if raw_filter is not None else self._generate_filter(where)
        if app_id:
            query_filter["app_id"] = {"$eq": app_id}
        query_vector = self.embedder.embedding_fn([input_query])[0]
        params = {
            "vector": query_vector,
            "filter": query_filter,
            "top_k": n_results,
            "include_metadata": True,
            **kwargs,
        }
        if self.bm25_encoder:
            sparse_query_vector = self.bm25_encoder.encode_queries(input_query)
            params["sparse_vector"] = sparse_query_vector
        data = self.pinecone_index.query(**params)
        return [
            (metadata.get("text"), {**metadata, "score": doc.get("score")}) if citations else metadata.get("text")
            for doc in data.get("matches", [])
            for metadata in [doc.get("metadata", {})]
        ]
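
    # Example (illustrative): `db.query("what is embedchain?", n_results=3,
    # where={"author": "alice"}, citations=True)` returns (text, metadata) tuples
    # where metadata includes the Pinecone match score; with citations=False only
    # the matched text strings are returned.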

    def set_collection_name(self, name: str):
        """
        Set the name of the collection. A collection is an isolated space for vectors.

        :param name: Name of the collection.
        :type name: str
        """
        if not isinstance(name, str):
            raise TypeError("Collection name must be a string")
        self.config.collection_name = name

    def count(self) -> int:
        """
        Count number of documents/chunks embedded in the database.

        :return: number of documents
        :rtype: int
        """
        data = self.pinecone_index.describe_index_stats()
        return data["total_vector_count"]

    def _get_or_create_db(self):
        """Called during initialization"""
        return self.client

    def reset(self):
        """
        Resets the database. Deletes all embeddings irreversibly.
        """
        # Delete all data from the database
        self.client.delete_index(self.config.index_name)
        self._setup_pinecone_index()

    @staticmethod
    def _generate_filter(where: dict):
        query = {}
        if where is None:
            return query
        for k, v in where.items():
            query[k] = {"$eq": v}
        return query
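
    # For example, `_generate_filter({"app_id": "demo", "author": "alice"})`
    # returns the Pinecone metadata filter
    # {"app_id": {"$eq": "demo"}, "author": {"$eq": "alice"}}.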

    def delete(self, where: dict):
        """Delete vectors that match the given metadata filter.

        :param where: filter criteria used to select the vectors to delete
        :type where: dict
        """
        # Deleting with metadata filters is not supported for the `starter` index type.
        # Follow `https://docs.pinecone.io/docs/metadata-filtering#deleting-vectors-by-metadata-filter` for more details
        db_filter = self._generate_filter(where)
        try:
            self.pinecone_index.delete(filter=db_filter)
        except Exception as e:
            logger.error("Failed to delete from Pinecone: %s", e)
            return
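

# Usage sketch (illustrative only; assumes PineconeDBConfig accepts these fields
# as constructor kwargs and that PINECONE_API_KEY is set in the environment).
# Embedder wiring is handled by embedchain via `set_embedder` (see `_initialize`).
#
#   from embedchain.config.vectordb.pinecone import PineconeDBConfig
#   from embedchain.vectordb.pinecone import PineconeDB
#
#   config = PineconeDBConfig(
#       index_name="my-index",
#       serverless_config={"cloud": "aws", "region": "us-east-1"},
#   )
#   db = PineconeDB(config=config)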