pinecone.py

import logging
import os
from typing import Any, Optional, Union

try:
    import pinecone
except ImportError:
    raise ImportError(
        "Pinecone requires extra dependencies. Install with `pip install --upgrade 'embedchain[pinecone]'`"
    ) from None

from pinecone_text.sparse import BM25Encoder

from embedchain.config.vectordb.pinecone import PineconeDBConfig
from embedchain.helpers.json_serializable import register_deserializable
from embedchain.utils.misc import chunks
from embedchain.vectordb.base import BaseVectorDB


@register_deserializable
class PineconeDB(BaseVectorDB):
    """
    Pinecone as vector database
    """

    BATCH_SIZE = 100

    def __init__(
        self,
        config: Optional[PineconeDBConfig] = None,
    ):
        """Pinecone as vector database.

        :param config: Pinecone database config, defaults to None
        :type config: PineconeDBConfig, optional
        :raises TypeError: config is not a `PineconeDBConfig` instance
        """
        if config is None:
            self.config = PineconeDBConfig()
        else:
            if not isinstance(config, PineconeDBConfig):
                raise TypeError(
                    "config is not a `PineconeDBConfig` instance. "
                    "Please make sure the type is right and that you are passing an instance."
                )
            self.config = config
        self._setup_pinecone_index()

        # Setup BM25Encoder if sparse vectors are to be used
        self.bm25_encoder = None
        if self.config.hybrid_search:
            # TODO: Add support for fitting BM25Encoder on any corpus
            logging.info("Initializing BM25Encoder for sparse vectors..")
            self.bm25_encoder = BM25Encoder.default()

        # Call parent init here because embedder is needed
        super().__init__(config=self.config)

    def _initialize(self):
        """
        This method is needed because `embedder` attribute needs to be set externally before it can be initialized.
        """
        if not self.embedder:
            raise ValueError("Embedder not set. Please set an embedder with `set_embedder` before initialization.")

    def _setup_pinecone_index(self):
        """
        Loads the Pinecone index or creates it if not present.
        """
        api_key = self.config.api_key or os.environ.get("PINECONE_API_KEY")
        if not api_key:
            raise ValueError("Please set the PINECONE_API_KEY environment variable or pass it in config.")
        self.client = pinecone.Pinecone(api_key=api_key, **self.config.extra_params)
        indexes = self.client.list_indexes().names()
        if indexes is None or self.config.index_name not in indexes:
            if self.config.pod_config:
                spec = pinecone.PodSpec(**self.config.pod_config)
            elif self.config.serverless_config:
                spec = pinecone.ServerlessSpec(**self.config.serverless_config)
            else:
                raise ValueError("No pod_config or serverless_config found.")
            self.client.create_index(
                name=self.config.index_name,
                metric=self.config.metric,
                dimension=self.config.vector_dimension,
                spec=spec,
            )
        self.pinecone_index = self.client.Index(self.config.index_name)
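
    # Illustrative config shapes (added for clarity; not in the original source):
    # `pod_config` and `serverless_config` are unpacked straight into
    # `pinecone.PodSpec` / `pinecone.ServerlessSpec`, so their keys must match
    # those constructors, e.g.:
    #   pod_config        = {"environment": "gcp-starter", "pod_type": "p1.x1"}
    #   serverless_config = {"cloud": "aws", "region": "us-east-1"}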

    def get(self, ids: Optional[list[str]] = None, where: Optional[dict[str, Any]] = None, limit: Optional[int] = None):
        """
        Get existing doc ids present in vector database

        :param ids: list of doc ids to check for existence
        :type ids: list[str]
        :param where: to filter data
        :type where: dict[str, Any]
        :return: dict containing the ids and metadatas of existing documents
        :rtype: dict[str, list]
        """
        existing_ids = list()
        metadatas = []
        if ids is not None:
            # Fetch in batches of 1000 ids per request
            for i in range(0, len(ids), 1000):
                result = self.pinecone_index.fetch(ids=ids[i : i + 1000])
                vectors = result.get("vectors")
                batch_existing_ids = list(vectors.keys())
                existing_ids.extend(batch_existing_ids)
                # Use a distinct loop variable to avoid shadowing the `ids` argument
                metadatas.extend([vectors.get(doc_id).get("metadata") for doc_id in batch_existing_ids])
        return {"ids": existing_ids, "metadatas": metadatas}

    def add(
        self,
        documents: list[str],
        metadatas: list[object],
        ids: list[str],
        **kwargs: Optional[dict[str, Any]],
    ):
        """Add data to the vector database.

        :param documents: list of texts to add
        :type documents: list[str]
        :param metadatas: list of metadata associated with docs
        :type metadatas: list[object]
        :param ids: ids of docs
        :type ids: list[str]
        """
        docs = []
        embeddings = self.embedder.embedding_fn(documents)
        for id, text, metadata, embedding in zip(ids, documents, metadatas, embeddings):
            # Insert sparse vectors as well if the user wants to do the hybrid search
            sparse_vector_dict = (
                {"sparse_values": self.bm25_encoder.encode_documents(text)} if self.bm25_encoder else {}
            )
            docs.append(
                {
                    "id": id,
                    "values": embedding,
                    "metadata": {**metadata, "text": text},
                    **sparse_vector_dict,
                },
            )

        for chunk in chunks(docs, self.BATCH_SIZE, desc="Adding chunks in batches"):
            self.pinecone_index.upsert(chunk, **kwargs)

    def query(
        self,
        input_query: str,
        n_results: int,
        where: Optional[dict[str, Any]] = None,
        raw_filter: Optional[dict[str, Any]] = None,
        citations: bool = False,
        app_id: Optional[str] = None,
        **kwargs: Optional[dict[str, Any]],
    ) -> Union[list[tuple[str, dict]], list[str]]:
        """
        Query contents from vector database based on vector similarity.

        Args:
            input_query (str): Query string.
            n_results (int): Number of similar documents to fetch from the database.
            where (dict[str, Any], optional): Filter criteria for the search.
            raw_filter (dict[str, Any], optional): Advanced raw filter criteria for the search.
            citations (bool, optional): Flag to return context along with metadata. Defaults to False.
            app_id (str, optional): Application ID to be passed to Pinecone.

        Returns:
            Union[list[tuple[str, dict]], list[str]]: List of document contexts, optionally with metadata.
        """
        query_filter = raw_filter if raw_filter is not None else self._generate_filter(where)
        if app_id:
            query_filter["app_id"] = {"$eq": app_id}
        query_vector = self.embedder.embedding_fn([input_query])[0]
        params = {
            "vector": query_vector,
            "filter": query_filter,
            "top_k": n_results,
            "include_metadata": True,
            **kwargs,
        }
        if self.bm25_encoder:
            sparse_query_vector = self.bm25_encoder.encode_queries(input_query)
            params["sparse_vector"] = sparse_query_vector
        data = self.pinecone_index.query(**params)
        return [
            (metadata.get("text"), {**metadata, "score": doc.get("score")}) if citations else metadata.get("text")
            for doc in data.get("matches", [])
            for metadata in [doc.get("metadata", {})]
        ]
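
    # Example call shape (illustrative; not in the original source):
    #   db.query("what is pinecone?", n_results=2, where={"app_id": "demo"}, citations=True)
    # With citations=False the list holds only the stored document texts; with
    # citations=True each entry is a (text, metadata) tuple, where the metadata
    # dict also carries the Pinecone similarity "score" for that match.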

    def set_collection_name(self, name: str):
        """
        Set the name of the collection. A collection is an isolated space for vectors.

        :param name: Name of the collection.
        :type name: str
        """
        if not isinstance(name, str):
            raise TypeError("Collection name must be a string")
        self.config.collection_name = name

    def count(self) -> int:
        """
        Count number of documents/chunks embedded in the database.

        :return: number of documents
        :rtype: int
        """
        data = self.pinecone_index.describe_index_stats()
        return data["total_vector_count"]

    def _get_or_create_db(self):
        """Called during initialization"""
        return self.client

    def reset(self):
        """
        Resets the database. Deletes all embeddings irreversibly.
        """
        # Delete all data from the database
        self.client.delete_index(self.config.index_name)
        self._setup_pinecone_index()

    @staticmethod
    def _generate_filter(where: dict):
        """Translate a simple key/value `where` dict into Pinecone's metadata filter syntax."""
        query = {}
        if where is None:
            return query
        for k, v in where.items():
            query[k] = {"$eq": v}
        return query
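
    # Worked example (illustrative): _generate_filter({"app_id": "demo", "url": "https://example.com"})
    # returns {"app_id": {"$eq": "demo"}, "url": {"$eq": "https://example.com"}},
    # i.e. each key becomes an equality clause in Pinecone's metadata-filter syntax.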

    def delete(self, where: dict):
        """Delete vectors from the database that match the given metadata filter.

        :param where: key/value metadata filter selecting the vectors to delete
        :type where: dict
        """
        # Deleting with filters is not supported for `starter` index type.
        # Follow `https://docs.pinecone.io/docs/metadata-filtering#deleting-vectors-by-metadata-filter` for more details
        db_filter = self._generate_filter(where)
        try:
            self.pinecone_index.delete(filter=db_filter)
        except Exception as e:
            print(f"Failed to delete from Pinecone: {e}")
            return
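

# --- Minimal usage sketch (illustrative; not part of the original module) ---
# Assumptions: PINECONE_API_KEY is exported, and `PineconeDBConfig` accepts the
# keyword arguments shown below (they mirror the config attributes this class
# reads: index_name, vector_dimension, metric, serverless_config). `_StubEmbedder`
# is a hypothetical stand-in exposing the `embedding_fn(texts) -> vectors`
# interface used above; in practice a real embedchain embedder is attached by
# the framework instead.
if __name__ == "__main__":

    class _StubEmbedder:
        def embedding_fn(self, texts: list[str]) -> list[list[float]]:
            # Placeholder fixed-size vectors; swap in a real embedding model.
            return [[0.1] * 1536 for _ in texts]

    db = PineconeDB(
        config=PineconeDBConfig(  # assumed constructor kwargs
            index_name="embedchain-demo",
            vector_dimension=1536,
            metric="cosine",
            serverless_config={"cloud": "aws", "region": "us-east-1"},
        )
    )
    db.embedder = _StubEmbedder()  # normally wired up by the base class / app
    db.add(documents=["Pinecone stores vectors."], metadatas=[{"app_id": "demo"}], ids=["doc-1"])
    print(db.count())
    print(db.query("what does pinecone store?", n_results=1, app_id="demo", citations=True))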