# opensearch.py
  1. import logging
  2. import time
  3. from typing import Any, Optional, Union
  4. from tqdm import tqdm
  5. try:
  6. from opensearchpy import OpenSearch
  7. from opensearchpy.helpers import bulk
  8. except ImportError:
  9. raise ImportError(
  10. "OpenSearch requires extra dependencies. Install with `pip install --upgrade embedchain[opensearch]`"
  11. ) from None
  12. from langchain_community.embeddings.openai import OpenAIEmbeddings
  13. from langchain_community.vectorstores import OpenSearchVectorSearch
  14. from embedchain.config import OpenSearchDBConfig
  15. from embedchain.helpers.json_serializable import register_deserializable
  16. from embedchain.vectordb.base import BaseVectorDB
  17. logger = logging.getLogger(__name__)
  18. @register_deserializable
  19. class OpenSearchDB(BaseVectorDB):
  20. """
  21. OpenSearch as vector database
  22. """
  23. BATCH_SIZE = 100
  24. def __init__(self, config: OpenSearchDBConfig):
  25. """OpenSearch as vector database.
  26. :param config: OpenSearch domain config
  27. :type config: OpenSearchDBConfig
  28. """
  29. if config is None:
  30. raise ValueError("OpenSearchDBConfig is required")
  31. self.config = config
  32. self.client = OpenSearch(
  33. hosts=[self.config.opensearch_url],
  34. http_auth=self.config.http_auth,
  35. **self.config.extra_params,
  36. )
  37. info = self.client.info()
  38. logger.info(f"Connected to {info['version']['distribution']}. Version: {info['version']['number']}")
  39. # Remove auth credentials from config after successful connection
  40. super().__init__(config=self.config)
  41. def _initialize(self):
  42. logger.info(self.client.info())
  43. index_name = self._get_index()
  44. if self.client.indices.exists(index=index_name):
  45. print(f"Index '{index_name}' already exists.")
  46. return
  47. index_body = {
  48. "settings": {"knn": True},
  49. "mappings": {
  50. "properties": {
  51. "text": {"type": "text"},
  52. "embeddings": {
  53. "type": "knn_vector",
  54. "index": False,
  55. "dimension": self.config.vector_dimension,
  56. },
  57. }
  58. },
  59. }
  60. self.client.indices.create(index_name, body=index_body)
  61. print(self.client.indices.get(index_name))
  62. def _get_or_create_db(self):
  63. """Called during initialization"""
  64. return self.client
  65. def _get_or_create_collection(self, name):
  66. """Note: nothing to return here. Discuss later"""
  67. def get(
  68. self, ids: Optional[list[str]] = None, where: Optional[dict[str, any]] = None, limit: Optional[int] = None
  69. ) -> set[str]:
  70. """
  71. Get existing doc ids present in vector database
  72. :param ids: _list of doc ids to check for existence
  73. :type ids: list[str]
  74. :param where: to filter data
  75. :type where: dict[str, any]
  76. :return: ids
  77. :type: set[str]
  78. """
  79. query = {}
  80. if ids:
  81. query["query"] = {"bool": {"must": [{"ids": {"values": ids}}]}}
  82. else:
  83. query["query"] = {"bool": {"must": []}}
  84. if where:
  85. for key, value in where.items():
  86. query["query"]["bool"]["must"].append({"term": {f"metadata.{key}.keyword": value}})
  87. # OpenSearch syntax is different from Elasticsearch
  88. response = self.client.search(index=self._get_index(), body=query, _source=True, size=limit)
  89. docs = response["hits"]["hits"]
  90. ids = [doc["_id"] for doc in docs]
  91. doc_ids = [doc["_source"]["metadata"]["doc_id"] for doc in docs]
  92. # Result is modified for compatibility with other vector databases
  93. # TODO: Add method in vector database to return result in a standard format
  94. result = {"ids": ids, "metadatas": []}
  95. for doc_id in doc_ids:
  96. result["metadatas"].append({"doc_id": doc_id})
  97. return result
  98. def add(self, documents: list[str], metadatas: list[object], ids: list[str], **kwargs: Optional[dict[str, any]]):
  99. """Adds documents to the opensearch index"""
  100. embeddings = self.embedder.embedding_fn(documents)
  101. for batch_start in tqdm(range(0, len(documents), self.BATCH_SIZE), desc="Inserting batches in opensearch"):
  102. batch_end = batch_start + self.BATCH_SIZE
  103. batch_documents = documents[batch_start:batch_end]
  104. batch_embeddings = embeddings[batch_start:batch_end]
  105. # Create document entries for bulk upload
  106. batch_entries = [
  107. {
  108. "_index": self._get_index(),
  109. "_id": doc_id,
  110. "_source": {"text": text, "metadata": metadata, "embeddings": embedding},
  111. }
  112. for doc_id, text, metadata, embedding in zip(
  113. ids[batch_start:batch_end], batch_documents, metadatas[batch_start:batch_end], batch_embeddings
  114. )
  115. ]
  116. # Perform bulk operation
  117. bulk(self.client, batch_entries, **kwargs)
  118. self.client.indices.refresh(index=self._get_index())
  119. # Sleep to avoid rate limiting
  120. time.sleep(0.1)
  121. def query(
  122. self,
  123. input_query: str,
  124. n_results: int,
  125. where: dict[str, any],
  126. citations: bool = False,
  127. **kwargs: Optional[dict[str, Any]],
  128. ) -> Union[list[tuple[str, dict]], list[str]]:
  129. """
  130. query contents from vector database based on vector similarity
  131. :param input_query: query string
  132. :type input_query: str
  133. :param n_results: no of similar documents to fetch from database
  134. :type n_results: int
  135. :param where: Optional. to filter data
  136. :type where: dict[str, any]
  137. :param citations: we use citations boolean param to return context along with the answer.
  138. :type citations: bool, default is False.
  139. :return: The content of the document that matched your query,
  140. along with url of the source and doc_id (if citations flag is true)
  141. :rtype: list[str], if citations=False, otherwise list[tuple[str, str, str]]
  142. """
  143. embeddings = OpenAIEmbeddings()
  144. docsearch = OpenSearchVectorSearch(
  145. index_name=self._get_index(),
  146. embedding_function=embeddings,
  147. opensearch_url=f"{self.config.opensearch_url}",
  148. http_auth=self.config.http_auth,
  149. use_ssl=hasattr(self.config, "use_ssl") and self.config.use_ssl,
  150. verify_certs=hasattr(self.config, "verify_certs") and self.config.verify_certs,
  151. )
  152. pre_filter = {"match_all": {}} # default
  153. if len(where) > 0:
  154. pre_filter = {"bool": {"must": []}}
  155. for key, value in where.items():
  156. pre_filter["bool"]["must"].append({"term": {f"metadata.{key}.keyword": value}})
  157. docs = docsearch.similarity_search_with_score(
  158. input_query,
  159. search_type="script_scoring",
  160. space_type="cosinesimil",
  161. vector_field="embeddings",
  162. text_field="text",
  163. metadata_field="metadata",
  164. pre_filter=pre_filter,
  165. k=n_results,
  166. **kwargs,
  167. )
  168. contexts = []
  169. for doc, score in docs:
  170. context = doc.page_content
  171. if citations:
  172. metadata = doc.metadata
  173. metadata["score"] = score
  174. contexts.append(tuple((context, metadata)))
  175. else:
  176. contexts.append(context)
  177. return contexts
  178. def set_collection_name(self, name: str):
  179. """
  180. Set the name of the collection. A collection is an isolated space for vectors.
  181. :param name: Name of the collection.
  182. :type name: str
  183. """
  184. if not isinstance(name, str):
  185. raise TypeError("Collection name must be a string")
  186. self.config.collection_name = name
  187. def count(self) -> int:
  188. """
  189. Count number of documents/chunks embedded in the database.
  190. :return: number of documents
  191. :rtype: int
  192. """
  193. query = {"query": {"match_all": {}}}
  194. response = self.client.count(index=self._get_index(), body=query)
  195. doc_count = response["count"]
  196. return doc_count
  197. def reset(self):
  198. """
  199. Resets the database. Deletes all embeddings irreversibly.
  200. """
  201. # Delete all data from the database
  202. if self.client.indices.exists(index=self._get_index()):
  203. # delete index in ES
  204. self.client.indices.delete(index=self._get_index())
  205. def delete(self, where):
  206. """Deletes a document from the OpenSearch index"""
  207. query = {"query": {"bool": {"must": []}}}
  208. for key, value in where.items():
  209. query["query"]["bool"]["must"].append({"term": {f"metadata.{key}.keyword": value}})
  210. self.client.delete_by_query(index=self._get_index(), body=query)
  211. def _get_index(self) -> str:
  212. """Get the OpenSearch index for a collection
  213. :return: OpenSearch index
  214. :rtype: str
  215. """
  216. return self.config.collection_name