# web_page.py

import hashlib
import logging

import requests
from bs4 import BeautifulSoup

from embedchain.helper.json_serializable import register_deserializable
from embedchain.loaders.base_loader import BaseLoader
from embedchain.utils import clean_string


@register_deserializable
class WebPageLoader(BaseLoader):
    def load_data(self, url):
        """Load data from a web page."""
        # Fetch the page and parse the raw HTML.
        response = requests.get(url)
        data = response.content
        soup = BeautifulSoup(data, "html.parser")
        original_size = len(str(soup.get_text()))
        # Remove tags that usually carry navigation or boilerplate rather than content.
        tags_to_exclude = [
            "nav",
            "aside",
            "form",
            "header",
            "noscript",
            "svg",
            "canvas",
            "footer",
            "script",
            "style",
        ]
        for tag in soup(tags_to_exclude):
            tag.decompose()
        # Remove elements whose ids mark sidebars and menus.
        ids_to_exclude = ["sidebar", "main-navigation", "menu-main-menu"]
        for id in ids_to_exclude:
            tags = soup.find_all(id=id)
            for tag in tags:
                tag.decompose()
        # Remove elements whose CSS classes mark headers, sidebars and related-post blocks.
        classes_to_exclude = [
            "elementor-location-header",
            "navbar-header",
            "nav",
            "header-sidebar-wrapper",
            "blog-sidebar-wrapper",
            "related-posts",
        ]
        for class_name in classes_to_exclude:
            tags = soup.find_all(class_=class_name)
            for tag in tags:
                tag.decompose()
        content = soup.get_text()
        content = clean_string(content)

        # Log how much the cleanup shrank the page.
        cleaned_size = len(content)
        if original_size != 0:
            logging.info(
                f"[{url}] Cleaned page size: {cleaned_size} characters, down from {original_size} "
                f"(shrunk: {original_size - cleaned_size} chars, {round((1 - (cleaned_size / original_size)) * 100, 2)}%)"
            )
        meta_data = {
            "url": url,
        }

        # Derive a stable document id from the cleaned content and the source URL.
        doc_id = hashlib.sha256((content + url).encode()).hexdigest()
        return {
            "doc_id": doc_id,
            "data": [
                {
                    "content": content,
                    "meta_data": meta_data,
                }
            ],
        }
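

# Example usage (a minimal sketch, not part of the original module): the loader is
# instantiated directly and pointed at an illustrative URL; the returned dict carries
# a sha256-based doc_id plus a single {"content", "meta_data"} record.
if __name__ == "__main__":
    loader = WebPageLoader()
    result = loader.load_data("https://example.com")  # illustrative URL, an assumption
    print(result["doc_id"])
    print(result["data"][0]["meta_data"]["url"])
    print(result["data"][0]["content"][:200])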