Telemetry support for `ec` cli commands (#1030)

Sidharth Mohanty 1 year ago
parent commit 48c38b5dc3

+ 1 - 1
docs/deployment/fly_io.mdx

@@ -70,7 +70,7 @@ Feel free to edit the files as required.
 - `.env`: Contains environment variables for production
 - `.env.example`: Contains dummy environment variables (can ignore this file)
 - `embedchain.json`: Contains embedchain specific configuration for deployment (you don't need to configure this)
-- `requirements.txt`: Contains python dependencies for your FastAPI application
+- `requirements.txt`: Contains python dependencies for your application
 
 ## Step-3: Test app locally
 

+ 33 - 14
embedchain/cli.py

@@ -8,6 +8,8 @@ import click
 import pkg_resources
 from rich.console import Console
 
+from embedchain.telemetry.posthog import AnonymousTelemetry
+
 console = Console()
 
 
@@ -16,6 +18,26 @@ def cli():
     pass
 
 
+anonymous_telemetry = AnonymousTelemetry()
+
+def get_pkg_path_from_name(template: str):
+    try:
+        # Determine the installation location of the embedchain package
+        package_path = pkg_resources.resource_filename("embedchain", "")
+    except ImportError:
+        console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
+        return
+
+    # Construct the source path from the embedchain package
+    src_path = os.path.join(package_path, "deployment", template)
+
+    if not os.path.exists(src_path):
+        console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
+        return
+    
+    return src_path
+
+
 def setup_fly_io_app(extra_args):
     fly_launch_command = ["fly", "launch", "--region", "sjc", "--no-deploy"] + list(extra_args)
     try:
@@ -49,20 +71,10 @@ def setup_modal_com_app(extra_args):
 @click.option("--template", default="fly.io", help="The template to use.")
 @click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
 def create(template, extra_args):
-    try:
-        # Determine the installation location of the embedchain package
-        package_path = pkg_resources.resource_filename("embedchain", "")
-    except ImportError:
-        console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
-        return
-
-    # Construct the source path from the embedchain package
-    src_path = os.path.join(package_path, "deployment", template)
-
-    if not os.path.exists(src_path):
-        console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
-        return
-
+    anonymous_telemetry.capture(
+        event_name="ec_create", properties={"template_used": template}
+    )
+    src_path = get_pkg_path_from_name(template)
     shutil.copytree(src_path, os.getcwd(), dirs_exist_ok=True)
     env_sample_path = os.path.join(src_path, ".env.example")
     if os.path.exists(env_sample_path):
@@ -122,6 +134,9 @@ def dev(debug, host, port):
         embedchain_config = json.load(file)
         template = embedchain_config["provider"]
 
+    anonymous_telemetry.capture(
+        event_name="ec_dev", properties={"template_used": template}
+    )
     if template == "fly.io":
         run_dev_fly_io(debug, host, port)
     elif template == "modal.com":
@@ -207,6 +222,10 @@ def deploy():
     with open("embedchain.json", "r") as file:
         embedchain_config = json.load(file)
         template = embedchain_config["provider"]
+
+    anonymous_telemetry.capture(
+        event_name="ec_deploy", properties={"template_used": template}
+    )
     if template == "fly.io":
         deploy_fly()
     elif template == "modal.com":
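
The commit routes every `ec` subcommand (`create`, `dev`, `deploy`) through `anonymous_telemetry.capture`, tagging each event with the template in use. The actual `embedchain.telemetry.posthog.AnonymousTelemetry` implementation is not part of this diff; the sketch below only illustrates the pattern with the `posthog` client, and the class shape, placeholder API key, and random-UUID identity are assumptions, not the real module.

```python
import uuid
from typing import Optional

from posthog import Posthog


class AnonymousTelemetrySketch:
    """Hypothetical stand-in for embedchain.telemetry.posthog.AnonymousTelemetry."""

    def __init__(self, host: str = "https://app.posthog.com"):
        # Placeholder key; a real PostHog project API key would go here.
        self.client = Posthog(project_api_key="PLACEHOLDER_KEY", host=host)
        # A random UUID keeps events anonymous while still grouping one session.
        self.user_id = str(uuid.uuid4())

    def capture(self, event_name: str, properties: Optional[dict] = None):
        try:
            self.client.capture(self.user_id, event_name, properties or {})
        except Exception:
            # Telemetry must never break the CLI, so failures are swallowed.
            pass


# Usage mirroring the calls added in cli.py above:
telemetry = AnonymousTelemetrySketch()
telemetry.capture(event_name="ec_create", properties={"template_used": "fly.io"})
```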

+ 3 - 0
embedchain/deployment/fly.io/app.py

@@ -2,6 +2,9 @@ from fastapi import FastAPI, responses
 from pydantic import BaseModel
 
 from embedchain import Pipeline
+from dotenv import load_dotenv
+
+load_dotenv(".env")
 
 app = FastAPI(title="Embedchain FastAPI App")
 embedchain_app = Pipeline()
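
Calling `load_dotenv(".env")` before `Pipeline()` is constructed matters: the pipeline reads provider credentials from the environment at creation time, so the variables must already be in `os.environ`. A minimal sketch of that ordering, assuming the template's `.env` defines `OPENAI_API_KEY` (the exact variable depends on the configured provider):

```python
import os

from dotenv import load_dotenv

# Populate os.environ from the template's .env before anything reads it.
load_dotenv(".env")

# Assumed variable name; fail fast with a clear message if it is missing.
if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY not set; add it to .env before starting the app.")
```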

+ 1 - 1
embedchain/deployment/fly.io/requirements.txt

@@ -1,4 +1,4 @@
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain
 beautifulsoup4

+ 1 - 1
embedchain/deployment/modal.com/requirements.txt

@@ -1,4 +1,4 @@
 modal==0.56.4329
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain

+ 4 - 2
embedchain/llm/openai.py

@@ -33,14 +33,16 @@ class OpenAILlm(BaseLlm):
         if config.top_p:
             kwargs["model_kwargs"]["top_p"] = config.top_p
         if config.stream:
-            from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+            from langchain.callbacks.streaming_stdout import \
+                StreamingStdOutCallbackHandler
 
             callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
             chat = ChatOpenAI(**kwargs, streaming=config.stream, callbacks=callbacks)
         else:
             chat = ChatOpenAI(**kwargs)
         if self.functions is not None:
-            from langchain.chains.openai_functions import create_openai_fn_runnable
+            from langchain.chains.openai_functions import \
+                create_openai_fn_runnable
             from langchain.prompts import ChatPromptTemplate
 
             structured_prompt = ChatPromptTemplate.from_messages(messages)
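
The reflowed imports above are loaded lazily, only when streaming or function calling is requested. Taken on its own, the streaming branch amounts to the following LangChain usage (a sketch with an assumed model name and prompt, not embedchain's exact call):

```python
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI

# streaming=True pushes tokens to the callback as they arrive, so the
# stdout handler prints the answer incrementally instead of all at once.
chat = ChatOpenAI(
    model_name="gpt-3.5-turbo",  # assumed; embedchain takes the model from its config
    temperature=0,
    streaming=True,
    callbacks=[StreamingStdOutCallbackHandler()],
)
chat.predict("Summarize what embedchain does in one sentence.")
```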

+ 3 - 1
examples/mistral-streamlit/app.py

@@ -1,7 +1,9 @@
 import os
-from embedchain import Pipeline as App
+
 import streamlit as st
 
+from embedchain import Pipeline as App
+
 with st.sidebar:
     huggingface_access_token = st.text_input("Hugging face Token", key="chatbot_api_key", type="password")
     "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"

+ 1 - 1
pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "embedchain"
-version = "0.1.35"
+version = "0.1.36"
 description = "Data platform for LLMs - Load, index, retrieve and sync any unstructured data"
 authors = [
     "Taranjeet Singh <taranjeet@embedchain.ai>",