From 576bf1c16182126845fa27ffb515c4335dddab32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jacques=20Verr=C3=A9?= Date: Thu, 5 Sep 2024 14:04:43 +0100 Subject: [PATCH] Jacques/docs v1.1 (#180) * Updated frontend contribution guide * Fixed broken links * Fixed broken links * Updated PyPi readme file * Fix issue with SDK setup.py file * Fix linter issues --------- Co-authored-by: Nimrod Lahav <3535799+Nimrod007@users.noreply.github.com> --- CONTRIBUTING.md | 13 +++--- README.md | 42 +++++++++---------- .../evaluate_hallucination_metric.ipynb | 2 +- .../cookbook/evaluate_moderation_metric.ipynb | 2 +- .../docs/cookbook/langchain.ipynb | 2 +- .../docs/cookbook/llama-index.ipynb | 2 +- .../documentation/docs/cookbook/openai.ipynb | 2 +- .../documentation/docs/cookbook/ragas.ipynb | 2 +- .../documentation/docs/evaluation/concepts.md | 8 ---- .../documentation/docs/evaluation/overview.md | 8 ---- .../documentation/docs/home.md | 12 +++--- .../docs/monitoring/_category_.json | 8 ---- .../docs/monitoring/add_traces_to_dataset.md | 8 ---- .../docs/monitoring/annotate_traces.md | 8 ---- .../documentation/docs/monitoring/overview.md | 8 ---- .../documentation/docs/quickstart.md | 4 +- .../docs/testing/pytest_integration.md | 2 +- .../docs/tracing/annotate_traces.md | 5 +-- .../documentation/docs/tracing/concepts.md | 8 ---- .../docs/tracing/integrations/overview.md | 6 +-- .../docs/tracing/log_distributed_traces.md | 4 +- .../documentation/docs/tracing/log_traces.md | 6 +-- .../documentation/docs/tracing/overview.md | 8 ---- .../documentation/docusaurus.config.ts | 2 +- .../python-sdk-docs/source/Objects/Span.rst | 4 +- .../python-sdk-docs/source/Objects/Trace.rst | 4 +- .../python-sdk-docs/source/index.rst | 7 ++++ .../llama_index/LlamaIndexCallbackHandler.rst | 4 ++ .../source/integrations/llama_index/index.rst | 19 +++++++++ sdks/python/setup.py | 9 ++-- 30 files changed, 90 insertions(+), 129 deletions(-) delete mode 100644 apps/opik-documentation/documentation/docs/evaluation/concepts.md delete mode 100644 apps/opik-documentation/documentation/docs/evaluation/overview.md delete mode 100644 apps/opik-documentation/documentation/docs/monitoring/_category_.json delete mode 100644 apps/opik-documentation/documentation/docs/monitoring/add_traces_to_dataset.md delete mode 100644 apps/opik-documentation/documentation/docs/monitoring/annotate_traces.md delete mode 100644 apps/opik-documentation/documentation/docs/monitoring/overview.md delete mode 100644 apps/opik-documentation/documentation/docs/tracing/concepts.md delete mode 100644 apps/opik-documentation/documentation/docs/tracing/overview.md create mode 100644 apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/LlamaIndexCallbackHandler.rst create mode 100644 apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/index.rst diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7269921cee..298703ec20 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -239,6 +239,9 @@ The Opik frontend is a React application that is located in `apps/opik-frontend` In order to run the frontend locally, you need to have `npm` installed. Once installed, you can run the frontend locally using the following command: ```bash +# Run the backend locally with the flag "--local-fe" +./build_and_run.sh --local-fe + cd apps/opik-frontend # Install dependencies - Only needs to be run once @@ -248,20 +251,16 @@ npm install npm run start ``` -You can then access the development frontend at `http://localhost:5174/`. 
Any change you make to the frontend will be updated in real-time.
-
-The dev server is set up to work with Opik BE run on `http://localhost:8080`. All requests made to `http://localhost:5174/api` are proxied to the backend.
-The server port can be changed in `vite.config.ts` file section `proxy`.
+You can then access the development frontend at `http://localhost:5173/`. Any change you make to the frontend will be updated in real-time.

-> [!NOTE]
-> You will need to have the backend running locally in order for the frontend to work. For this, we recommend running a local instance of Opik using `opik server install`.
+> You will need to open the frontend at `http://localhost:5173/`, ignoring the output of the `npm run start` command, which will recommend opening `http://localhost:5174/`. If you open `http://localhost:5174/`, the backend will not be accessible.

Before submitting a PR, please ensure that your code passes the test suite, the linter and the type checker:

```bash
cd apps/opik-frontend

-npm run test
+npm run e2e
npm run lint
npm run typecheck
```
diff --git a/README.md b/README.md
index e6cbb64362..2955f4136e 100644
--- a/README.md
+++ b/README.md
@@ -35,22 +35,22 @@ Confidently evaluate, test and monitor LLM applications. 

## 🚀 What is Opik?

-[Opik](https://www.comet.com/site/products/opik) is an open-source platform for evaluating, testing and monitoring LLM applications. Built by [Comet](https://www.comet.com).
+Opik is an open-source platform for evaluating, testing and monitoring LLM applications. Built by [Comet](https://www.comet.com).
You can use Opik for:

* **Development:**

-    * **Tracing:** Track all LLM calls and traces during development and production ([Quickstart](https://www.comet.com/docs/opik/quickstart), [Integrations](https://www.comet.com/docs/opik/integrations/overview))
-    * **Annotations:** Annotate your LLM calls by logging feedback scores using the [Python SDK](...), [Rest API](...) or the [UI](...).
+    * **Tracing:** Track all LLM calls and traces during development and production ([Quickstart](https://www.comet.com/docs/opik/quickstart/), [Integrations](https://www.comet.com/docs/opik/integrations/overview/))
+    * **Annotations:** Annotate your LLM calls by logging feedback scores using the [Python SDK](https://www.comet.com/docs/opik/tracing/annotate_traces/#annotating-traces-and-spans-using-the-sdk) or the [UI](https://www.comet.com/docs/opik/tracing/annotate_traces/#annotating-traces-through-the-ui).

* **Evaluation**: Automate the evaluation process of your LLM application:

-    * **Datasets and Experiments**: Store test cases and run experiments ([Datasets](https://www.comet.com/docs/opik/evaluation/manage_datasets), [Evaluate your LLM Application](https://www.comet.com/docs/opik/evaluation/evaluate_your_llm))
+    * **Datasets and Experiments**: Store test cases and run experiments ([Datasets](https://www.comet.com/docs/opik/evaluation/manage_datasets/), [Evaluate your LLM Application](https://www.comet.com/docs/opik/evaluation/evaluate_your_llm/))

-    * **LLM as a judge metrics**: Use Opik's LLM as a judge metric for complex issues like [hallucination detection](https://www.comet.com/docs/opik/evaluation/metrics/hallucination), [moderation](https://www.comet.com/docs/opik/evaluation/metrics/moderation) and RAG evaluation ([Answer Relevance](https://www.comet.com/docs/opik/evaluation/metrics/answer_relevance), [Context Precision](https://www.comet.com/docs/opik/evaluation/metrics/context_precision) and [Answer Relevance](https://www.comet.com/docs/opik/evaluation/metrics/answer_relevance))
+    * **LLM as a judge metrics**: Use Opik's LLM as a judge metrics for complex issues like [hallucination detection](https://www.comet.com/docs/opik/evaluation/metrics/hallucination/), [moderation](https://www.comet.com/docs/opik/evaluation/metrics/moderation/) and RAG evaluation ([Answer Relevance](https://www.comet.com/docs/opik/evaluation/metrics/answer_relevance/) and [Context Precision](https://www.comet.com/docs/opik/evaluation/metrics/context_precision/))

-    * **CI/CD integration**: Run evaluations as part of your CI/CD pipeline using our [PyTest integration](...)
+    * **CI/CD integration**: Run evaluations as part of your CI/CD pipeline using our [PyTest integration](https://www.comet.com/docs/opik/testing/pytest_integration/)

* **Production Monitoring**: Monitor your LLM application in production and easily close the feedback loop by adding error traces to your evaluation datasets. 
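As a rough illustration of the **Annotations** bullet above, the snippet below sketches how a feedback score might be attached to an already-logged trace from Python. This is only a sketch: the `log_traces_feedback_scores` method name and the score-dictionary fields are assumptions based on the `FeedbackScoreDict` object listed in the SDK reference later in this patch, and the trace id is a placeholder; the linked annotate_traces guide remains the authoritative reference.

```python
from opik import Opik

client = Opik()

# Hypothetical sketch: attach a feedback score to an existing trace.
# The method name and dict fields are assumed from the SDK reference
# (FeedbackScoreDict); "<trace-id>" is a placeholder for a real trace id.
client.log_traces_feedback_scores(
    scores=[
        {
            "id": "<trace-id>",
            "name": "user_feedback",
            "value": 1.0,
            "reason": "The answer was accurate and well sourced.",
        }
    ]
)
```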
@@ -74,8 +74,8 @@ For more information about the different deployment options, please see our depl | Installation methods | Docs link | | ------------------- | --------- | -| Local instance | [![Minikube](https://img.shields.io/badge/minikube-%230db7ed.svg?&logo=data:image/svg%2bxml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyMDAiIGhlaWdodD0iMjAwIiB2aWV3Qm94PSIwIDAgMzIgMzIiPgogIDxkZWZzPgogICAgPG1hc2sgaWQ9InZzY29kZUljb25zRm9sZGVyVHlwZU1pbmlrdWJlMCIgd2lkdGg9IjIxIiBoZWlnaHQ9IjIwLjQ0OSIgeD0iMTAiIHk9IjEwLjU3NSIgbWFza1VuaXRzPSJ1c2VyU3BhY2VPblVzZSI+CiAgICAgIDxwYXRoIGZpbGw9IiNmZmYiIGZpbGwtcnVsZT0iZXZlbm9kZCIgZD0iTTMxIDMxLjAyNXYtMjAuNDVIMTB2MjAuNDVoMjF6Ii8+CiAgICA8L21hc2s+CiAgPC9kZWZzPgogIDxwYXRoIGZpbGw9IiM1NWI1YmYiIGQ9Ik0yNy45IDZoLTkuOGwtMiA0SDV2MTdoMjVWNlptLjEgNGgtNy44bDEtMkgyOFoiLz4KICA8ZyBtYXNrPSJ1cmwoI3ZzY29kZUljb25zRm9sZGVyVHlwZU1pbmlrdWJlMCkiPgogICAgPHBhdGggZmlsbD0iIzMyNmRlNiIgZmlsbC1ydWxlPSJldmVub2RkIiBkPSJNMjAuNTIgMTAuNTc1YTIuMDM4IDIuMDM4IDAgMCAwLS44NDEuMTkxbC02Ljg3MSAzLjI4NmExLjkyMSAxLjkyMSAwIDAgMC0xLjA1OSAxLjMxN2wtMS43IDcuMzczYTEuOTI0IDEuOTI0IDAgMCAwIC4zODEgMS42NTZsNC43NTIgNS45MDdhMS45MTcgMS45MTcgMCAwIDAgMS41MDcuNzJoNy42MThhMS45MTcgMS45MTcgMCAwIDAgMS41MDctLjcybDQuNzU0LTUuOTA1YTEuOTE0IDEuOTE0IDAgMCAwIC4zODEtMS42NTZsLTEuNy03LjM3M2ExLjkyMSAxLjkyMSAwIDAgMC0xLjA1OS0xLjMxN2wtNi44NDMtMy4yODZhMS45MzkgMS45MzkgMCAwIDAtLjgyOS0uMTkxbTAgLjYzOWExLjMyIDEuMzIgMCAwIDEgLjU1Ny4xMjJsNi44NzEgMy4yNzJhMS4zMjIgMS4zMjIgMCAwIDEgLjcwNi44ODNsMS43IDcuMzczYTEuMjY5IDEuMjY5IDAgMCAxLS4yNTggMS4xMTNsLTQuNzUyIDUuOTA3YTEuMyAxLjMgMCAwIDEtMS4wMTkuNDg5SDE2LjdhMS4zIDEuMyAwIDAgMS0xLjAxOS0uNDg5bC00Ljc1Mi01LjkwN2ExLjM2MSAxLjM2MSAwIDAgMS0uMjU4LTEuMTEzbDEuNy03LjM3M2ExLjMgMS4zIDAgMCAxIC43MDYtLjg4M2w2Ljg3MS0zLjI4NmExLjYzMyAxLjYzMyAwIDAgMSAuNTctLjEwOCIvPgogIDwvZz4KICA8cGF0aCBmaWxsPSIjMWZiZmNmIiBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNi41NDUgMjguNjQ5YTEuMjYxIDEuMjYxIDAgMCAwIC45OS40NzRsNS45NzgtLjAxYTEuMjg5IDEuMjg5IDAgMCAwIC45ODctLjQ3NWwzLjY0NC00LjU4OGExLjI4IDEuMjggMCAwIDAgLjA5NC0uNjYxdi02LjQwN2wtNy44MyA0LjUwOGwtNy44MjItNC41djYuNGExLjA3NiAxLjA3NiAwIDAgMCAuMjQxLjY3MVoiLz4KICA8cGF0aCBmaWxsPSIjYzllOWVjIiBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGQ9Im0yMC40MDggMjEuNDlsNy44My00LjUwOGwtNy44MzctNC41MDVsLTcuODE1IDQuNTA5bDcuODIyIDQuNTA0eiIvPgogIDxwYXRoIGZpbGw9IiMzMjZkZTYiIGZpbGwtcnVsZT0iZXZlbm9kZCIgZD0iTTIyLjI3NiAyNC45NzNhLjU0NS41NDUgMCAwIDEtLjcxNS0uMTIyYS40NjQuNDY0IDAgMCAxLS4xLS4yMjVsLS4xODUtMy4zMjZhNi4xOTQgNi4xOTQgMCAwIDEgMy42NzQgMS43NzZabS0yLjc3Ni0uNDI5YS41NTkuNTU5IDAgMCAxLS41NTEuNTMxYS40ODIuNDgyIDAgMCAxLS4yNDUtLjA2MWwtMi43MTUtMS45MzlBNi4yMzMgNi4yMzMgMCAwIDEgMTkuMDMgMjEuNGMuMjI1LS4wNDEuNDI5LS4wODIuNjU0LS4xMjJabTcuNjM0LTEuMzY3bC4yLS4xODR2LS4wNDFhLjQ1OS40NTkgMCAwIDEgLjEtLjMwNmE1Ljk3MSA1Ljk3MSAwIDAgMSAuOTE4LS42MzJjLjA2MS0uMDQxLjEyMy0uMDYyLjE4NC0uMWEyLjk4NiAyLjk4NiAwIDAgMCAuMzQ3LS4yYy4wMi0uMDIuMDYxLS4wNC4xLS4wODFjLjAyLS4wMjEuMDQxLS4wMjEuMDQxLS4wNDFhLjY4Mi42ODIgMCAwIDAgLjE0My0uOTE5YS41OC41OCAwIDAgMC0uNDctLjIyNGEuNzU5Ljc1OSAwIDAgMC0uNDQ5LjE2M2wtLjA0MS4wNDFjLS4wNC4wMi0uMDYxLjA2MS0uMS4wODJhMy40NDcgMy40NDcgMCAwIDAtLjI2NS4yODVhLjk2NC45NjQgMCAwIDEtLjE0My4xNDNhNS4yNCA1LjI0IDAgMCAxLS44MTYuNzM1YS4zMzEuMzMxIDAgMCAxLS4xODQuMDYxYS4yNjYuMjY2IDAgMCAxLS4xMjMtLjAyaC0uMDRsLS4yMzYuMTYxYTkuOTUzIDkuOTUzIDAgMCAwLS44MzctLjc3NmE4LjE1NyA4LjE1NyAwIDAgMC00LjI2Ni0xLjY5NGwtLjAyLS4yNjVsLS4wNDEtLjA0MWEuNDI5LjQyOSAwIDAgMS0uMTY0LS4yNjVhNy4xOTMgNy4xOTMgMCAwIDEgLjA2Mi0xLjF2LS4wMjFhLjcuNyAwIDAgMSAuMDQxLS4yYy4wMi0uMTIyLjA0LS4yNDUuMDYxLS4zODh2LS4xODNhLjYyMy42MjMgMCAwIDAtMS4wODItLjQ3YS42NDYuNjQ2IDAgMCAwLS4xODQuNDd2LjE2M2ExLjIxNCAxLjIxNCAwIDAgMCAuMDYxLjM4OGMuMDIxLjA2MS4wMjEuMTIyLjA0
MS4ydi4wMmE1LjMzIDUuMzMgMCAwIDEgLjA2MiAxLjFhLjQzMi40MzIgMCAwIDEtLjE2NC4yNjVsLS4wNDEuMDQxbC0uMDIuMjY1YTEwLjQ2MSAxMC40NjEgMCAwIDAtMS4xLjE2M2E3Ljg3IDcuODcgMCAwIDAtNC4wNDIgMi4yODZsLS4yLS4xNDNoLS4wNDFjLS4wNCAwLS4wODEuMDIxLS4xMjIuMDIxYS4zMzkuMzM5IDAgMCAxLS4xODQtLjA2MWE1LjQyIDUuNDIgMCAwIDEtLjgxNi0uNzU2YS45NjEuOTYxIDAgMCAwLS4xNDMtLjE0MmEzLjQ1NSAzLjQ1NSAwIDAgMC0uMjY1LS4yODZjLS4wMjEtLjAyMS0uMDYyLS4wNDEtLjEtLjA4MmMtLjAyMS0uMDItLjA0MS0uMDItLjA0MS0uMDQxYS43MTUuNzE1IDAgMCAwLS40NTUtLjE2OGEuNTgxLjU4MSAwIDAgMC0uNDcuMjI1YS42ODEuNjgxIDAgMCAwIC4xNDMuOTE4Yy4wMjEgMCAuMDIxLjAyLjA0MS4wMmMuMDQxLjAyMS4wNjEuMDYyLjEuMDgyYTIuOTg2IDIuOTg2IDAgMCAwIC4zNDcuMmEuODQ2Ljg0NiAwIDAgMSAuMTg0LjFhNS45NyA1Ljk3IDAgMCAxIC45MTguNjMzYS4zNzUuMzc1IDAgMCAxIC4xLjMwNnYuMDQxbC4yLjE4NGEuNzY3Ljc2NyAwIDAgMC0uMS4xNjNhNy45ODYgNy45ODYgMCAwIDAtLjYxMiAxLjE3OGwxLjE2NCAxLjQzNmE2LjQxIDYuNDEgMCAwIDEgLjY5My0xLjU5M2wyLjQyOSAyLjE2M2EuNTQ0LjU0NCAwIDAgMSAuMDYyLjc1NWEuNDExLjQxMSAwIDAgMS0uMjQ1LjE2NGwtMS40LjQwN2wuNy44NmExLjI2MSAxLjI2MSAwIDAgMCAuOTkuNDc0bDUuOTc4LS4wMWExLjI4OSAxLjI4OSAwIDAgMCAuOTg3LS40NzVsLjY1NC0uODI0bC0xLjQzOC0uNDA3YS41NTMuNTUzIDAgMCAxLS4zODgtLjY1M2EuNDkuNDkgMCAwIDEgLjEyMy0uMjI1bDIuNDY5LTIuMjIyYTYuNDYzIDYuNDYzIDAgMCAxIC43MDUgMS42NTZsMS4xODctMS40OTRhOC42MTYgOC42MTYgMCAwIDAtLjY4Ny0xLjI4NVoiLz4KPC9zdmc+)](https://www.comet.com/docs/opik/self-host/self_hosting_opik#all-in-one-installation) -| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?&logo=kubernetes&logoColor=white)](https://www.comet.com/docs/opik/self-host/self_hosting_opik#all-in-one-installation) | +| Local instance | [![All-in-one isntallation](https://img.shields.io/badge/All--in--one%20Installer-%230db7ed)](https://www.comet.com/docs/opik/self-host/self_hosting_opik/#all-in-one-installation) +| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?&logo=kubernetes&logoColor=white)](https://www.comet.com/docs/opik/self-host/self_hosting_opik/#kubernetes-installation) | ## 🏁 Get Started @@ -83,15 +83,17 @@ For more information about the different deployment options, please see our depl If you are logging traces to the Cloud Opik platform, you will need to get your API key from the user menu and set it as the `OPIK_API_KEY` environment variable: ```bash -export OPIK_API_KEY= +export OPIK_API_KEY= +export OPIK_WORKSPACE= ``` -If you are using a local Opik instance, you don't need to set the `OPIK_API_KEY` environment variable and isntead set the environment variable `OPIK_BASE_URL` to point to your local Opik instance: +If you are using a local Opik instance, you don't need to set the `OPIK_API_KEY` or `OPIK_WORKSPACE` environment variable and isntead set the environment variable `OPIK_BASE_URL` to point to your local Opik instance: + ```bash export OPIK_BASE_URL=http://localhost:5173 ``` -You are now ready to start logging traces using either the [Python SDK](https://www.comet.com/docs/opik/python-sdk/overview) or the [REST API](https://www.comet.com/docs/opik/rest-api). +You are now ready to start logging traces using the [Python SDK](https://www.comet.com/docs/opik/python-sdk-reference/). ### 📝 Logging Traces @@ -99,16 +101,14 @@ The easiest way to get started is to use one of our integrations. 
Opik supports:

| Integration | Description | Documentation | Try in Colab |
| ----------- | ----------- | ------------- | ------------ |
-| OpenAI | Log traces for all OpenAI LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/openai) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb) |
-| LiteLLM | Log traces for all OpenAI LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/openai) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/litellm.ipynb) |
-| LangChain | Log traces for all LangChain LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/langchain) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb) |
-| LlamaIndex | Log traces for all LlamaIndex LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/llamaindex) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb) |
-| Ragas | Log traces for all Ragas evaluations | [Documentation](https://www.comet.com/docs/opik/integrations/ragas) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/ragas.ipynb) |
+| OpenAI | Log traces for all OpenAI LLM calls | [Documentation](https://www.comet.com/docs/opik/tracing/integrations/openai/) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb) |
+| LangChain | Log traces for all LangChain LLM calls | [Documentation](https://www.comet.com/docs/opik/tracing/integrations/langchain/) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb) |
+| LlamaIndex | Log traces for all LlamaIndex LLM calls | [Documentation](https://www.comet.com/docs/opik/tracing/integrations/llama_index) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb) |

> [!TIP]
> If the framework you are using is not listed above, feel free to [open an issue](https://github.com/comet-ml/opik/issues) or submit a PR with the integration. 
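For example, the OpenAI integration listed in the table above works by wrapping the OpenAI client. The minimal sketch below follows the pattern from the OpenAI cookbook linked above and assumes `OPENAI_API_KEY` (plus `OPIK_API_KEY` or `OPIK_BASE_URL`) is configured; see that documentation for the authoritative usage.

```python
from openai import OpenAI
from opik.integrations.openai import track_openai

# Wrap the OpenAI client so every chat completion call is logged as a trace.
openai_client = track_openai(OpenAI())

response = openai_client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "What is Opik?"}],
)
print(response.choices[0].message.content)
```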
-If you are not using any of the frameworks above, you can also using the `track` function decorator to [log traces](https://www.comet.com/docs/opik/tracing/log_traces):
+If you are not using any of the frameworks above, you can also use the `track` function decorator to [log traces](https://www.comet.com/docs/opik/tracing/log_traces/):

```python
from opik import track
@@ -125,7 +125,7 @@ def my_llm_function(user_question: str) -> str:

### 🧑‍⚖️ LLM as a Judge metrics

-The Python Opik SDK includes a number of LLM as a judge metrics to help you evaluate your LLM application. Learn more about it in the [metrics documentation](https://www.comet.com/docs/opik/evaluation/metrics/overview).
+The Python Opik SDK includes a number of LLM as a judge metrics to help you evaluate your LLM application. Learn more about them in the [metrics documentation](https://www.comet.com/docs/opik/evaluation/metrics/overview/).

To use them, simply import the relevant metric and use the `score` function:

@@ -145,9 +145,9 @@ Opik also includes a number of pre-built heuristic metrics as well as the abilit

### 🔍 Evaluating your LLM Application

-Opik allows you to evaluate your LLM application during development through [Datasets](https://www.comet.com/docs/opik/evaluation/manage_datasets) and [Experiments](https://www.comet.com/docs/opik/evaluation/evaluate_your_llm).
+Opik allows you to evaluate your LLM application during development through [Datasets](https://www.comet.com/docs/opik/evaluation/manage_datasets/) and [Experiments](https://www.comet.com/docs/opik/evaluation/evaluate_your_llm/).

-You can also run evaluations as part of your CI/CD pipeline using our [PyTest integration](...).
+You can also run evaluations as part of your CI/CD pipeline using our [PyTest integration](https://www.comet.com/docs/opik/testing/pytest_integration/).

## 🤝 Contributing

@@ -156,6 +156,6 @@ There are many ways to contribute to Opik:
* Submit [bug reports](https://github.com/comet-ml/opik/issues) and [feature requests](https://github.com/comet-ml/opik/issues)
* Review the documentation and submit [Pull Requests](https://github.com/comet-ml/opik/pulls) to improve it
* Speaking or writing about Opik and [letting us know](https://chat.comet.com)
-* Upvoting [popular feature requests](https://github.com/comet-ml/opik/issues?q=is%3Aissue+is%3Aopen+label%3A%22feature+request%22) to show your support
+* Upvoting [popular feature requests](https://github.com/comet-ml/opik/issues?q=is%3Aissue+is%3Aopen+label%3A%22enhancement%22) to show your support

To learn more about how to contribute to Opik, please see our [contributing guidelines](CONTRIBUTING.md).
diff --git a/apps/opik-documentation/documentation/docs/cookbook/evaluate_hallucination_metric.ipynb b/apps/opik-documentation/documentation/docs/cookbook/evaluate_hallucination_metric.ipynb
index 48c132417e..baa00f5a80 100644
--- a/apps/opik-documentation/documentation/docs/cookbook/evaluate_hallucination_metric.ipynb
+++ b/apps/opik-documentation/documentation/docs/cookbook/evaluate_hallucination_metric.ipynb
@@ -17,7 +17,7 @@
 "\n",
 "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n",
 "\n",
-    "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." 
+ "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." ] }, { diff --git a/apps/opik-documentation/documentation/docs/cookbook/evaluate_moderation_metric.ipynb b/apps/opik-documentation/documentation/docs/cookbook/evaluate_moderation_metric.ipynb index 98eb7e5a0f..c89d829232 100644 --- a/apps/opik-documentation/documentation/docs/cookbook/evaluate_moderation_metric.ipynb +++ b/apps/opik-documentation/documentation/docs/cookbook/evaluate_moderation_metric.ipynb @@ -19,7 +19,7 @@ "\n", "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", "\n", - "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." ] }, { diff --git a/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb b/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb index aee519aa8d..ed81c567bd 100644 --- a/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb +++ b/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb @@ -23,7 +23,7 @@ "\n", "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", "\n", - "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." ] }, { diff --git a/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb b/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb index 5cc470f2f3..d571ed0d87 100644 --- a/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb +++ b/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb @@ -25,7 +25,7 @@ "\n", "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", "\n", - "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." 
] }, { diff --git a/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb b/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb index 95ec0d7ee6..6541aa681b 100644 --- a/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb +++ b/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb @@ -17,7 +17,7 @@ "\n", "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", "\n", - "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." ] }, { diff --git a/apps/opik-documentation/documentation/docs/cookbook/ragas.ipynb b/apps/opik-documentation/documentation/docs/cookbook/ragas.ipynb index 0a6ce78e65..86e3974065 100644 --- a/apps/opik-documentation/documentation/docs/cookbook/ragas.ipynb +++ b/apps/opik-documentation/documentation/docs/cookbook/ragas.ipynb @@ -22,7 +22,7 @@ "\n", "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", "\n", - "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik) for more information." + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." ] }, { diff --git a/apps/opik-documentation/documentation/docs/evaluation/concepts.md b/apps/opik-documentation/documentation/docs/evaluation/concepts.md deleted file mode 100644 index 5721427b5b..0000000000 --- a/apps/opik-documentation/documentation/docs/evaluation/concepts.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 2 -sidebar_label: Concepts - TBD ---- - -# Concepts - -Under construction \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/evaluation/overview.md b/apps/opik-documentation/documentation/docs/evaluation/overview.md deleted file mode 100644 index 4816e07660..0000000000 --- a/apps/opik-documentation/documentation/docs/evaluation/overview.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 1 -sidebar_label: Overview - TBD ---- - -# Overview - -Under construction \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/home.md b/apps/opik-documentation/documentation/docs/home.md index 80b9f5d13b..5e51227fff 100644 --- a/apps/opik-documentation/documentation/docs/home.md +++ b/apps/opik-documentation/documentation/docs/home.md @@ -18,9 +18,9 @@ During development, you can use the platform to log, view and debug your LLM tra 1. Log traces using: - a. One of our [integrations](/tracing/integrations/overview). + a. One of our [integrations](/tracing/integrations/overview.md). - b. The `@track` decorator for Python, learn more in the [Logging Traces](/tracing/log_traces) guide. + b. The `@track` decorator for Python, learn more in the [Logging Traces](/tracing/log_traces.md) guide. 3. [Annotate and label traces](/tracing/annotate_traces) through the SDK or the UI. 
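To make item 1.b in the list above concrete, here is a small sketch (not part of the original page) of how nested `@track`-decorated functions are logged: the outermost call produces a trace and the nested call appears as a span within it.

```python
from opik import track

@track
def retrieve_context(question: str) -> str:
    # Placeholder retrieval step; logged as a span inside the parent trace.
    return "Opik is an open-source platform for evaluating LLM applications."

@track
def answer_question(question: str) -> str:
    # The top-level call creates the trace; retrieve_context becomes a span.
    context = retrieve_context(question)
    return f"Answer based on context: {context}"

answer_question("What is Opik?")
```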
@@ -28,13 +28,13 @@ During development, you can use the platform to log, view and debug your LLM tra Evaluating the output of your LLM calls is critical to ensure that your application is working as expected and can be challenging. Using the Comet LLM Evaluation platformm, you can: -1. Use one of our [LLM as a Judge evaluators](/evaluation/metrics/overview) or [Heuristic evaluators](/evaluation/metrics/heuristic_metrics) to score your traces and LLM calls -2. [Store evaluation datasets](/evaluation/manage_datasets) in the platform and [run evaluations](/evaluation/evaluate_your_llm) -3. Use our [pytest integration](/testing/pytest_integration) to track unit test results and compare results between runs +1. Use one of our [LLM as a Judge evaluators](/evaluation/metrics/overview.md) or [Heuristic evaluators](/evaluation/metrics/heuristic_metrics.md) to score your traces and LLM calls +2. [Store evaluation datasets](/evaluation/manage_datasets.md) in the platform and [run evaluations](/evaluation/evaluate_your_llm.md) +3. Use our [pytest integration](/testing/pytest_integration.md) to track unit test results and compare results between runs ## Getting Started [Comet](https://www.comet.com/site) provides a managed Cloud offering for Opik, simply [create an account](https://www.comet.com/signup?from=llm) to get started. -You can also run Opik locally using our [local installer](//self-host/self_hosting_opik#all-in-one-installation). If you are looking for a more production ready deployment, you can also use our [Kubernetes deployment option](/self-host/self_hosting_opik#kubernetes-installation). +You can also run Opik locally using our [local installer](/self-host/self_hosting_opik.md#all-in-one-installation). If you are looking for a more production ready deployment, you can also use our [Kubernetes deployment option](/self-host/self_hosting_opik.md#kubernetes-installation). 
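To illustrate the LLM as a Judge evaluators mentioned in the evaluation list above, the sketch below scores a single response with the `Hallucination` metric. The exact constructor and `score` parameter names are assumptions based on the metrics documentation, and the judge model needs an LLM API key (e.g. `OPENAI_API_KEY`), so treat this as a sketch rather than a definitive reference.

```python
from opik.evaluation.metrics import Hallucination

# Assumed usage: the judge LLM requires an API key (e.g. OPENAI_API_KEY).
metric = Hallucination()
result = metric.score(
    input="What is the capital of France?",
    output="The capital of France is Paris.",
    context=["France is a country in Western Europe. Its capital is Paris."],
)
print(result.value, result.reason)
```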
diff --git a/apps/opik-documentation/documentation/docs/monitoring/_category_.json b/apps/opik-documentation/documentation/docs/monitoring/_category_.json deleted file mode 100644 index e7c5adcf8b..0000000000 --- a/apps/opik-documentation/documentation/docs/monitoring/_category_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "label": "Monitoring", - "position": 5, - "link": { - "type": "generated-index" - }, - "collapsed": false - } \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/monitoring/add_traces_to_dataset.md b/apps/opik-documentation/documentation/docs/monitoring/add_traces_to_dataset.md deleted file mode 100644 index 7e9acf7207..0000000000 --- a/apps/opik-documentation/documentation/docs/monitoring/add_traces_to_dataset.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 3 -sidebar_label: Add Traces to Datasets - TBD ---- - -# Add Traces to Datasets - -Under construction \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/monitoring/annotate_traces.md b/apps/opik-documentation/documentation/docs/monitoring/annotate_traces.md deleted file mode 100644 index 2c63c71fef..0000000000 --- a/apps/opik-documentation/documentation/docs/monitoring/annotate_traces.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 2 -sidebar_label: Annotate Traces - TBD ---- - -# Annotate Traces - -Under construction \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/monitoring/overview.md b/apps/opik-documentation/documentation/docs/monitoring/overview.md deleted file mode 100644 index 4816e07660..0000000000 --- a/apps/opik-documentation/documentation/docs/monitoring/overview.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 1 -sidebar_label: Overview - TBD ---- - -# Overview - -Under construction \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/quickstart.md b/apps/opik-documentation/documentation/docs/quickstart.md index 81215da3ce..923d743ee2 100644 --- a/apps/opik-documentation/documentation/docs/quickstart.md +++ b/apps/opik-documentation/documentation/docs/quickstart.md @@ -9,7 +9,7 @@ This guide helps you integrate the Opik platform with your existing LLM applicat ## Set up -Getting started is as simple as creating an [account on Comet](https://www.comet.com/signup?from=llm) or [self-hosting the platform](/self-host/self_hosting_opik). +Getting started is as simple as creating an [account on Comet](https://www.comet.com/signup?from=llm) or [self-hosting the platform](/self-host/self_hosting_opik.md). Once your account is created, you can start logging traces by installing the Opik Python SDK: @@ -48,6 +48,6 @@ def your_llm_application(input): return output ``` -To learn more about the `track` decorator, see the [`track` documentation](./track). Once the traces are logged, you can view them in the OPIK UI: +To learn more about the `track` decorator, see the [`track` documentation](/tracing/log_traces.md#log-using-function-annotators). 
Once the traces are logged, you can view them in the OPIK UI: ![Opik Traces](/img/home/traces_page_for_quickstart.png) diff --git a/apps/opik-documentation/documentation/docs/testing/pytest_integration.md b/apps/opik-documentation/documentation/docs/testing/pytest_integration.md index e2a4a27b9e..e3e04bec66 100644 --- a/apps/opik-documentation/documentation/docs/testing/pytest_integration.md +++ b/apps/opik-documentation/documentation/docs/testing/pytest_integration.md @@ -33,7 +33,7 @@ When you run the tests, Opik will create a new experiment for each run and log e ![Test Experiments](/img/testing/test_experiments.png) :::tip -If you are evaluating your LLM application during development, we recommend using the `evaluate` function as it will provide you with a more detailed report. You can learn more about the `evaluate` function in the [evaluation documentation](/evaluation/evaluate_your_llm). +If you are evaluating your LLM application during development, we recommend using the `evaluate` function as it will provide you with a more detailed report. You can learn more about the `evaluate` function in the [evaluation documentation](/evaluation/evaluate_your_llm.md). ::: ### Advanced Usage diff --git a/apps/opik-documentation/documentation/docs/tracing/annotate_traces.md b/apps/opik-documentation/documentation/docs/tracing/annotate_traces.md index 43c42a50a7..9eeadd1f99 100644 --- a/apps/opik-documentation/documentation/docs/tracing/annotate_traces.md +++ b/apps/opik-documentation/documentation/docs/tracing/annotate_traces.md @@ -1,5 +1,5 @@ --- -sidebar_position: 5 +sidebar_position: 3 sidebar_label: Annotate Traces --- @@ -124,6 +124,5 @@ Comet supports many different LLM as a Judge metrics out of the box including: * `AnswerRelevanceMetric` * `ContextRecallMetric` * `ContextPrecisionMetric` -* `ContextRelevancyMetric` -You can find a full list of metrics in the [LLM as a Judge Metrics](/evaluation/metrics/llm_as_a_judge_metrics.md) section. +You can find a full list of supported metrics in the [Metrics Overview](/evaluation/metrics/overview.md) section. diff --git a/apps/opik-documentation/documentation/docs/tracing/concepts.md b/apps/opik-documentation/documentation/docs/tracing/concepts.md deleted file mode 100644 index ca0e5ebefc..0000000000 --- a/apps/opik-documentation/documentation/docs/tracing/concepts.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 2 -sidebar_label: Concepts - TBD ---- - -# Concepts - -Under construction. 
\ No newline at end of file diff --git a/apps/opik-documentation/documentation/docs/tracing/integrations/overview.md b/apps/opik-documentation/documentation/docs/tracing/integrations/overview.md index 2d25a145ee..354e70656c 100644 --- a/apps/opik-documentation/documentation/docs/tracing/integrations/overview.md +++ b/apps/opik-documentation/documentation/docs/tracing/integrations/overview.md @@ -10,8 +10,8 @@ Opik aims to make it as easy as possible to log, view and evaluate your LLM trac | Integration | Description | Documentation | Try in Colab | | ----------- | ----------- | ------------- | ------------ | -| OpenAI | Log traces for all OpenAI LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/openai) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb) | -| LangChain | Log traces for all LangChain LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/langchain) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb) | -| LlamaIndex | Log traces for all LlamaIndex LLM calls | [Documentation](https://www.comet.com/docs/opik/integrations/llama_index) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb) | +| OpenAI | Log traces for all OpenAI LLM calls | [Documentation](/tracing/integrations/openai.md) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/openai.ipynb) | +| LangChain | Log traces for all LangChain LLM calls | [Documentation](/tracing/integrations/langchain.md) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/langchain.ipynb) | +| LlamaIndex | Log traces for all LlamaIndex LLM calls | [Documentation](/tracing/integrations/llama_index.md) | [![Open Quickstart In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/opik/blob/master/apps/opik-documentation/documentation/docs/cookbook/llama-index.ipynb) | If you would like to see more integrations, please open an issue on our [GitHub repository](https://github.com/comet-ml/opik). 
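As a concrete example of the LangChain integration listed in the table above, the sketch below attaches the `OpikTracer` callback to a LangChain call. It assumes `langchain-openai` is installed and `OPENAI_API_KEY` is set; the linked LangChain documentation page remains the authoritative reference.

```python
from langchain_openai import ChatOpenAI
from opik.integrations.langchain import OpikTracer

# Attach the Opik callback so this LangChain invocation is logged as a trace.
opik_tracer = OpikTracer()
llm = ChatOpenAI(model="gpt-3.5-turbo")

response = llm.invoke(
    "Give me a one-sentence summary of what Opik does.",
    config={"callbacks": [opik_tracer]},
)
print(response.content)
```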
diff --git a/apps/opik-documentation/documentation/docs/tracing/log_distributed_traces.md b/apps/opik-documentation/documentation/docs/tracing/log_distributed_traces.md index c019d29f61..d5f71b440e 100644 --- a/apps/opik-documentation/documentation/docs/tracing/log_distributed_traces.md +++ b/apps/opik-documentation/documentation/docs/tracing/log_distributed_traces.md @@ -1,6 +1,6 @@ --- -sidebar_position: 4 -sidebar_label: Log DistributedTraces +sidebar_position: 2 +sidebar_label: Log Distributed Traces --- # Log Distributed Traces diff --git a/apps/opik-documentation/documentation/docs/tracing/log_traces.md b/apps/opik-documentation/documentation/docs/tracing/log_traces.md index 493c8fbc77..8fe8e2f52c 100644 --- a/apps/opik-documentation/documentation/docs/tracing/log_traces.md +++ b/apps/opik-documentation/documentation/docs/tracing/log_traces.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 1 sidebar_label: Log Traces --- @@ -18,7 +18,7 @@ pip install opik Once the SDK is installed, you can log traces to using one our Comet's integration, function annotations or manually. :::tip -Opik has a number of integrations for popular LLM frameworks like LangChain or OpenAI, checkout a full list of integrations in the [integrations](/tracing/integrations/overview) section. +Opik has a number of integrations for popular LLM frameworks like LangChain or OpenAI, checkout a full list of integrations in the [integrations](/tracing/integrations/overview.md) section. ::: ## Log using function annotators @@ -128,7 +128,7 @@ def llm_chain(input_text): span.update(name="llm_chain") ``` -You can learn more about the `Trace` object in the [Trace reference docs](/sdk-reference-docs/Objects/Trace.html) and the `Span` object in the [Span reference docs](/sdk-reference-docs/Objects/Span.html). +You can learn more about the `Trace` object in the [Trace reference docs](/python-sdk-reference/Objects/Trace.html) and the `Span` object in the [Span reference docs](/python-sdk-reference/Objects/Span.html). ## Log scores to traces and spans diff --git a/apps/opik-documentation/documentation/docs/tracing/overview.md b/apps/opik-documentation/documentation/docs/tracing/overview.md deleted file mode 100644 index 99525a7b07..0000000000 --- a/apps/opik-documentation/documentation/docs/tracing/overview.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -sidebar_position: 1 -sidebar_label: Overview - TBD ---- - -# Overview - -Under construction. \ No newline at end of file diff --git a/apps/opik-documentation/documentation/docusaurus.config.ts b/apps/opik-documentation/documentation/docusaurus.config.ts index 6c955a602c..667d6af162 100644 --- a/apps/opik-documentation/documentation/docusaurus.config.ts +++ b/apps/opik-documentation/documentation/docusaurus.config.ts @@ -68,7 +68,7 @@ const config: Config = { { to: process.env.NODE_ENV === 'development' ? 'http://localhost:8000' - : '/sdk-reference-docs', + : '/python-sdk-reference', label: 'Python SDK reference docs', position: 'left', className: "header-external-link", diff --git a/apps/opik-documentation/python-sdk-docs/source/Objects/Span.rst b/apps/opik-documentation/python-sdk-docs/source/Objects/Span.rst index 092954541a..a1ebb0935c 100644 --- a/apps/opik-documentation/python-sdk-docs/source/Objects/Span.rst +++ b/apps/opik-documentation/python-sdk-docs/source/Objects/Span.rst @@ -1,6 +1,6 @@ Span ==== -.. autoclass:: opik.Span +.. 
autoclass:: opik.api_objects.span.Span :members: - :inherited-members: \ No newline at end of file + :inherited-members: diff --git a/apps/opik-documentation/python-sdk-docs/source/Objects/Trace.rst b/apps/opik-documentation/python-sdk-docs/source/Objects/Trace.rst index 2ba6c25829..6f853d6be5 100644 --- a/apps/opik-documentation/python-sdk-docs/source/Objects/Trace.rst +++ b/apps/opik-documentation/python-sdk-docs/source/Objects/Trace.rst @@ -1,6 +1,6 @@ Trace ===== -.. autoclass:: opik.Trace +.. autoclass:: opik.api_objects.trace.Trace :members: - :inherited-members: \ No newline at end of file + :inherited-members: diff --git a/apps/opik-documentation/python-sdk-docs/source/index.rst b/apps/opik-documentation/python-sdk-docs/source/index.rst index eb5192bc90..6fe37a24fd 100644 --- a/apps/opik-documentation/python-sdk-docs/source/index.rst +++ b/apps/opik-documentation/python-sdk-docs/source/index.rst @@ -155,6 +155,7 @@ You can learn more about the `opik` python SDK in the following sections: integrations/openai/index integrations/langchain/index + integrations/llama_index/index .. toctree:: :caption: Objects @@ -164,3 +165,9 @@ You can learn more about the `opik` python SDK in the following sections: Objects/Span.rst Objects/FeedbackScoreDict.rst Objects/UsageDict.rst + +.. toctree:: + :caption: Documentation Guides + :maxdepth: 1 + + Opik Documentation diff --git a/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/LlamaIndexCallbackHandler.rst b/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/LlamaIndexCallbackHandler.rst new file mode 100644 index 0000000000..de8ae0446c --- /dev/null +++ b/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/LlamaIndexCallbackHandler.rst @@ -0,0 +1,4 @@ +LlamaIndexCallbackHandler +========================= + +.. autofunction:: opik.integrations.llama_index.LlamaIndexCallbackHandler diff --git a/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/index.rst b/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/index.rst new file mode 100644 index 0000000000..e595f9af30 --- /dev/null +++ b/apps/opik-documentation/python-sdk-docs/source/integrations/llama_index/index.rst @@ -0,0 +1,19 @@ +llama_index +=========== + +Opik integrates with LlamaIndex to allow you to log your LlamaIndex calls to the Opik platform. To enable the logging to Opik, simply set:: + + from llama_index.core import Settings + from llama_index.core.callbacks import CallbackManager + from opik.integrations.llama_index import LlamaIndexCallbackHandler + + opik_callback_handler = LlamaIndexCallbackHandler() + Settings.callback_manager = CallbackManager([opik_callback_handler]) + +You can learn more about the `LlamaIndexCallbackHandler` callback in the following section: + +.. 
toctree:: + :maxdepth: 4 + :titlesonly: + + LlamaIndexCallbackHandler diff --git a/sdks/python/setup.py b/sdks/python/setup.py index beabffc278..589afa7d38 100644 --- a/sdks/python/setup.py +++ b/sdks/python/setup.py @@ -1,10 +1,7 @@ -from pathlib import Path - from setuptools import find_packages, setup import os project_urls = {"Source code": "https://github.com/comet-ml/opik"} -this_directory = Path(__file__).parent setup( author="Comet ML Inc.", @@ -13,7 +10,7 @@ classifiers=[ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", + "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", @@ -22,7 +19,7 @@ "Programming Language :: Python :: 3.10", ], description="Comet tool for logging and evaluating LLM traces", - long_description=open("README.md", encoding="utf-8").read(), + long_description=open("./../../README.md", encoding="utf-8").read(), long_description_content_type="text/markdown", install_requires=[ "httpx<1.0.0", @@ -52,5 +49,5 @@ project_urls=project_urls, version=os.environ.get("VERSION", "0.0.1"), zip_safe=False, - license="MIT", + license="Apache 2.0 License", )
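Since `setup.py` above resolves the package version from the `VERSION` environment variable (falling back to `0.0.1`), a local build of the SDK can be pinned to an explicit version as sketched below; the exact release workflow used by CI is not part of this patch.

```bash
# Sketch: install the SDK from source with an explicit version number.
cd sdks/python
VERSION=0.1.0 pip install -e .
```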