From 87c2bae1e778fa71da56a12bec4f34d79937ea76 Mon Sep 17 00:00:00 2001
From: Gustavo Cid Ornelas
Date: Tue, 15 Apr 2025 10:02:55 -0300
Subject: [PATCH 1/4] feat: add OpenLIT notebook example

---
 .../tracing/openlit/openlit_tracing.ipynb | 125 ++++++++++++++++++
 1 file changed, 125 insertions(+)
 create mode 100644 examples/tracing/openlit/openlit_tracing.ipynb

diff --git a/examples/tracing/openlit/openlit_tracing.ipynb b/examples/tracing/openlit/openlit_tracing.ipynb
new file mode 100644
index 00000000..d43674b4
--- /dev/null
+++ b/examples/tracing/openlit/openlit_tracing.ipynb
@@ -0,0 +1,125 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "2722b419",
+   "metadata": {},
+   "source": [
+    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/openlit/openlit_tracing.ipynb)\n",
+    "\n",
+    "\n",
+    "# OpenLIT quickstart\n",
+    "\n",
+    "This notebook shows how to export traces captured by [OpenLIT](https://docs.openlit.io/latest/features/tracing) to Openlayer. The integration is done via Openlayer's [OpenTelemetry endpoint](https://www.openlayer.com/docs/integrations/opentelemetry). For more information, refer to the [OpenLIT integration guide](https://www.openlayer.com/docs/integrations/openlit)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "020c8f6a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install openai openlit"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "75c2a473",
+   "metadata": {},
+   "source": [
+    "## 1. Set the environment variables"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "f3f4fa13",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "import openai\n",
+    "\n",
+    "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY_HERE\"\n",
+    "\n",
+    "os.environ[\"OTEL_EXPORTER_OTLP_ENDPOINT\"] = \"https://api.openlayer.com/v1/otel\"\n",
+    "os.environ[\"OTEL_EXPORTER_OTLP_HEADERS\"] = \"Authorization=Bearer YOUR_OPENLAYER_API_KEY_HERE, x-bt-parent=pipeline_id:YOUR_OPENLAYER_PIPELINE_ID_HERE\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9758533f",
+   "metadata": {},
+   "source": [
+    "## 2. Initialize OpenLIT instrumentation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import openlit\n",
+    "\n",
+    "openlit.init(disable_batch=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "72a6b954",
+   "metadata": {},
+   "source": [
+    "## 3. Use LLMs and workflows as usual\n",
+    "\n",
+    "That's it! Now you can continue using LLMs and workflows as usual. The trace data is automatically exported to Openlayer and you can start creating tests around it."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "e00c1c79",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "client = openai.OpenAI()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "abaf6987-c257-4f0d-96e7-3739b24c7206",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "client.chat.completions.create(\n",
+    "    model=\"gpt-4o-mini\", messages=[{\"role\": \"user\", \"content\": \"How are you doing today?\"}]\n",
+    ")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "openlayer-assistant",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.18"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}

From bd2100b2581d6c71791f5f8c2910d146edea407f Mon Sep 17 00:00:00 2001
From: Gustavo Cid Ornelas
Date: Tue, 15 Apr 2025 10:04:24 -0300
Subject: [PATCH 2/4] chore: link to OpenLLMetry integration guide

---
 .../tracing/openllmetry/openllmetry_tracing.ipynb | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/examples/tracing/openllmetry/openllmetry_tracing.ipynb b/examples/tracing/openllmetry/openllmetry_tracing.ipynb
index eb1833ed..bb215775 100644
--- a/examples/tracing/openllmetry/openllmetry_tracing.ipynb
+++ b/examples/tracing/openllmetry/openllmetry_tracing.ipynb
@@ -10,7 +10,7 @@
     "\n",
     "# OpenLLMetry quickstart\n",
     "\n",
-    "This notebook shows how to export traces captured by [OpenLLMetry](https://github.com/traceloop/openllmetry) (by Traceloop) to Openlayer. The integration is done via the Openlayer's [OpenTelemetry endpoint](https://www.openlayer.com/docs/integrations/opentelemetry)."
+    "This notebook shows how to export traces captured by [OpenLLMetry](https://github.com/traceloop/openllmetry) (by Traceloop) to Openlayer. The integration is done via Openlayer's [OpenTelemetry endpoint](https://www.openlayer.com/docs/integrations/opentelemetry). For more information, refer to the [OpenLLMetry integration guide](https://www.openlayer.com/docs/integrations/openllmetry)."
    ]
   },
   {
@@ -62,15 +62,7 @@
    "execution_count": null,
    "id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Failed to export batch code: 404, reason: {\"error\": \"The requested URL was not found on the server. 
If you entered the URL manually please check your spelling and try again.\", \"code\": 404}\n" - ] - } - ], + "outputs": [], "source": [ "from traceloop.sdk import Traceloop\n", "\n", From 9c6c355bcf1332ebf1b3a50c31bbd92bfa42521d Mon Sep 17 00:00:00 2001 From: Gustavo Cid Ornelas Date: Tue, 15 Apr 2025 10:37:16 -0300 Subject: [PATCH 3/4] feat: add MLflow notebook example --- examples/tracing/mlflow/mlflow_tracing.ipynb | 126 +++++++++++++++++++ examples/tracing/mlflow/mlruns/0/meta.yaml | 6 + 2 files changed, 132 insertions(+) create mode 100644 examples/tracing/mlflow/mlflow_tracing.ipynb create mode 100644 examples/tracing/mlflow/mlruns/0/meta.yaml diff --git a/examples/tracing/mlflow/mlflow_tracing.ipynb b/examples/tracing/mlflow/mlflow_tracing.ipynb new file mode 100644 index 00000000..ad22df3a --- /dev/null +++ b/examples/tracing/mlflow/mlflow_tracing.ipynb @@ -0,0 +1,126 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "2722b419", + "metadata": {}, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/mlflow/mlflow_tracing.ipynb)\n", + "\n", + "\n", + "# MLflow quickstart\n", + "\n", + "This notebook shows how to export traces captured by [MLflow](https://mlflow.org/docs/latest/tracing/integrations/) to Openlayer. The integration is done via the Openlayer's [OpenTelemetry endpoint](https://www.openlayer.com/docs/integrations/opentelemetry). For more information, refer to the [MLflow integration guide](https://www.openlayer.com/docs/integrations/mlflow)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "020c8f6a", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install openai mlflow" + ] + }, + { + "cell_type": "markdown", + "id": "75c2a473", + "metadata": {}, + "source": [ + "## 1. Set the environment variables" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "f3f4fa13", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "import openai\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY_HERE\"\n", + "\n", + "os.environ[\"OTEL_EXPORTER_OTLP_TRACES_ENDPOINT\"] = \"https://api.openlayer.com/v1/otel/v1/traces\"\n", + "os.environ[\"OTEL_EXPORTER_OTLP_TRACES_HEADERS\"] = \"Authorization=Bearer YOUR_OPENLAYER_API_KEY_HERE, x-bt-parent=pipeline_id:YOUR_OPENLAYER_PIPELINE_ID_HERE\"\n", + "os.environ['OTEL_EXPORTER_OTLP_TRACES_PROTOCOL']= \"http/protobuf\"" + ] + }, + { + "cell_type": "markdown", + "id": "9758533f", + "metadata": {}, + "source": [ + "## 2. Initialize MLflow instrumentation" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485", + "metadata": {}, + "outputs": [], + "source": [ + "import mlflow\n", + "\n", + "mlflow.openai.autolog()" + ] + }, + { + "cell_type": "markdown", + "id": "72a6b954", + "metadata": {}, + "source": [ + "## 3. Use LLMs and workflows as usual\n", + "\n", + "That's it! Now you can continue using LLMs and workflows as usual.The trace data is automatically exported to Openlayer and you can start creating tests around it." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e00c1c79", + "metadata": {}, + "outputs": [], + "source": [ + "client = openai.OpenAI()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "abaf6987-c257-4f0d-96e7-3739b24c7206", + "metadata": {}, + "outputs": [], + "source": [ + "client.chat.completions.create(\n", + " model=\"gpt-4o-mini\", messages=[{\"role\": \"user\", \"content\": \"How are you doing today?\"}]\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "openlayer-assistant", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/tracing/mlflow/mlruns/0/meta.yaml b/examples/tracing/mlflow/mlruns/0/meta.yaml new file mode 100644 index 00000000..96b86fce --- /dev/null +++ b/examples/tracing/mlflow/mlruns/0/meta.yaml @@ -0,0 +1,6 @@ +artifact_location: file:///Users/gustavocid/Desktop/openlayer-repos/openlayer-python-client/examples/tracing/mlflow/mlruns/0 +creation_time: 1744723828391 +experiment_id: '0' +last_update_time: 1744723828391 +lifecycle_stage: active +name: Default From 9eb90b9c9168f807a5ebb511367fe1f7be32865d Mon Sep 17 00:00:00 2001 From: Gustavo Cid Ornelas Date: Fri, 2 May 2025 10:08:37 -0300 Subject: [PATCH 4/4] chore: remove MLflow example --- examples/tracing/mlflow/mlflow_tracing.ipynb | 126 ------------------- examples/tracing/mlflow/mlruns/0/meta.yaml | 6 - 2 files changed, 132 deletions(-) delete mode 100644 examples/tracing/mlflow/mlflow_tracing.ipynb delete mode 100644 examples/tracing/mlflow/mlruns/0/meta.yaml diff --git a/examples/tracing/mlflow/mlflow_tracing.ipynb b/examples/tracing/mlflow/mlflow_tracing.ipynb deleted file mode 100644 index ad22df3a..00000000 --- a/examples/tracing/mlflow/mlflow_tracing.ipynb +++ /dev/null @@ -1,126 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "2722b419", - "metadata": {}, - "source": [ - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/mlflow/mlflow_tracing.ipynb)\n", - "\n", - "\n", - "# MLflow quickstart\n", - "\n", - "This notebook shows how to export traces captured by [MLflow](https://mlflow.org/docs/latest/tracing/integrations/) to Openlayer. The integration is done via the Openlayer's [OpenTelemetry endpoint](https://www.openlayer.com/docs/integrations/opentelemetry). For more information, refer to the [MLflow integration guide](https://www.openlayer.com/docs/integrations/mlflow)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "020c8f6a", - "metadata": {}, - "outputs": [], - "source": [ - "!pip install openai mlflow" - ] - }, - { - "cell_type": "markdown", - "id": "75c2a473", - "metadata": {}, - "source": [ - "## 1. 
Set the environment variables" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "f3f4fa13", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "import openai\n", - "\n", - "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY_HERE\"\n", - "\n", - "os.environ[\"OTEL_EXPORTER_OTLP_TRACES_ENDPOINT\"] = \"https://api.openlayer.com/v1/otel/v1/traces\"\n", - "os.environ[\"OTEL_EXPORTER_OTLP_TRACES_HEADERS\"] = \"Authorization=Bearer YOUR_OPENLAYER_API_KEY_HERE, x-bt-parent=pipeline_id:YOUR_OPENLAYER_PIPELINE_ID_HERE\"\n", - "os.environ['OTEL_EXPORTER_OTLP_TRACES_PROTOCOL']= \"http/protobuf\"" - ] - }, - { - "cell_type": "markdown", - "id": "9758533f", - "metadata": {}, - "source": [ - "## 2. Initialize MLflow instrumentation" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485", - "metadata": {}, - "outputs": [], - "source": [ - "import mlflow\n", - "\n", - "mlflow.openai.autolog()" - ] - }, - { - "cell_type": "markdown", - "id": "72a6b954", - "metadata": {}, - "source": [ - "## 3. Use LLMs and workflows as usual\n", - "\n", - "That's it! Now you can continue using LLMs and workflows as usual.The trace data is automatically exported to Openlayer and you can start creating tests around it." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "e00c1c79", - "metadata": {}, - "outputs": [], - "source": [ - "client = openai.OpenAI()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "abaf6987-c257-4f0d-96e7-3739b24c7206", - "metadata": {}, - "outputs": [], - "source": [ - "client.chat.completions.create(\n", - " model=\"gpt-4o-mini\", messages=[{\"role\": \"user\", \"content\": \"How are you doing today?\"}]\n", - ")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "openlayer-assistant", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.18" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/examples/tracing/mlflow/mlruns/0/meta.yaml b/examples/tracing/mlflow/mlruns/0/meta.yaml deleted file mode 100644 index 96b86fce..00000000 --- a/examples/tracing/mlflow/mlruns/0/meta.yaml +++ /dev/null @@ -1,6 +0,0 @@ -artifact_location: file:///Users/gustavocid/Desktop/openlayer-repos/openlayer-python-client/examples/tracing/mlflow/mlruns/0 -creation_time: 1744723828391 -experiment_id: '0' -last_update_time: 1744723828391 -lifecycle_stage: active -name: Default