Set up the HaystackInstrumentor to trace your Haystack application and send the traces to Arize at the endpoint defined below.
# Haystack auto-instrumentation from OpenInference.
from openinference.instrumentation.haystack import HaystackInstrumentor

# OpenTelemetry convenience helpers from Arize.
from arize_otel import register_otel, Endpoints

# Register an OTEL tracer that exports spans to the Arize endpoint.
# The placeholder values below come from the in-app space settings page.
register_otel(
    endpoints=Endpoints.ARIZE,
    space_id="your-space-id",    # in app space settings page
    api_key="your-api-key",      # in app space settings page
    model_id="your-model-id",    # name this to whatever you would like
)

# From this point on, every Haystack pipeline run is traced.
HaystackInstrumentor().instrument()
Set up a simple Pipeline and see it instrumented.
from haystack import Pipeline
from haystack.components.generators import OpenAIGenerator

# Build a minimal pipeline with a single OpenAI generator component.
pipeline = Pipeline()
llm = OpenAIGenerator(model="gpt-3.5-turbo")
pipeline.add_component("llm", llm)

# Run the pipeline on one question; the call shows up in the traces.
question = "What is the location of the Hanging Gardens of Babylon?"
response = pipeline.run({"llm": {"prompt": question}})
print(response)