Set up HaystackInstrumentor to trace your Haystack application and send the traces to Phoenix at the endpoint defined below.
# Import open-telemetry dependencies
from arize.otel import register

# Set up OTel via the Arize convenience function; the returned tracer
# provider is what the instrumentor hooks into below.
tracer_provider = register(
    space_id="your-space-id",  # in app space settings page
    api_key="your-api-key",  # in app space settings page
    project_name="your-project-name",  # name this to whatever you would like
)

# Import the OpenInference instrumentor for Haystack
from openinference.instrumentation.haystack import HaystackInstrumentor

# Turn on the instrumentor so Haystack pipeline runs emit traces
HaystackInstrumentor().instrument(tracer_provider=tracer_provider)
Set up a simple Pipeline and see it instrumented.
from haystack import Pipeline
from haystack.components.generators import OpenAIGenerator

# Initialize the pipeline
pipeline = Pipeline()

# Initialize the OpenAI generator component
llm = OpenAIGenerator(model="gpt-3.5-turbo")

# Add the generator component to the pipeline
pipeline.add_component("llm", llm)

# Define the question
question = "What is the location of the Hanging Gardens of Babylon?"

# Run the pipeline with the question; with the instrumentor enabled,
# this run produces a trace in Phoenix.
response = pipeline.run({"llm": {"prompt": question}})
print(response)