Set up GuardrailsInstrumentor to trace your Guardrails application and send the traces to Arize at the endpoint defined below.
# Import the Guardrails instrumentor.
from openinference.instrumentation.guardrails import GuardrailsInstrumentor

# Import open-telemetry dependencies.
from arize.otel import register

# Set up OTel via the Arize convenience function.
# NOTE(review): the original extraction collapsed this snippet onto one line,
# which left `from arize.otel import register` swallowed by a comment; the
# code below restores the intended runnable form.
tracer_provider = register(
    space_id="your-space-id",  # in app space settings page
    api_key="your-api-key",  # in app space settings page
    project_name="your-project-name",  # name this to whatever you would like
)

# Instrument Guardrails so its calls are traced through the provider above.
GuardrailsInstrumentor().instrument(tracer_provider=tracer_provider)
To test, run the following code and observe your traces in Arize.
from guardrails import Guard
from guardrails.hub import TwoWords

import openai

# Build a guard that applies the TwoWords validator to the model output.
guard = Guard().use(
    TwoWords(),
)

# Invoke the guarded LLM call; the guard wraps the OpenAI chat-completions API.
# NOTE(review): the original extraction collapsed this snippet onto one line;
# the code below restores the intended runnable form with identical arguments.
response = guard(
    llm_api=openai.chat.completions.create,
    prompt="What is another name for America?",
    model="gpt-3.5-turbo",
    max_tokens=1024,
)

print(response)