Set up GuardrailsInstrumentor to trace your guardrails application and send the traces to Arize at the endpoint defined below.
# Set up tracing: import the Guardrails instrumentor, register the
# OTEL exporter against the Arize endpoint, then instrument Guardrails
# so guard calls emit traces.
from openinference.instrumentation.guardrails import GuardrailsInstrumentor

# Import open-telemetry dependencies
from arize_otel import register_otel, Endpoints

# Setup OTEL via our convenience function
register_otel(
    endpoints=Endpoints.ARIZE,
    space_id="your-space-id",    # in app space settings page
    api_key="your-api-key",      # in app space settings page
    model_id="your-model-id",    # name this to whatever you would like
)
GuardrailsInstrumentor().instrument()
To test, run the following code and observe your traces in Arize.
# Smoke test: build a Guard with the TwoWords validator, invoke it
# against the OpenAI chat-completions API, and print the validated
# response. The resulting traces should appear in Arize.
from guardrails import Guard
from guardrails.hub import TwoWords
import openai

guard = Guard().use(
    TwoWords(),
)
response = guard(
    llm_api=openai.chat.completions.create,
    prompt="What is another name for America?",
    model="gpt-3.5-turbo",
    max_tokens=1024,
)
print(response)