import json
import uuid

import numpy as np
import requests

from arize.api import Client
from arize.types import ModelTypes

# Step 1: Set up the Arize client and Hugging Face API keys/tokens
ARIZE_SPACE_KEY = 'YOUR_ARIZE_SPACE_KEY'
ARIZE_API_KEY = 'YOUR_ARIZE_API_KEY'
arize = Client(space_key=ARIZE_SPACE_KEY, api_key=ARIZE_API_KEY)
API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-mnli"
YOUR_HUGGINGFACE_API_KEY = 'YOUR_HUGGINGFACE_API_KEY'
headers = {"Authorization": "Bearer {}".format(YOUR_HUGGINGFACE_API_KEY}

# Handler that runs inference and logs the prediction to Arize (the name is illustrative)
def classify_and_log(payload):
    # Step 2: Standard request to the Hugging Face Inference API
    data = json.dumps(payload)
    response = requests.request("POST", API_URL, headers=headers, data=data)
    output = json.loads(response.content.decode("utf-8"))
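    # The zero-shot endpoint returns parallel 'labels' and 'scores' lists, e.g.:
    # {"sequence": "...", "labels": ["label_a", "label_b"], "scores": [0.97, 0.03]}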
    # Step 3: Process output (and/or features) for logging to Arize
    idx = np.argmax(output['scores'])
    prediction, score = output['labels'][idx], output['scores'][idx]
    # Optional: if you want to log model features to Arize
    features = feature_pipeline(data)
    # Step 4: Log the prediction and score to Arize
    arize_response = arize.log(
        model_id='facebook/bart-large-mnli',
        model_type=ModelTypes.SCORE_CATEGORICAL,
        prediction_id=str(uuid.uuid4()),
        prediction_label=(prediction, score),
        features=features,
    )
    # arize.log() returns a future; resolve it to confirm the record was accepted
    arize_success = arize_response.result().status_code == 200
    # Step 5: Return the formatted output
    return {'prediction': prediction,
            'arize-success': arize_success}
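
# Example call (illustrative): the payload follows the Hugging Face Inference API
# format for zero-shot classification, with input text and candidate labels.
if __name__ == "__main__":
    example_payload = {
        "inputs": "The new update keeps crashing whenever I open the app.",
        "parameters": {"candidate_labels": ["bug report", "feature request", "praise"]},
    }
    result = classify_and_log(example_payload)
    print(result)  # {'prediction': <top label>, 'arize-success': <bool>}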