chat_api_client.py
"""
This module contains the function to send a message to the chat API.
"""
import time
import requests
from llm_agent.config.global_settings import (
HOST_URL,
LLM_HTTP_PORT,
)
from llm_agent.log_config.logger_setup import logger
FASTAPI_REST_PATH = "chat"
MAX_NUMBER_OF_TRIES_TO_CONNECT = 11

def send_message_to_chat_api(message: str) -> str:
    """
    Sends a message to the chat API and returns the response.

    Args:
        message (str): The message to send.

    Returns:
        str: The response from the API, or an error message if the request failed.
    """
    url = f"http://{HOST_URL}:{LLM_HTTP_PORT}/{FASTAPI_REST_PATH}"
    data = {"message": message}
    # Retry the POST request a fixed number of times before giving up.
    for retries in range(MAX_NUMBER_OF_TRIES_TO_CONNECT):
        logger.debug("Attempting to connect: %s", retries)
        try:
            response = requests.post(url, json=data, timeout=120)
            if response.status_code == 200:
                return response.json()
            else:
                logger.error(
                    "send_message_to_chat_api: Error connecting to fastAPI: "
                    "http status code: %s, http response: %s, url: %s",
                    response.status_code,
                    response.text,
                    url,
                )
        except requests.exceptions.RequestException as e:
            logger.error("send_message_to_chat_api: Error from fastAPI: %s", e)
        # Brief pause before the next attempt.
        time.sleep(0.5)
    return (
        "Ouch, I got an error from fastAPI server, can't connect to the LLM."
    )