lollms-webui/tests/unitary_tests/test_chat_interface_streaming.py

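"""Streams a chat completion from the lollms-webui OpenAI-compatible chat
endpoint and prints the streamed delta chunks as they arrive."""
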
import json
import requests
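
# Local lollms-webui server exposing an OpenAI-compatible chat completions route.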
url = "http://localhost:1234/v1/chat/completions"
payload = {
    "messages": [
        {
            "role": "system",
            "content": "You are a research engineer specialized in the applications of AI in robotics.",
        },
        {
            "role": "user",
            "content": "List a number of libraries I can use for robotics.",
        },
    ],
    "max_tokens": 100,
    "temperature": 0.5,
    "stream": True,
}
headers = {"Content-Type": "application/json"}
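
# Send the request with stream=True so the body can be consumed incrementally.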
response = requests.post(url, data=json.dumps(payload), headers=headers, stream=True)
if response.status_code == 200:
    for response_chunk in response.iter_lines():
        if not response_chunk:
            continue
        rc = response_chunk.decode("utf-8")
        # OpenAI-style streams prefix each SSE line with "data: " and end
        # with "data: [DONE]"; handle both so json.loads sees clean JSON.
        if rc.startswith("data: "):
            rc = rc[len("data: "):]
        if rc.strip() == "[DONE]":
            break
        chunk = json.loads(rc)
        # The first delta may carry only the role, so default to an empty string.
        print(chunk["choices"][0]["delta"].get("content", ""), end="", flush=True)
else:
    print("Error:", response.status_code)