# lollms-webui/tests/unitary_tests/test_chat_interface_streaming2.py
"""Smoke test: stream a text completion from a local lollms server.

Posts a streaming generation request to the ``/lollms_generate`` endpoint
of a server assumed to be running on localhost:1234 and echoes each
response chunk to stdout as it arrives.
"""
import requests

# Local lollms server endpoint — requires a running server on port 1234.
url = "http://localhost:1234/lollms_generate"
# NOTE(review): "apon" looks like a typo for "upon", but the prompt text is
# runtime data sent to the model, so it is preserved as-is.
payload = {"prompt": "Once apon a time, ", "temperature": 0.1, "stream": True}
headers = {"Content-Type": "application/json"}

# json= serializes the payload for us; stream=True lets us consume chunks
# as the server produces them; timeout= prevents the test hanging forever
# if the server is unreachable or stalls mid-stream.
response = requests.post(url, json=payload, headers=headers, stream=True, timeout=60)
if response.status_code == 200:
    for response_chunk in response.iter_lines():
        # iter_lines() yields empty keep-alive chunks; skip those.
        if response_chunk:
            print(response_chunk.decode(), end="", flush=True)
else:
    print("Error:", response.status_code)