Mirror of https://github.com/ParisNeo/lollms.git, synced 2025-02-21 09:51:45 +00:00
First working version of Gandalf

parent 3eb864612b
commit b18c95e60a
@@ -1,28 +1,27 @@
-from lollms.apps.console import Conversation
+from lollms.app import LollmsApplication
 from lollms.paths import LollmsPaths
 from lollms.main_config import LOLLMSConfig
 import sys
 import time
 maxtry=10
 import streamlit as st
 from collections import deque
 from pathlib import Path
 import json
 import re
 import random
 import pwd, os
 from flask import Flask, make_response, request, abort
 from flask.json import jsonify
 from typing import Callable
 import string
 
 
 BUNDLES=4
 MAXWORDS=1048
 DEBUG=True
 class Gandalf(LollmsApplication):
     def __init__(self, cfg=None):
-        lollms_paths = LollmsPaths.find_paths(tool_prefix="gandalf_")
-        super().__init__("Gandalf", cfg, lollms_paths)
+        lollms_paths = LollmsPaths.find_paths(tool_prefix="lollms_server_")
+        config = LOLLMSConfig.autoload(lollms_paths, None)
+        super().__init__("Gandalf", config, lollms_paths)
 
     def split_fibers(self,fibers, max_words=MAXWORDS):
         # Split each fiber into chunks of up to max_words words
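The first hunk ends inside split_fibers, so only its leading comment is visible. A minimal sketch of what a splitter matching that comment might look like, assuming plain whitespace tokenization; this body is illustrative, not the commit's actual code:

    def split_fibers(self, fibers, max_words=MAXWORDS):
        # Split each fiber into chunks of up to max_words words
        chunks = []
        for fiber in fibers:
            words = fiber.split()
            # Walk the word list in windows of max_words words each
            for i in range(0, len(words), max_words):
                chunks.append(" ".join(words[i:i + max_words]))
        return chunks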
@@ -372,5 +371,10 @@ def providers():
     #todo : implement
     return jsonify({})
 
-if __name__ == "__main__":
-    app.run()
+
+def main():
+    app.run()
+
+
+if __name__ == "__main__":
+    main()
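This hunk replaces a bare app.run() under the __main__ guard with a named main() function. A common reason for the pattern is to give packaging tools and scripts a callable entry point; a self-contained sketch of it (the /hello route is an assumption added for illustration):

    from flask import Flask

    app = Flask(__name__)

    @app.route("/hello")
    def hello():
        return "ok"

    def main():
        # Named entry point: console scripts can import and call main(),
        # while `python app.py` still reaches it via the guard below.
        app.run()

    if __name__ == "__main__":
        main()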
@@ -637,22 +637,22 @@ class LoLLMsServer(LollmsApplication):
             model = self.model
             self.clients[client_id]["is_generating"]=True
             self.clients[client_id]["requested_stop"]=False
             prompt = data['prompt']
             tokenized = model.tokenize(prompt)
-            personality_id = data.get('personality', -1)
+            personality_id = int(data.get('personality', -1))
 
-            n_crop = data.get('n_crop', len(tokenized))
+            n_crop = int(data.get('n_crop', len(tokenized)))
             if n_crop!=-1:
                 prompt = model.detokenize(tokenized[-n_crop:])
 
-            n_predicts = data["n_predicts"]
+            n_predicts = int(data.get("n_predicts",1024))
             parameters = data.get("parameters",{
-                "temperature":self.config["temperature"],
-                "top_k":self.config["top_k"],
-                "top_p":self.config["top_p"],
-                "repeat_penalty":self.config["repeat_penalty"],
-                "repeat_last_n":self.config["repeat_last_n"],
-                "seed":self.config["seed"]
+                "temperature":data.get("temperature",self.config["temperature"]),
+                "top_k":data.get("top_k",self.config["top_k"]),
+                "top_p":data.get("top_p",self.config["top_p"]),
+                "repeat_penalty":data.get("repeat_penalty",self.config["repeat_penalty"]),
+                "repeat_last_n":data.get("repeat_last_n",self.config["repeat_last_n"]),
+                "seed":data.get("seed",self.config["seed"])
             })
 
             if personality_id==-1:
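In this hunk, n_crop trims the prompt to its last n_crop tokens before generation, with -1 meaning no cropping. A standalone sketch of the same tail-cropping idea, using a whitespace split as a stand-in for model.tokenize/model.detokenize:

    def crop_prompt(prompt: str, n_crop: int) -> str:
        # Stand-in tokenizer; the server uses model.tokenize/detokenize.
        tokenized = prompt.split()
        if n_crop != -1:
            # Keep only the trailing n_crop tokens.
            prompt = " ".join(tokenized[-n_crop:])
        return prompt

    print(crop_prompt("one two three four five", 3))  # -> "three four five"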
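The reworked parameters block lets a request override individual generation settings while falling back to the server configuration, and the surrounding fields (personality, n_crop, n_predicts) are now cast with int() so non-integer payload values fail early. A standalone sketch of the same per-key fallback pattern, with hypothetical config values:

    config = {"temperature": 0.7, "top_k": 40, "top_p": 0.95,
              "repeat_penalty": 1.1, "repeat_last_n": 64, "seed": -1}

    def build_parameters(data: dict) -> dict:
        # A full "parameters" dict in the request wins outright; otherwise
        # each key falls back from the request body to the server config.
        keys = ["temperature", "top_k", "top_p",
                "repeat_penalty", "repeat_last_n", "seed"]
        return data.get("parameters", {k: data.get(k, config[k]) for k in keys})

    print(build_parameters({"temperature": 0.2}))
    # {'temperature': 0.2, 'top_k': 40, ...}  only temperature is overridden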