mirror of https://github.com/ParisNeo/lollms.git
synced 2024-12-18 20:27:58 +00:00

commit 5e96d87910 ("Upgraded")
parent f015b3ce40
@@ -22,6 +22,8 @@ Lord of Large Language Models (LoLLMs) Server is a text generation server based
 - RESTful API for listing personalities and adding new personalities.
 - Easy integration with various applications and frameworks.
 - Possibility to send files to personalities.
+- Possibility to run on multiple nodes and provide a generation service to many clients at once.
+- Data stays local even in the remote version. Only generations are sent to the host node. The logs, data and discussion history are kept in your local discussion folder.
 
 ## Installation
 
@@ -38,12 +40,15 @@ pip install --upgrade git+https://github.com/ParisNeo/lollms.git
 ```
 
 
-To simply configure your environment run the console app:
+To configure your environment, simply run the settings app:
 
 ```bash
-lollms-console
+lollms-settings
 ```
 
+The tool is intuitive and will guide you through the configuration process.
+
 The first time you will be prompted to select a binding.
 ![image](https://github.com/ParisNeo/lollms/assets/827993/2d7f58fe-089d-4d3e-a21a-0609f8e27969)
@@ -1,11 +1,11 @@
 ######
-# Project : GPT4ALL-UI
+# Project : lollms
 # File : binding.py
 # Author : ParisNeo with the help of the community
 # Supported by Nomic-AI
 # license : Apache 2.0
 # Description :
-# This is an interface class for GPT4All-ui bindings.
+# This is an interface class for lollms bindings.
 ######
 from pathlib import Path
 from typing import Callable
@@ -1 +1 @@
-Subproject commit 5b35eb0641862d7bfd12ac0355bf3bc9491b7104
+Subproject commit c08874de08eb012827aa13ab711581db9c8274b1
@@ -1 +1 @@
-Subproject commit 50cf9e7287692f962b3a7ec349155c29708252d0
+Subproject commit 14ba94d886bbe9713d8c4489c7ebe59317c5a057
@@ -329,6 +329,7 @@ Date: {{date}}
         # Disclaimer
         self._disclaimer: str = ""
         self._help: str = ""
+        self._commands: list = []
 
         # Default model parameters
         self._model_temperature: float = 0.8 # higher: more creative, lower more deterministic
@@ -421,6 +422,7 @@ Date: {{date}}
         self._dependencies = config.get("dependencies", self._dependencies)
         self._disclaimer = config.get("disclaimer", self._disclaimer)
         self._help = config.get("help", self._help)
+        self._commands = config.get("commands", self._commands)
         self._model_temperature = config.get("model_temperature", self._model_temperature)
         self._model_n_predicts = config.get("model_n_predicts", self._model_n_predicts)
         self._model_top_k = config.get("model_top_k", self._model_top_k)
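Each optional field above is read with `config.get(key, current_value)`, so a personality configuration written before this change, one with no `commands` entry, still loads and simply keeps the empty default. A minimal sketch of that pattern, using an illustrative stand-in class (`PersonalityLike` and the sample dicts are not part of the lollms API):

```python
# Illustrative stand-in (not the lollms API): optional config fields fall back
# to the attribute's current value, so older configs without "commands" still load.
class PersonalityLike:
    def __init__(self):
        self._help: str = ""
        self._commands: list = []   # new field following this commit's pattern

    def load_config(self, config: dict):
        # Missing keys leave the existing value untouched.
        self._help = config.get("help", self._help)
        self._commands = config.get("commands", self._commands)

p = PersonalityLike()
p.load_config({"help": "Ask me anything"})            # old-style config: commands stays []
p.load_config({"commands": [{"name": "summarize"}]})  # new-style config: commands is filled
print(p._commands)  # [{'name': 'summarize'}]
```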
@@ -530,6 +532,7 @@ Date: {{date}}
             "dependencies": self._dependencies,
             "disclaimer": self._disclaimer,
             "help": self._help,
+            "commands": self._commands,
             "model_temperature": self._model_temperature,
             "model_n_predicts": self._model_n_predicts,
             "model_top_k": self._model_top_k,
@@ -569,6 +572,7 @@ Date: {{date}}
             "dependencies": self._dependencies,
             "disclaimer": self._disclaimer,
             "help": self._help,
+            "commands": self._commands,
             "model_temperature": self._model_temperature,
             "model_n_predicts": self._model_n_predicts,
             "model_top_k": self._model_top_k,
@@ -867,6 +871,26 @@ Date: {{date}}
         self._help = help
 
 
+    @property
+    def commands(self) -> list:
+        """Getter method for the commands attribute.
+
+        Returns:
+            list: The list of commands.
+        """
+        return self._commands
+
+    @commands.setter
+    def commands(self, commands: list):
+        """Setter method for the commands attribute.
+
+        Args:
+            commands (list): The list of commands.
+        """
+        self._commands = commands
+
+
     @property
     def model_temperature(self) -> float:
         """Get the model's temperature."""
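For context, a compact sketch of how the new getter/setter pair behaves from the caller's side; `_Demo` and the sample command entry are illustrative stand-ins, not the lollms API. The property lets `commands` be read like a plain attribute while every write still passes through the setter, leaving room to add validation later.

```python
# Illustrative stand-in (not the lollms API): the @property / @commands.setter
# pair added in this commit, reduced to its bare mechanics.
class _Demo:
    def __init__(self):
        self._commands: list = []

    @property
    def commands(self) -> list:
        """Read access goes through the getter."""
        return self._commands

    @commands.setter
    def commands(self, commands: list):
        """Write access goes through the setter (a natural hook for validation)."""
        self._commands = commands

d = _Demo()
d.commands = [{"name": "send_file", "help": "Send a file to the personality"}]
print(d.commands)  # [{'name': 'send_file', 'help': 'Send a file to the personality'}]
```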
setup.py (2 changed lines)
@@ -26,7 +26,7 @@ def get_all_files(path):
 
 setuptools.setup(
     name="lollms",
-    version="1.1.93",
+    version="1.1.94",
     author="Saifeddine ALOUI",
     author_email="aloui.saifeddine@gmail.com",
     description="A python library for AI personality definition",