Added working Docker

This commit is contained in:
Saifeddine ALOUI 2023-10-15 02:27:30 +02:00
parent 282bd96824
commit 220a843dbe
16 changed files with 310 additions and 17 deletions

Dockerfile Normal file

@@ -0,0 +1,31 @@
# Use an official Python runtime as a parent image
#FROM ai_ticket
ARG BASE_IMAGE
FROM ${BASE_IMAGE}
# Set the working directory to /app
WORKDIR /app
# Copy the requirements file into the container at /app
COPY requirements.txt /app/requirements.txt
# Install any needed packages specified in requirements.txt
RUN apt update
RUN apt install -y git
RUN pip install --trusted-host pypi.python.org -r requirements.txt
COPY ./elf_docker_cfg /app/elf_docker_cfg
COPY ./lollms /app/lollms
COPY ./README.md /app/README.md
COPY ./MANIFEST.in /app/MANIFEST.in
COPY ./LICENSE /app/LICENSE
COPY ./requirements_dev.txt /app/requirements_dev.txt
COPY ./requirements.txt /app/requirements.txt
COPY ./setup.py /app/setup.py
COPY ./zoos /app/zoos
COPY ./configs /app/configs
RUN pip install -e .
# Run the lollms-elf server when the container launches
CMD ["lollms-elf", "--host", "0.0.0.0", "--default_cfg_path", "/app/elf_docker_cfg/config_paths.yaml"]

configs/.gitignore vendored Normal file

@@ -0,0 +1 @@
local_config.yaml

configs/config.yaml Normal file

@@ -0,0 +1,55 @@
# =================== Lord Of Large Language Models Configuration file ===========================
version: 26
binding_name: null
model_name: null
# Enables gpu usage
enable_gpu: true
# Host information
host: localhost
port: 9601
# Generation parameters
discussion_prompt_separator: "!@>"
seed: -1
n_predict: 1024
ctx_size: 2048
min_n_predict: 256
temperature: 0.9
top_k: 50
top_p: 0.95
repeat_last_n: 40
repeat_penalty: 1.2
n_threads: 8
#Personality parameters
personalities: ["generic/lollms"]
active_personality_id: 0
override_personality_model_parameters: false # if true, the personality parameters are overridden by those of the configuration (may affect personality behaviour)
# Extensions
extensions: []
# User infos
user_name: user
user_description: ""
use_user_name_in_discussions: false
user_avatar: default_user
# Automatic update
auto_update: false
auto_save: true
debug: false
# Data vectorization
use_files: true # Activate using files
data_vectorization_activate: true # To activate/deactivate data vectorization
data_vectorization_method: "tfidf_vectorizer" # "model_embedding" or "tfidf_vectorizer"
data_visualization_method: "PCA" #"PCA" or "TSNE"
data_vectorization_save_db: false # For each new session, new files
data_vectorization_chunk_size: 512 # chunk size
data_vectorization_overlap_size: 128 # overlap between chunks size
data_vectorization_nb_chunks: 2 # number of chunks to use
data_vectorization_build_keys_words: false # If true, when querying the database, we use keywords generated from the user prompt instead of the prompt itself.
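
Note on the generation parameters above: ctx_size bounds n_predict, which in turn bounds min_n_predict. As a quick sanity check, a minimal sketch (not part of the commit; assumes PyYAML is installed, and lollms's BaseConfig performs essentially this kind of load):

import yaml

with open("configs/config.yaml") as f:
    cfg = yaml.safe_load(f)

# The context window must accommodate the prediction budget.
assert cfg["ctx_size"] >= cfg["n_predict"] >= cfg["min_n_predict"]
assert cfg["version"] == 26
# Resolve the active personality, here "generic/lollms".
print(cfg["personalities"][cfg["active_personality_id"]])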

docker-compose.yaml Normal file

@@ -0,0 +1,13 @@
version: '3'
services:
  lollms:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        - BASE_IMAGE=python:3.10
    ports:
      - 9601:9601
    volumes:
      - ./data:/app/data
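
The compose file feeds the BASE_IMAGE build arg consumed by the Dockerfile's FROM ${BASE_IMAGE} and publishes the port from configs/config.yaml. For reference, a sketch of the equivalent manual build and run (not part of the commit; assumes a local docker CLI, and the image tag is hypothetical):

import subprocess

IMAGE = "lollms-elf:local"  # hypothetical tag

# Build with the same base image the compose file passes as a build arg.
subprocess.run(
    ["docker", "build", "--build-arg", "BASE_IMAGE=python:3.10",
     "-t", IMAGE, "."],
    check=True,
)

# Publish the same port mapping as the compose service.
subprocess.run(
    ["docker", "run", "--rm", "-p", "9601:9601", IMAGE],
    check=True,
)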

elf_docker_cfg/config_paths.yaml Normal file

@@ -0,0 +1,2 @@
lollms_path: /app/
lollms_personal_path: /app/elf_docker_cfg/personal

@@ -0,0 +1,51 @@
active_personality_id: 15
audio_auto_send_input: true
audio_in_language: en-US
audio_out_voice: Google UK English Female
audio_pitch: '1'
audio_silenceTimer: 5000
auto_save: true
auto_show_browser: true
auto_speak: false
auto_update: true
binding_name: null
model_name: null
config: local_config
ctx_size: 4090
data_vectorization_activate: true
data_vectorization_build_keys_words: true
data_vectorization_chunk_size: 512
data_vectorization_method: tfidf_vectorizer
data_vectorization_nb_chunks: 3
data_vectorization_overlap_size: 128
data_vectorization_save_db: false
data_visualization_method: PCA
db_path: lollms.db
debug: true
discussion_prompt_separator: '!@>'
enable_gpu: true
extensions: []
host: localhost
min_n_predict: 256
n_predict: 1024
n_threads: 8
override_personality_model_parameters: false
personalities:
- generic/lollms
port: 9600
repeat_last_n: 40
repeat_penalty: 1.2
seed: -1
temperature: '0.3'
top_k: 50
top_p: 0.95
use_discussions_history: true
use_files: true
use_user_informations_in_discussion: false
use_user_name_in_discussions: true
user_avatar: default_user
user_description:
user_name: User
version: 26

@@ -0,0 +1,2 @@
lollms_path: .
lollms_personal_path: elf_test_cfg/personal

@@ -0,0 +1,51 @@
active_personality_id: 15
audio_auto_send_input: true
audio_in_language: en-US
audio_out_voice: Google UK English Female
audio_pitch: '1'
audio_silenceTimer: 5000
auto_save: true
auto_show_browser: true
auto_speak: false
auto_update: true
binding_name: null
model_name: null
config: local_config
ctx_size: 4090
data_vectorization_activate: true
data_vectorization_build_keys_words: true
data_vectorization_chunk_size: 512
data_vectorization_method: tfidf_vectorizer
data_vectorization_nb_chunks: 3
data_vectorization_overlap_size: 128
data_vectorization_save_db: false
data_visualization_method: PCA
db_path: lollms.db
debug: true
discussion_prompt_separator: '!@>'
enable_gpu: true
extensions: []
host: localhost
min_n_predict: 256
n_predict: 1024
n_threads: 8
override_personality_model_parameters: false
personalities:
- generic/lollms
port: 9600
repeat_last_n: 40
repeat_penalty: 1.2
seed: -1
temperature: '0.3'
top_k: 50
top_p: 0.95
use_discussions_history: true
use_files: true
use_user_informations_in_discussion: false
use_user_name_in_discussions: true
user_avatar: default_user
user_description:
user_name: User
version: 26

@@ -0,0 +1,38 @@
active_personality_id: 0
auto_save: true
auto_update: false
binding_name: null
ctx_size: 2048
data_vectorization_activate: true
data_vectorization_build_keys_words: false
data_vectorization_chunk_size: 512
data_vectorization_method: tfidf_vectorizer
data_vectorization_nb_chunks: 2
data_vectorization_overlap_size: 128
data_vectorization_save_db: false
data_visualization_method: PCA
debug: false
discussion_prompt_separator: '!@>'
enable_gpu: true
extensions: []
host: localhost
min_n_predict: 256
model_name: null
n_predict: 1024
n_threads: 8
override_personality_model_parameters: false
personalities:
- generic/lollms
port: 9601
repeat_last_n: 40
repeat_penalty: 1.2
seed: -1
temperature: 0.9
top_k: 50
top_p: 0.95
use_files: true
use_user_name_in_discussions: false
user_avatar: default_user
user_description: ''
user_name: user
version: 26

@@ -12,16 +12,18 @@ from flask import Flask, make_response, request, abort
from flask.json import jsonify
from typing import Callable
import string
import argparse
from ascii_colors import ASCIIColors
BUNDLES=4
MAXWORDS=1048
DEBUG=True
-class Gandalf(LollmsApplication):
-    def __init__(self, cfg=None):
-        lollms_paths = LollmsPaths.find_paths(tool_prefix="lollms_server_")
+class Elf(LollmsApplication):
+    def __init__(self):
+        pass
+    def init(self, custom_default_cfg_path):
+        lollms_paths = LollmsPaths.find_paths(custom_default_cfg_path=custom_default_cfg_path, tool_prefix="lollms_elf_")
        config = LOLLMSConfig.autoload(lollms_paths, None)
-        super().__init__("Gandalf", config, lollms_paths)
+        super().__init__("Elf", config, lollms_paths)

    def split_fibers(self, fibers, max_words=MAXWORDS):
        # Split each fiber into chunks of up to max_words words
@@ -144,9 +146,8 @@ class Gandalf(LollmsApplication):
# set up the Flask application
app = Flask(__name__)
#if __name__ == "__main__":
-cv = Gandalf(Path("config.yaml"))
+cv = Elf()
# input_file_path = "user_input.txt"
# try:
#     cv.read_input_file(input_file_path)
@@ -373,6 +374,46 @@ def providers():
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--host', '-hst', default=None, help='Host name')
    parser.add_argument('--port', '-prt', default=None, help='Port number')
    parser.add_argument('--reset_personal_path', action='store_true', help='Reset the personal path')
    parser.add_argument('--reset_config', action='store_true', help='Reset the configurations')
    parser.add_argument('--reset_installs', action='store_true', help='Reset all installation status')
    parser.add_argument('--default_cfg_path', type=str, default=None, help='Path to a custom default configuration file')
    ASCIIColors.yellow(" _ _ _ _ _ _ _ ")
    ASCIIColors.yellow(" _\ \ /\ \ _\ \ _\ \ /\_\/\_\ _ / /\ ")
    ASCIIColors.yellow(" /\__ \ / \ \ /\__ \ /\__ \ / / / / //\_\ / / \ ")
    ASCIIColors.yellow(" / /_ \_\ / /\ \ \ / /_ \_\ / /_ \_\ /\ \/ \ \/ / // / /\ \__ ")
    ASCIIColors.yellow(" / / /\/_/ / / /\ \ \ / / /\/_/ / / /\/_/ / \____\__/ // / /\ \___\ ")
    ASCIIColors.yellow(" / / / / / / \ \_\ / / / / / / / /\/________/ \ \ \ \/___/ ")
    ASCIIColors.yellow(" / / / / / / / / // / / / / / / / /\/_// / / \ \ \ ")
    ASCIIColors.yellow(" / / / ____ / / / / / // / / ____ / / / ____ / / / / / /_ \ \ \ ")
    ASCIIColors.yellow(" / /_/_/ ___/\ / / /___/ / // /_/_/ ___/\ / /_/_/ ___/\/ / / / / //_/\__/ / / ")
    ASCIIColors.yellow("/_______/\__\// / /____\/ //_______/\__\//_______/\__\/\/_/ / / / \ \/___/ / ")
    ASCIIColors.yellow("\_______\/ \/_________/ \_______\/ \_______\/ \/_/ \_____\/ ")
    ASCIIColors.yellow(" _ _ _ ")
    ASCIIColors.yellow(" /\ \ _\ \ /\ \ ")
    ASCIIColors.yellow(" / \ \ /\__ \ / \ \ ")
    ASCIIColors.yellow(" / /\ \ \ / /_ \_\ / /\ \ \ ")
    ASCIIColors.yellow(" / / /\ \_\ / / /\/_/ / / /\ \_\ ")
    ASCIIColors.yellow(" / /_/_ \/_/ / / / / /_/_ \/_/ ")
    ASCIIColors.yellow(" / /____/\ / / / / /____/\ ")
    ASCIIColors.yellow(" / /\____\/ / / / ____ / /\____\/ ")
    ASCIIColors.yellow(" / / /______ / /_/_/ ___/\ / / / ")
    ASCIIColors.yellow("/ / /_______\/_______/\__\// / / ")
    ASCIIColors.yellow("\/__________/\_______\/ \/_/ ")
    args = parser.parse_args()
    if args.reset_personal_path:
        LollmsPaths.reset_configs()
    cv.init(args.default_cfg_path)
    app.run(host=args.host, port=args.port)
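
The Gandalf to Elf rename also splits construction from initialization: the module-level cv = Elf() stays cheap so the Flask routes can bind to it at import time, and cv.init(args.default_cfg_path) defers path discovery until the CLI has been parsed. A standalone sketch of that pattern (not part of the commit; hypothetical names, not the lollms API):

import argparse
from flask import Flask, jsonify

app = Flask(__name__)

class Service:
    def __init__(self):
        # Cheap constructor so module-level routes can close over the instance.
        self.cfg_path = None

    def init(self, custom_default_cfg_path=None):
        # Real app: LollmsPaths.find_paths(custom_default_cfg_path=...,
        # tool_prefix="lollms_elf_") followed by LOLLMSConfig.autoload(...).
        self.cfg_path = custom_default_cfg_path or "default_config.yaml"

cv = Service()

@app.route("/cfg")
def cfg():
    return jsonify({"cfg_path": cv.cfg_path})

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--default_cfg_path", type=str, default=None)
    args = parser.parse_args()
    cv.init(args.default_cfg_path)  # initialize before serving
    app.run()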

@@ -57,6 +57,7 @@ class LoLLMsServer(LollmsApplication):
        parser.add_argument('--reset_personal_path', action='store_true', help='Reset the personal path')
        parser.add_argument('--reset_config', action='store_true', help='Reset the configurations')
        parser.add_argument('--reset_installs', action='store_true', help='Reset all installation status')
+        parser.add_argument('--default_cfg_path', type=str, default=None, help='Path to a custom default configuration file')
        args = parser.parse_args()
@@ -68,7 +69,7 @@ class LoLLMsServer(LollmsApplication):
            LollmsPaths.reset_configs()
        if args.reset_config:
-            lollms_paths = LollmsPaths.find_paths(tool_prefix="lollms_server_")
+            lollms_paths = LollmsPaths.find_paths(custom_default_cfg_path=args.default_cfg_path, tool_prefix="lollms_server_")
            cfg_path = lollms_paths.personal_configuration_path / f"{lollms_paths.tool_prefix}local_config.yaml"
            try:
                cfg_path.unlink()

@@ -1,5 +1,5 @@
# =================== Lord Of Large Language Models Configuration file ===========================
-version: 24
+version: 26
binding_name: null
model_name: null
@@ -15,6 +15,7 @@ discussion_prompt_separator: "!@>"
seed: -1
n_predict: 1024
ctx_size: 2048
+min_n_predict: 256
temperature: 0.9
top_k: 50
top_p: 0.95

@@ -116,6 +116,7 @@ class LOLLMSConfig(BaseConfig):
                del self.config[key]
                removed_entries.append(key)
        ASCIIColors.yellow(default_config.config)
+        self["version"] = default_config["version"]
        return self, added_entries, removed_entries
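
The added assignment records the migration: sync reconciles keys against the packaged defaults, and bumping the stored version presumably keeps the file from being flagged as outdated on every load. A dict-based sketch of the behavior (not the actual LOLLMSConfig code):

# Add new keys, drop stale ones, then bump version so the migration is recorded.
def sync(config, default_config):
    added = [k for k in default_config if k not in config]
    removed = [k for k in config if k not in default_config]
    for key in added:
        config[key] = default_config[key]
    for key in removed:
        del config[key]
    config["version"] = default_config["version"]  # the line added in this commit
    return config, added, removed

old = {"version": 24, "n_predict": 1024}
defaults = {"version": 26, "n_predict": 1024, "min_n_predict": 256}
print(sync(old, defaults))
# -> ({'version': 26, 'n_predict': 1024, 'min_n_predict': 256}, ['min_n_predict'], [])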

@@ -1,6 +1,6 @@
from pathlib import Path
import shutil
-from ascii_colors import ASCIIColors
+from ascii_colors import ASCIIColors, trace_exception
from lollms.config import BaseConfig
import subprocess
import os
@@ -176,12 +176,15 @@ class LollmsPaths:
            cfg.save_config(global_paths_cfg_path)
            found = True
-        return LollmsPaths(global_paths_cfg_path, cfg.lollms_path, cfg.lollms_personal_path, custom_default_cfg_path=custom_default_cfg_path)
+        return LollmsPaths(global_paths_cfg_path, cfg.lollms_path, cfg.lollms_personal_path, custom_default_cfg_path=self.default_cfg_path)

    @staticmethod
    def find_paths(force_local=False, custom_default_cfg_path=None, tool_prefix=""):
        lollms_path = Path(__file__).parent
-        global_paths_cfg_path = Path(f"./{tool_prefix}global_paths_cfg.yaml")
+        if custom_default_cfg_path is None:
+            global_paths_cfg_path = Path(f"./{tool_prefix}global_paths_cfg.yaml")
+        else:
+            global_paths_cfg_path = Path(custom_default_cfg_path)
        if global_paths_cfg_path.exists():
            try:
                cfg = BaseConfig()
@@ -194,9 +197,10 @@ class LollmsPaths:
                ASCIIColors.warning(f"{lollms_path}")
                ASCIIColors.warning(f"{lollms_personal_path}")
                raise Exception("Wrong configuration file")
-            return LollmsPaths(global_paths_cfg_path, lollms_path, lollms_personal_path, custom_default_cfg_path=custom_default_cfg_path, tool_prefix=tool_prefix)
+            return LollmsPaths(global_paths_cfg_path, lollms_path, lollms_personal_path, tool_prefix=tool_prefix)
        except Exception as ex:
-            print(f"{ASCIIColors.color_red}Global paths configuration file found but seems to be corrupted{ASCIIColors.color_reset}")
+            ASCIIColors.error("Global paths configuration file found but seems to be corrupted")
+            trace_exception(ex)
            print("Couldn't find your personal data path!")
            cfg.lollms_path = lollms_path
            cfg["lollms_personal_path"] = str(Path.home()/"Documents/lollms")
@@ -238,7 +242,6 @@
            "lollms_path": str(Path(__file__).parent),
            "lollms_personal_path": str(Path.home()/"Documents/lollms")
        })
        cfg.lollms_personal_path = input(f"Folder path: ({cfg.lollms_personal_path}):")
        if cfg.lollms_personal_path == "":
            cfg.lollms_personal_path = str(Path.home()/"Documents/lollms")
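
find_paths now lets a caller-supplied file replace the per-tool global paths file; this is the hook the Docker image uses via --default_cfg_path /app/elf_docker_cfg/config_paths.yaml. The resolution rule as a standalone sketch (not part of the commit; the real code then loads lollms_path / lollms_personal_path from the chosen file):

from pathlib import Path
from typing import Optional

def resolve_global_paths_cfg(tool_prefix: str,
                             custom_default_cfg_path: Optional[str]) -> Path:
    if custom_default_cfg_path is None:
        # Default: a per-tool file in the working directory,
        # e.g. ./lollms_elf_global_paths_cfg.yaml
        return Path(f"./{tool_prefix}global_paths_cfg.yaml")
    # Custom: the caller-provided path wins.
    return Path(custom_default_cfg_path)

print(resolve_global_paths_cfg("lollms_elf_", None))
print(resolve_global_paths_cfg("lollms_elf_", "/app/elf_docker_cfg/config_paths.yaml"))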

@@ -16,4 +16,5 @@ mplcursors
scikit-learn
ascii_colors
-safe_store
+safe_store
+ascii_colors

zoos/bindings_zoo Submodule

@@ -0,0 +1 @@
Subproject commit 5a9127f3ee9e6592d5d9aeaf2dc26def75123d01