Enhanced code

This commit is contained in:
Saifeddine ALOUI 2023-10-30 01:32:09 +01:00
parent 3edd29f948
commit e08bb1f052
9 changed files with 578 additions and 64 deletions

View File

@ -95,6 +95,25 @@ class LollmsApplication:
self.mount_personalities()
self.mount_extensions()
def safe_generate(self, full_discussion: str, n_predict=None, callback: Callable[[str, int, dict], bool] = None):
    """Generate model output for a discussion, truncated to fit the context window.

    Args:
        full_discussion (str): A prompt or a long discussion to use for generation.
        n_predict (int, optional): Maximum number of tokens to generate. Defaults
            to the personality's ``model_n_predicts`` when None.
        callback (Callable, optional): Called for each received token. Defaults to None.

    Returns:
        str: Model output.
    """
    # PEP 8: compare to None with `is`, not `==`.
    if n_predict is None:
        n_predict = self.personality.model_n_predicts
    tk = self.personality.model.tokenize(full_discussion)
    n_tokens = len(tk)
    # Keep only the tail of the discussion that fits in the context window,
    # reserving n_cond_tk tokens for the personality conditioning text.
    fd = self.personality.model.detokenize(tk[-min(self.config.ctx_size - self.n_cond_tk, n_tokens):])
    self.bot_says = ""
    output = self.personality.model.generate(self.personality.personality_conditioning + fd, n_predict=n_predict, callback=callback)
    return output
def notify(self, content, is_success, client_id=None):
if is_success:
ASCIIColors.yellow(content)

View File

@ -137,24 +137,7 @@ Participating personalities:
full_discussion = ""
return full_discussion
def safe_generate(self, full_discussion: str, n_predict=None, callback: Callable[[str, int, dict], bool] = None):
    """Generate a model response, clipping the discussion to the context size.

    Args:
        full_discussion (str): A prompt or a long discussion to use for generation.
        n_predict (int, optional): Max tokens to generate; falls back to the
            personality's ``model_n_predicts`` when None.
        callback (Callable, optional): Invoked for each received token. Defaults to None.

    Returns:
        str: Model output.
    """
    # Identity comparison with None is the idiomatic (and safe) form.
    if n_predict is None:
        n_predict = self.personality.model_n_predicts
    tk = self.personality.model.tokenize(full_discussion)
    n_tokens = len(tk)
    # Detokenize only the newest tokens that fit alongside the conditioning
    # (self.n_cond_tk tokens are reserved for it inside ctx_size).
    fd = self.personality.model.detokenize(tk[-min(self.config.ctx_size - self.n_cond_tk, n_tokens):])
    self.bot_says = ""
    output = self.personality.model.generate(self.personality.personality_conditioning + fd, n_predict=n_predict, callback=callback)
    return output
def remove_text_from_string(self, string, text_to_find):
"""

View File

@ -1 +1,2 @@
discord_config.yaml
discord_config.yaml
data

View File

@ -0,0 +1,31 @@
# Lollms Discord Bot
## Description
This project is a Discord bot that utilizes the lollms library to provide assistance and generate responses based on user input.
## Installation
1. Clone the repository:
```shell
git clone https://github.com/your-username/your-repository.git
```
2. Install the required dependencies:
```shell
pip install -r requirements.txt
```
3. Create a `discord_config.yaml` file in the project directory and add your bot token to it.
## Usage
1. Run the bot:
```shell
python main.py
```
2. The bot will join your Discord server and send a welcome message to new members.
3. Use the following commands to interact with the bot:
- `!lollms <prompt>`: Chat with the bot and receive generated responses based on the input prompt.
- `!install`: Get instructions on how to install the lollms library.
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
## License
[Apache 2.0](https://choosealicense.com/licenses/apache-2.0/)

View File

@ -1,10 +1,27 @@
import discord
from discord.ext import commands
from lollms.apps.console import Conversation
from lollms.app import LollmsApplication
from lollms.paths import LollmsPaths
from lollms.main_config import LOLLMSConfig
import yaml
from pathlib import Path
from ascii_colors import ASCIIColors
from safe_store import TextVectorizer, GenericDataLoader, VisualizationMethod, VectorizationMethod
# NOTE(review): config_path is assigned twice; the first assignment below looks
# like a leftover from the diff's removed line — confirm against the actual file.
config_path = Path(__file__).resolve().parent / 'discord_config.yaml'
# Resolve all paths relative to this script's own directory.
current_path = Path(__file__).resolve().parent
config_path = current_path / 'discord_config.yaml'
# Folder holding documents to index for retrieval-augmented answers.
data_folder = current_path / "data"
data_folder.mkdir(parents=True, exist_ok=True)
# TF-IDF vectorizer persisted to db.json so the index survives restarts.
# (Name keeps the original spelling `text_vectorzer` — used elsewhere in the file.)
text_vectorzer = TextVectorizer(
database_path=data_folder / "db.json",
vectorization_method=VectorizationMethod.TFIDF_VECTORIZER,
save_db=True
)
# Index every supported document (txt/md/pdf) found in the data folder.
files = [f for f in data_folder.iterdir() if f.suffix in [".txt", ".md", ".pdf"]]
for f in files:
txt = GenericDataLoader.read_file(f)
# Chunk size 512 with overlap 128 — presumably characters/tokens; TODO confirm
# against the safe_store API.
text_vectorzer.add_document(f, txt, 512, 128, False)
text_vectorzer.index()
if not config_path.exists():
bot_token = input("Please enter your bot token: ")
@ -16,34 +33,66 @@ else:
config = yaml.safe_load(config_file)
bot_token = config['bot_token']
bot = commands.Bot(command_prefix='!', intents=discord.Intents.all())
client = commands.Bot(command_prefix='!', intents=discord.Intents.all())
# Discord cog providing the legacy !chat / !install commands (removed in favor of
# plain event handlers in this commit, per the surrounding diff).
class ChatBot(commands.Cog):
def __init__(self, bot):
self.bot = bot
# Conversation drives generation through the lollms console app.
self.cv = Conversation()
# Per-user discussion history, keyed by Discord author id.
self.context = {}
# NOTE(review): the next three lines look like module-level app setup that the
# diff rendering interleaved into the class body — confirm placement in the file.
lollms_paths = LollmsPaths.find_paths(force_local=False, tool_prefix="lollms_discord_")
config = LOLLMSConfig.autoload(lollms_paths)
lollms_app = LollmsApplication("lollms_bot", config=config, lollms_paths=lollms_paths)
@commands.command()
async def chat(self, ctx, *, prompt):
# Append the user's prompt to their running context and mark the bot's turn.
self.context[ctx.author.id] = f'{self.context.get(ctx.author.id, "")}:{prompt}\nlollms_chatbot:'
def callback(text, type=None):
# Accumulate streamed tokens directly into the user's context.
self.context[ctx.author.id] += text
return True
self.cv.safe_generate(self.context[ctx.author.id], callback=callback)
await ctx.send(self.context[ctx.author.id])
@commands.command()
async def install(self, ctx):
# NOTE(review): `response` is built but never sent — there is no
# `await ctx.send(response)` here; looks like a dropped line. Verify.
response = "To install lollms, make sure you have installed the currently supported python version (consult the repository to verify what version is currently supported, but as of 10/22/2023, the version is 3.10).\nThen you can follow these steps:\n1. Open your command line interface.\n2. Navigate to the directory where you want to install lollms.\n3. Run the following command to clone the lollms repository: `git clone https://github.com/lollms/lollms.git`.\n4. Once the repository is cloned, navigate into the lollms directory.\n5. Run `pip install -r requirements.txt` to install the required dependencies.\n6. You can now use lollms by importing it in your Python code."
@commands.Cog.listener()
async def on_member_join(self, member):
# Greet new members in the guild's system channel, when one is configured.
channel = member.guild.system_channel
if channel is not None:
await channel.send(f'Welcome {member.mention} to the server! How can I assist you today?')
@bot.event
@client.event
async def on_ready():
print(f'Logged in as {bot.user}')
print(f'Logged in as {client.user}')
print('------')
bot.add_cog(ChatBot(bot))
bot.run(bot_token)
@client.event
async def on_member_join(member):
channel = member.guild.system_channel
if channel is not None:
await channel.send(f'Welcome {member.mention} to the server! How can I assist you today?')
@client.event
async def on_message(message):
    """Handle incoming messages: answer !lollms prompts (RAG-augmented) and !install.

    Ignores the bot's own messages to avoid feedback loops.
    """
    if message.author == client.user:
        return
    if message.content.startswith('!lollms'):
        # Strip the full command prefix. '!lollms' is 7 characters long; the
        # previous `[6:]` slice left a stray leading 's' on every prompt.
        prompt = message.content[len('!lollms'):]
        print("Chatting")
        # Retrieve the 3 most relevant indexed chunks to ground the answer.
        docs, _ = text_vectorzer.recover_text(prompt, 3)
        docs = '\n'.join(docs)
        context = f"""!#>instruction:
{lollms_app.personality.personality_conditioning}
!#>Informations:
Current model:{lollms_app.config.model_name}
Current personality:{lollms_app.personality.name}
!#>Documentation:
{docs}
!#>{message.author.id}: {prompt}
!#>{lollms_app.personality.ai_message_prefix}: """
        print("Context:" + context)
        # Mutable cell so the nested callback can accumulate streamed tokens.
        out = ['']
        def callback(text, type=None):
            out[0] += text
            print(text, end="")
            return True
        ASCIIColors.green("Warming up")
        lollms_app.safe_generate(context, n_predict=1024, callback=callback)
        print()
        await message.channel.send(out[0])
    elif message.content.startswith('!install'):
        response = "To install lollms, make sure you have installed the currently supported python version (consult the repository to verify what version is currently supported, but as of 10/22/2023, the version is 3.10).\nThen you can follow these steps:\n1. Open your command line interface.\n2. Navigate to the directory where you want to install lollms.\n3. Run the following command to clone the lollms repository: `git clone https://github.com/lollms/lollms.git`.\n4. Once the repository is cloned, navigate into the lollms directory.\n5. Run `pip install -r requirements.txt` to install the required dependencies.\n6. You can now use lollms by importing it in your Python code."
        await message.channel.send(response)
@client.event
async def on_ready():
print(f'Logged in as {client.user}')
print('------')
@client.event
async def on_member_join(member):
channel = member.guild.system_channel
if channel is not None:
await channel.send(f'Welcome {member.mention} to the server! How can I assist you today?')
client.run(bot_token)

View File

@ -157,7 +157,6 @@ Participating personalities:
return string
def main():
tool_prefix = "lollms_server_"
# Create the argument parser
parser = argparse.ArgumentParser(description='App Description')
@ -184,6 +183,7 @@ def main():
# Parse the command-line arguments
args = parser.parse_args()
tool_prefix = args.tool_prefix
if args.reset_installs:
LollmsApplication.reset_all_installs()
@ -192,7 +192,7 @@ def main():
LollmsPaths.reset_configs()
if args.reset_config:
lollms_paths = LollmsPaths.find_paths(custom_default_cfg_path=args.default_cfg_path, tool_prefix=tool_prefix)
lollms_paths = LollmsPaths.find_paths(custom_default_cfg_path=args.default_cfg_path, tool_prefix=tool_prefix, force_personal_path=args.set_personal_folder_path)
cfg_path = lollms_paths.personal_configuration_path / f"{lollms_paths.tool_prefix}local_config.yaml"
try:
cfg_path.unlink()
@ -200,7 +200,7 @@ def main():
except:
ASCIIColors.success("Couldn't reset LOLLMS configuration")
else:
lollms_paths = LollmsPaths.find_paths(force_local=False, tool_prefix=tool_prefix)
lollms_paths = LollmsPaths.find_paths(force_local=False, tool_prefix=tool_prefix, force_personal_path=args.set_personal_folder_path)
configuration_path = args.configuration_path
@ -242,6 +242,7 @@ def main():
with tqdm(total=100, unit="%", desc="Download Progress", ncols=80) as tqdm_bar:
settings_app.config.download_model(args.install_model,settings_app.binding, progress_callback)
settings_app.config.model_name = args.install_model.split("/")[-1]
settings_app.model = settings_app.binding.build_model()
settings_app.config.save_config()

View File

@ -209,7 +209,7 @@ class LollmsPaths:
return LollmsPaths(global_paths_cfg_path, cfg.lollms_path, cfg.lollms_personal_path, custom_default_cfg_path=self.default_cfg_path)
@staticmethod
def find_paths(force_local=False, custom_default_cfg_path=None, custom_global_paths_cfg_path=None, tool_prefix=""):
def find_paths(force_local=False, custom_default_cfg_path=None, custom_global_paths_cfg_path=None, tool_prefix="", force_personal_path=None):
lollms_path = Path(__file__).parent
if custom_global_paths_cfg_path is None:
global_paths_cfg_path = Path(f"./{tool_prefix}global_paths_cfg.yaml")
@ -218,6 +218,7 @@ class LollmsPaths:
ASCIIColors.cyan(f"Trying to use Configuration at :{global_paths_cfg_path}")
if global_paths_cfg_path.exists():
ASCIIColors.green(f"{global_paths_cfg_path} found!")
try:
cfg = BaseConfig()
cfg.load_config(global_paths_cfg_path)
@ -245,9 +246,11 @@ class LollmsPaths:
lollms_personal_path = cfg.lollms_personal_path
return LollmsPaths(global_paths_cfg_path, lollms_path, lollms_personal_path, custom_default_cfg_path=custom_default_cfg_path, tool_prefix=tool_prefix)
else:
ASCIIColors.red(f"{global_paths_cfg_path} not found! Searching in your home folder.")
# if the app is not forcing a specific path, then try to find out if the default installed library has specified a default path
global_paths_cfg_path = Path.home()/f"{tool_prefix}global_paths_cfg.yaml"
if global_paths_cfg_path.exists():
ASCIIColors.green(f"{global_paths_cfg_path} found!")
cfg = BaseConfig()
cfg.load_config(global_paths_cfg_path)
try:
@ -263,21 +266,11 @@ class LollmsPaths:
lollms_personal_path = cfg.lollms_personal_path
return LollmsPaths(global_paths_cfg_path, lollms_path, lollms_personal_path, custom_default_cfg_path=custom_default_cfg_path, tool_prefix=tool_prefix)
else: # First time
print(f"{ASCIIColors.color_green}Welcome! It seems this is your first use of the new lollms app.{ASCIIColors.color_reset}")
print(f"To make it clear where your data are stored, we now give the user the choice where to put its data.")
print(f"This allows you to mutualize models which are heavy, between multiple lollms compatible apps.")
print(f"You can change this at any time using the lollms-settings script or by simply change the content of the global_paths_cfg.yaml file.")
found = False
while not found:
print(f"Please provide a folder to store your configurations files, your models and your personal data (database, custom personalities etc).")
if force_personal_path is not None:
cfg = BaseConfig(config={
"lollms_path":str(Path(__file__).parent),
"lollms_personal_path":str(Path.home()/"Documents/lollms")
"lollms_personal_path":force_personal_path
})
cfg.lollms_personal_path = input(f"Folder path: ({cfg.lollms_personal_path}):")
if cfg.lollms_personal_path=="":
cfg.lollms_personal_path = str(Path.home()/"Documents/lollms")
print(f"Selected: {cfg.lollms_personal_path}")
pp= Path(cfg.lollms_personal_path)
if not pp.exists():
@ -285,13 +278,42 @@ class LollmsPaths:
pp.mkdir(parents=True)
except:
print(f"{ASCIIColors.color_red}It seams there is an error in the path you rovided{ASCIIColors.color_reset}")
continue
if force_local:
global_paths_cfg_path = Path(f"./{tool_prefix}global_paths_cfg.yaml")
else:
global_paths_cfg_path = Path.home()/f"{tool_prefix}global_paths_cfg.yaml"
cfg.save_config(global_paths_cfg_path)
found = True
else:
print(f"{ASCIIColors.color_green}Welcome! It seems this is your first use of the new lollms app.{ASCIIColors.color_reset}")
print(f"To make it clear where your data are stored, we now give the user the choice where to put its data.")
print(f"This allows you to mutualize models which are heavy, between multiple lollms compatible apps.")
print(f"You can change this at any time using the lollms-settings script or by simply change the content of the global_paths_cfg.yaml file.")
found = False
while not found:
print(f"Please provide a folder to store your configurations files, your models and your personal data (database, custom personalities etc).")
cfg = BaseConfig(config={
"lollms_path":str(Path(__file__).parent),
"lollms_personal_path":str(Path.home()/"Documents/lollms")
})
cfg.lollms_personal_path = input(f"Folder path: ({cfg.lollms_personal_path}):")
if cfg.lollms_personal_path=="":
cfg.lollms_personal_path = str(Path.home()/"Documents/lollms")
print(f"Selected: {cfg.lollms_personal_path}")
pp= Path(cfg.lollms_personal_path)
if not pp.exists():
try:
pp.mkdir(parents=True)
except:
print(f"{ASCIIColors.color_red}It seams there is an error in the path you rovided{ASCIIColors.color_reset}")
continue
if force_local:
global_paths_cfg_path = Path(f"./{tool_prefix}global_paths_cfg.yaml")
else:
global_paths_cfg_path = Path.home()/f"{tool_prefix}global_paths_cfg.yaml"
cfg.save_config(global_paths_cfg_path)
found = True
return LollmsPaths(global_paths_cfg_path, cfg.lollms_path, cfg.lollms_personal_path, custom_default_cfg_path=custom_default_cfg_path, tool_prefix=tool_prefix)

View File

@ -0,0 +1,408 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Lm1ahQJs7lMk",
"outputId": "0af9d0e2-2385-4123-b2ca-4f82ae29b374"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Looking in indexes: https://download.pytorch.org/whl/cu118\n",
"Requirement already satisfied: torch in /usr/local/lib/python3.10/dist-packages (2.1.0+cu118)\n",
"Requirement already satisfied: torchvision in /usr/local/lib/python3.10/dist-packages (0.16.0+cu118)\n",
"Requirement already satisfied: torchaudio in /usr/local/lib/python3.10/dist-packages (2.1.0+cu118)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch) (3.12.4)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch) (4.5.0)\n",
"Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch) (1.12)\n",
"Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch) (3.2)\n",
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch) (3.1.2)\n",
"Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch) (2023.6.0)\n",
"Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch) (2.1.0)\n",
"Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torchvision) (1.23.5)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from torchvision) (2.31.0)\n",
"Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.10/dist-packages (from torchvision) (9.4.0)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch) (2.1.3)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (3.3.1)\n",
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (3.4)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (2.0.7)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (2023.7.22)\n",
"Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch) (1.3.0)\n",
"Requirement already satisfied: lollms in /usr/local/lib/python3.10/dist-packages (5.9.4)\n",
"Collecting lollms\n",
" Downloading lollms-5.9.5-py3-none-any.whl (164 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m164.7/164.7 kB\u001b[0m \u001b[31m4.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from lollms) (4.66.1)\n",
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from lollms) (6.0.1)\n",
"Requirement already satisfied: Pillow in /usr/local/lib/python3.10/dist-packages (from lollms) (9.4.0)\n",
"Requirement already satisfied: flask in /usr/local/lib/python3.10/dist-packages (from lollms) (2.2.5)\n",
"Requirement already satisfied: flask-socketio in /usr/local/lib/python3.10/dist-packages (from lollms) (5.3.6)\n",
"Requirement already satisfied: flask-cors in /usr/local/lib/python3.10/dist-packages (from lollms) (4.0.0)\n",
"Requirement already satisfied: simple-websocket in /usr/local/lib/python3.10/dist-packages (from lollms) (1.0.0)\n",
"Requirement already satisfied: eventlet in /usr/local/lib/python3.10/dist-packages (from lollms) (0.33.3)\n",
"Requirement already satisfied: wget in /usr/local/lib/python3.10/dist-packages (from lollms) (3.2)\n",
"Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from lollms) (67.7.2)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from lollms) (2.31.0)\n",
"Requirement already satisfied: matplotlib in /usr/local/lib/python3.10/dist-packages (from lollms) (3.7.1)\n",
"Requirement already satisfied: seaborn in /usr/local/lib/python3.10/dist-packages (from lollms) (0.12.2)\n",
"Requirement already satisfied: mplcursors in /usr/local/lib/python3.10/dist-packages (from lollms) (0.5.2)\n",
"Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (from lollms) (1.2.2)\n",
"Requirement already satisfied: ascii-colors in /usr/local/lib/python3.10/dist-packages (from lollms) (0.1.3)\n",
"Requirement already satisfied: safe-store in /usr/local/lib/python3.10/dist-packages (from lollms) (0.2.4)\n",
"Requirement already satisfied: dnspython>=1.15.0 in /usr/local/lib/python3.10/dist-packages (from eventlet->lollms) (2.4.2)\n",
"Requirement already satisfied: greenlet>=0.3 in /usr/local/lib/python3.10/dist-packages (from eventlet->lollms) (3.0.0)\n",
"Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from eventlet->lollms) (1.16.0)\n",
"Requirement already satisfied: Werkzeug>=2.2.2 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (3.0.1)\n",
"Requirement already satisfied: Jinja2>=3.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (3.1.2)\n",
"Requirement already satisfied: itsdangerous>=2.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (2.1.2)\n",
"Requirement already satisfied: click>=8.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (8.1.7)\n",
"Requirement already satisfied: python-socketio>=5.0.2 in /usr/local/lib/python3.10/dist-packages (from flask-socketio->lollms) (5.10.0)\n",
"Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.1.1)\n",
"Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (0.12.1)\n",
"Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (4.43.1)\n",
"Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.4.5)\n",
"Requirement already satisfied: numpy>=1.20 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.23.5)\n",
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (23.2)\n",
"Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (3.1.1)\n",
"Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (2.8.2)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (3.3.1)\n",
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (3.4)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (2.0.7)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (2023.7.22)\n",
"Requirement already satisfied: python-docx in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (1.0.1)\n",
"Requirement already satisfied: python-pptx in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (0.6.22)\n",
"Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (1.5.3)\n",
"Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (4.11.2)\n",
"Requirement already satisfied: PyPDF2 in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (3.0.1)\n",
"Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (1.11.3)\n",
"Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (1.3.2)\n",
"Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (3.2.0)\n",
"Requirement already satisfied: wsproto in /usr/local/lib/python3.10/dist-packages (from simple-websocket->lollms) (1.2.0)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from Jinja2>=3.0->flask->lollms) (2.1.3)\n",
"Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->safe-store->lollms) (2023.3.post1)\n",
"Requirement already satisfied: bidict>=0.21.0 in /usr/local/lib/python3.10/dist-packages (from python-socketio>=5.0.2->flask-socketio->lollms) (0.22.1)\n",
"Requirement already satisfied: python-engineio>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from python-socketio>=5.0.2->flask-socketio->lollms) (4.8.0)\n",
"Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4->safe-store->lollms) (2.5)\n",
"Requirement already satisfied: lxml>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from python-docx->safe-store->lollms) (4.9.3)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from python-docx->safe-store->lollms) (4.5.0)\n",
"Requirement already satisfied: XlsxWriter>=0.5.7 in /usr/local/lib/python3.10/dist-packages (from python-pptx->safe-store->lollms) (3.1.9)\n",
"Requirement already satisfied: h11<1,>=0.9.0 in /usr/local/lib/python3.10/dist-packages (from wsproto->simple-websocket->lollms) (0.14.0)\n",
"Installing collected packages: lollms\n",
" Attempting uninstall: lollms\n",
" Found existing installation: lollms 5.9.4\n",
" Uninstalling lollms-5.9.4:\n",
" Successfully uninstalled lollms-5.9.4\n",
"Successfully installed lollms-5.9.5\n"
]
}
],
"source": [
"!pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118\n",
"!pip install lollms --upgrade"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "YP6kXgau8Wej",
"outputId": "4a9492f9-4843-4e56-9cd1-7e031b337a30"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Cloning into 'lollms_bindings_zoo'...\n",
"remote: Enumerating objects: 2153, done.\u001b[K\n",
"remote: Counting objects: 100% (1095/1095), done.\u001b[K\n",
"remote: Compressing objects: 100% (337/337), done.\u001b[K\n",
"remote: Total 2153 (delta 832), reused 995 (delta 754), pack-reused 1058\u001b[K\n",
"Receiving objects: 100% (2153/2153), 6.34 MiB | 21.72 MiB/s, done.\n",
"Resolving deltas: 100% (1577/1577), done.\n",
"Cloning into 'lollms_extensions_zoo'...\n",
"remote: Enumerating objects: 82, done.\u001b[K\n",
"remote: Counting objects: 100% (82/82), done.\u001b[K\n",
"remote: Compressing objects: 100% (48/48), done.\u001b[K\n",
"remote: Total 82 (delta 31), reused 51 (delta 12), pack-reused 0\u001b[K\n",
"Receiving objects: 100% (82/82), 928.75 KiB | 6.07 MiB/s, done.\n",
"Resolving deltas: 100% (31/31), done.\n",
"Cloning into 'models_zoo'...\n",
"remote: Enumerating objects: 55, done.\u001b[K\n",
"remote: Counting objects: 100% (55/55), done.\u001b[K\n",
"remote: Compressing objects: 100% (39/39), done.\u001b[K\n",
"remote: Total 55 (delta 27), reused 39 (delta 15), pack-reused 0\u001b[K\n",
"Receiving objects: 100% (55/55), 225.38 KiB | 2.21 MiB/s, done.\n",
"Resolving deltas: 100% (27/27), done.\n",
"Cloning into 'lollms_personalities_zoo'...\n",
"remote: Enumerating objects: 5986, done.\u001b[K\n",
"remote: Counting objects: 100% (1657/1657), done.\u001b[K\n",
"remote: Compressing objects: 100% (598/598), done.\u001b[K\n",
"remote: Total 5986 (delta 1046), reused 1443 (delta 959), pack-reused 4329\u001b[K\n",
"Receiving objects: 100% (5986/5986), 65.03 MiB | 44.66 MiB/s, done.\n",
"Resolving deltas: 100% (2745/2745), done.\n"
]
}
],
"source": [
"!mkdir zoos\n",
"!cd zoos\n",
"!git clone https://github.com/ParisNeo/lollms_bindings_zoo.git\n",
"!git clone https://github.com/ParisNeo/lollms_extensions_zoo.git\n",
"!git clone https://github.com/ParisNeo/models_zoo.git\n",
"!git clone https://github.com/ParisNeo/lollms_personalities_zoo.git\n",
"!cd ..\n",
"\n"
]
},
{
"cell_type": "code",
"source": [
"!lollms-settings --silent --set_personal_folder_path ./personal_data --install_binding exllama2 --install_model TheBloke/Mistral-7B-Instruct-v0.1-GPTQ"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "DSsf2L1kPc0K",
"outputId": "5a7c6af4-99f0-4c6f-a1cc-cb174b968879"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"\u001b[36mTrying to use Configuration at :lollms_server_global_paths_cfg.yaml\u001b[0m\n",
"\u001b[31mlollms_server_global_paths_cfg.yaml not found! Searching in your home folder.\u001b[0m\n",
"Selected: ./personal_data\n",
"\u001b[33mExecution path : /content\u001b[0m\n",
"\u001b[32mLocal zoos folder found\u001b[0m\n",
"\u001b[32m----------------------Paths information-----------------------\u001b[0m\n",
"\u001b[31mpersonal_path:\u001b[0m\u001b[33m/content/personal_data\u001b[0m\n",
"\u001b[31mpersonal_configuration_path:\u001b[0m\u001b[33m/content/personal_data/configs\u001b[0m\n",
"\u001b[31mpersonal_databases_path:\u001b[0m\u001b[33m/content/personal_data/databases\u001b[0m\n",
"\u001b[31mpersonal_models_path:\u001b[0m\u001b[33m/content/personal_data/models\u001b[0m\n",
"\u001b[31mpersonal_user_infos_path:\u001b[0m\u001b[33m/content/personal_data/user_infos\u001b[0m\n",
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers\u001b[0m\n",
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers/gptqlora\u001b[0m\n",
"\u001b[31mpersonal_data_path:\u001b[0m\u001b[33m/content/personal_data/data\u001b[0m\n",
"\u001b[31mcustom_personalities_path:\u001b[0m\u001b[33m/content/personal_data/custom_personalities\u001b[0m\n",
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
"\u001b[32m----------------------Zoo information-----------------------\u001b[0m\n",
"\u001b[31mbindings_zoo_path:\u001b[0m\u001b[33m/content/zoos/bindings_zoo\u001b[0m\n",
"\u001b[31mpersonalities_zoo_path:\u001b[0m\u001b[33m/content/zoos/personalities_zoo\u001b[0m\n",
"\u001b[31mextensions_zoo_path:\u001b[0m\u001b[33m/content/zoos/extensions_zoo\u001b[0m\n",
"\u001b[31mmodels_zoo_path:\u001b[0m\u001b[33m/content/zoos/models_zoo\u001b[0m\n",
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
"\u001b[34;1mNo bindings found in your personal space.\n",
"Cloning the personalities zoo\u001b[0m\n",
"Cloning into '/content/zoos/bindings_zoo'...\n",
"remote: Enumerating objects: 2153, done.\u001b[K\n",
"remote: Counting objects: 100% (1095/1095), done.\u001b[K\n",
"remote: Compressing objects: 100% (337/337), done.\u001b[K\n",
"remote: Total 2153 (delta 832), reused 995 (delta 754), pack-reused 1058\u001b[K\n",
"Receiving objects: 100% (2153/2153), 6.34 MiB | 21.51 MiB/s, done.\n",
"Resolving deltas: 100% (1577/1577), done.\n",
"\u001b[34;1mNo personalities found in your personal space.\n",
"Cloning the personalities zoo\u001b[0m\n",
"Cloning into '/content/zoos/personalities_zoo'...\n",
"remote: Enumerating objects: 5986, done.\u001b[K\n",
"remote: Counting objects: 100% (1657/1657), done.\u001b[K\n",
"remote: Compressing objects: 100% (598/598), done.\u001b[K\n",
"remote: Total 5986 (delta 1046), reused 1443 (delta 959), pack-reused 4329\u001b[K\n",
"Receiving objects: 100% (5986/5986), 65.03 MiB | 46.31 MiB/s, done.\n",
"Resolving deltas: 100% (2745/2745), done.\n",
"\u001b[34;1mNo extensions found in your personal space.\n",
"Cloning the extensions zoo\u001b[0m\n",
"Cloning into '/content/zoos/extensions_zoo'...\n",
"remote: Enumerating objects: 82, done.\u001b[K\n",
"remote: Counting objects: 100% (82/82), done.\u001b[K\n",
"remote: Compressing objects: 100% (48/48), done.\u001b[K\n",
"remote: Total 82 (delta 31), reused 51 (delta 12), pack-reused 0\u001b[K\n",
"Receiving objects: 100% (82/82), 928.75 KiB | 6.07 MiB/s, done.\n",
"Resolving deltas: 100% (31/31), done.\n",
"\u001b[34;1mNo models found in your personal space.\n",
"Cloning the models zoo\u001b[0m\n",
"Cloning into '/content/zoos/models_zoo'...\n",
"remote: Enumerating objects: 55, done.\u001b[K\n",
"remote: Counting objects: 100% (55/55), done.\u001b[K\n",
"remote: Compressing objects: 100% (39/39), done.\u001b[K\n",
"remote: Total 55 (delta 27), reused 39 (delta 15), pack-reused 0\u001b[K\n",
"Receiving objects: 100% (55/55), 225.38 KiB | 2.75 MiB/s, done.\n",
"Resolving deltas: 100% (27/27), done.\n",
"\u001b[32mSelected personal path: ./personal_data\u001b[0m\n",
"\u001b[36mTrying to use Configuration at :/root/lollms_server_global_paths_cfg.yaml\u001b[0m\n",
"\u001b[32m/root/lollms_server_global_paths_cfg.yaml found!\u001b[0m\n",
"\u001b[33mExecution path : /content\u001b[0m\n",
"\u001b[32mLocal zoos folder found\u001b[0m\n",
"\u001b[32m----------------------Paths information-----------------------\u001b[0m\n",
"\u001b[31mpersonal_path:\u001b[0m\u001b[33m/content/personal_data\u001b[0m\n",
"\u001b[31mpersonal_configuration_path:\u001b[0m\u001b[33m/content/personal_data/configs\u001b[0m\n",
"\u001b[31mpersonal_databases_path:\u001b[0m\u001b[33m/content/personal_data/databases\u001b[0m\n",
"\u001b[31mpersonal_models_path:\u001b[0m\u001b[33m/content/personal_data/models\u001b[0m\n",
"\u001b[31mpersonal_user_infos_path:\u001b[0m\u001b[33m/content/personal_data/user_infos\u001b[0m\n",
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers\u001b[0m\n",
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers/gptqlora\u001b[0m\n",
"\u001b[31mpersonal_data_path:\u001b[0m\u001b[33m/content/personal_data/data\u001b[0m\n",
"\u001b[31mcustom_personalities_path:\u001b[0m\u001b[33m/content/personal_data/custom_personalities\u001b[0m\n",
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
"\u001b[32m----------------------Zoo information-----------------------\u001b[0m\n",
"\u001b[31mbindings_zoo_path:\u001b[0m\u001b[33m/content/zoos/bindings_zoo\u001b[0m\n",
"\u001b[31mpersonalities_zoo_path:\u001b[0m\u001b[33m/content/zoos/personalities_zoo\u001b[0m\n",
"\u001b[31mextensions_zoo_path:\u001b[0m\u001b[33m/content/zoos/extensions_zoo\u001b[0m\n",
"\u001b[31mmodels_zoo_path:\u001b[0m\u001b[33m/content/zoos/models_zoo\u001b[0m\n",
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
"\u001b[34;1mBindings zoo found in your personal space.\n",
"Pulling last personalities zoo\u001b[0m\n",
"Already up to date.\n",
"\u001b[34;1mPersonalities zoo found in your personal space.\n",
"Pulling last personalities zoo\u001b[0m\n",
"Already up to date.\n",
"\u001b[34;1mExtensions zoo found in your personal space.\n",
"Pulling last Extensions zoo\u001b[0m\n",
"Already up to date.\n",
"\u001b[34;1mModels zoo found in your personal space.\n",
"Pulling last Models zoo\u001b[0m\n",
"Already up to date.\n",
"\u001b[38;5;208mNo binding selected\u001b[0m\n",
"\u001b[32mPersonality lollms mounted successfully but no model is selected\u001b[0m\n",
"\u001b[33;1m\n",
" ___ ___ ___ ___ ___ ___ \n",
" /\\__\\ /\\ \\ /\\__\\ /\\__\\ /\\__\\ /\\ \\ \n",
" /:/ / /::\\ \\ /:/ / /:/ / /::| | /::\\ \\ \n",
" /:/ / /:/\\:\\ \\ /:/ / /:/ / /:|:| | /:/\\ \\ \\ \n",
" /:/ / /:/ \\:\\ \\ /:/ / /:/ / /:/|:|__|__ _\\:\\~\\ \\ \\ \n",
" /:/__/ /:/__/ \\:\\__\\ /:/__/ /:/__/ /:/ |::::\\__\\ /\\ \\:\\ \\ \\__\\ \n",
" \\:\\ \\ \\:\\ \\ /:/ / \\:\\ \\ \\:\\ \\ \\/__/~~/:/ / \\:\\ \\:\\ \\/__/ \n",
" \\:\\ \\ \\:\\ /:/ / \\:\\ \\ \\:\\ \\ /:/ / \\:\\ \\:\\__\\ \n",
" \\:\\ \\ \\:\\/:/ / \\:\\ \\ \\:\\ \\ /:/ / \\:\\/:/ / \n",
" \\:\\__\\ \\::/ / \\:\\__\\ \\:\\__\\ /:/ / \\::/ / \n",
" \\/__/ \\/__/ \\/__/ \\/__/ \\/__/ \\/__/ \n",
"\u001b[0m\n",
"\u001b[31mVersion: \u001b[32m5.9.5\n",
"\u001b[31mBy : \u001b[32mParisNeo\n",
"\u001b[0m\n",
"\n",
"\u001b[32mCurrent personality : \u001b[0mgeneric/lollms\n",
"\u001b[32mVersion : \u001b[0m1.0.0\n",
"\u001b[32mAuthor : \u001b[0mParisNeo\n",
"\u001b[32mDescription : \u001b[0mThis personality is a helpful and Kind AI ready to help you solve your problems \n",
"\n",
"\n",
"\n",
"\u001b[32mCurrent binding : \u001b[0mNone\n",
"\u001b[32mCurrent model : \u001b[0mNone\n",
"\n",
"\u001b[32mfreeing memory\u001b[0m\n",
"The variable 'ExLlamaGenerator' does not exist in the local or global namespace.\n",
"The variable 'ExLlama' does not exist in the local or global namespace.\n",
"The variable 'ExLlamaCache' does not exist in the local or global namespace.\n",
"The variable 'ExLlamaConfig' does not exist in the local or global namespace.\n",
"The variable 'ExLlamaTokenizer' does not exist in the local or global namespace.\n",
"The variable 'torch_version' does not exist in the local or global namespace.\n",
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
"\u001b[31mCuda VRAM usage\u001b[0m\n",
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 0, 'gpu_0_model': 'Tesla T4'}\n",
"\u001b[31mCleared cache\u001b[0m\n",
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
"\u001b[31mCuda VRAM usage\u001b[0m\n",
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 0, 'gpu_0_model': 'Tesla T4'}\n",
"\u001b[32mfreed memory\u001b[0m\n",
"\u001b[34m*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*\u001b[0m\n",
"\u001b[31mInstalling exllama2\u001b[0m\n",
"\u001b[34m*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*\u001b[0m\n",
"This is the first time you are using this binding.\n",
"\u001b[34;1mChecking pytorch\u001b[0m\n",
"\u001b[32mCUDA is supported.\u001b[0m\n",
"Collecting exllamav2==0.0.6+cu118\n",
" Downloading https://github.com/turboderp/exllamav2/releases/download/v0.0.6/exllamav2-0.0.6+cu118-cp310-cp310-linux_x86_64.whl (12.2 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.2/12.2 MB\u001b[0m \u001b[31m28.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (1.5.3)\n",
"Collecting ninja (from exllamav2==0.0.6+cu118)\n",
" Downloading ninja-1.11.1.1-py2.py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl (307 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m307.2/307.2 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting fastparquet (from exllamav2==0.0.6+cu118)\n",
" Downloading fastparquet-2023.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m10.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: torch>=2.0.1 in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2.1.0+cu118)\n",
"Collecting safetensors>=0.3.2 (from exllamav2==0.0.6+cu118)\n",
" Downloading safetensors-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m16.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting sentencepiece>=0.1.97 (from exllamav2==0.0.6+cu118)\n",
" Downloading sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m21.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2.16.1)\n",
"Collecting websockets (from exllamav2==0.0.6+cu118)\n",
" Downloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 kB\u001b[0m \u001b[31m19.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: regex in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2023.6.3)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.12.4)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (4.5.0)\n",
"Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (1.12)\n",
"Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.2)\n",
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.1.2)\n",
"Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (2023.6.0)\n",
"Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (2.1.0)\n",
"Requirement already satisfied: numpy>=1.20.3 in /usr/local/lib/python3.10/dist-packages (from fastparquet->exllamav2==0.0.6+cu118) (1.23.5)\n",
"Collecting cramjam>=2.3 (from fastparquet->exllamav2==0.0.6+cu118)\n",
" Downloading cramjam-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.6 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m26.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from fastparquet->exllamav2==0.0.6+cu118) (23.2)\n",
"Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas->exllamav2==0.0.6+cu118) (2.8.2)\n",
"Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->exllamav2==0.0.6+cu118) (2023.3.post1)\n",
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.1->pandas->exllamav2==0.0.6+cu118) (1.16.0)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=2.0.1->exllamav2==0.0.6+cu118) (2.1.3)\n",
"Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=2.0.1->exllamav2==0.0.6+cu118) (1.3.0)\n",
"Installing collected packages: sentencepiece, ninja, websockets, safetensors, cramjam, fastparquet, exllamav2\n",
"Successfully installed cramjam-2.7.0 exllamav2-0.0.6+cu118 fastparquet-2023.10.1 ninja-1.11.1.1 safetensors-0.4.0 sentencepiece-0.1.99 websockets-12.0\n",
"\u001b[32mInstalled successfully\u001b[0m\n",
"Download Progress: 0%| | 0/100 [00:00<?, ?%/s]\n",
"Downloading: 0% 0/100 [00:00<?, ?step/s]\u001b[A\n",
"Downloading https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GPTQ/resolve/main/README.md\n",
"\n",
"Downloading: 0% 0.0/100 [00:00<?, ?step/s]\u001b[A\n",
"Downloading https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GPTQ/resolve/main/config.json\n",
"\n",
"Downloading: 0% 0.0/100 [00:00<00:00, 996.17step/s]\u001b[A\n",
"Downloading https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GPTQ/resolve/main/generation_config.json\n",
"\n",
"Downloading https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GPTQ/resolve/main/model.safetensors\n",
"\n",
"Download Progress: 21569113792512% [00:19, 2119479570613.05%/s]\n",
"Downloading: 15% 14.606740773295085/100 [00:19<00:00, 517.09step/s]\u001b[A\n",
"Download Progress: 22522237976576% [00:20, 2348664474469.81%/s]\n",
"Download Progress: 76488317976576% [00:35, 5160444535118.35%/s]"
]
}
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

View File

@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup(
name="lollms",
version="5.9.3",
version="5.9.5",
author="Saifeddine ALOUI",
author_email="aloui.saifeddine@gmail.com",
description="A python library for AI personality definition",