Mirror of https://github.com/ParisNeo/lollms.git (synced 2024-12-24 06:46:40 +00:00)

Commit: 2a00bede8c ("enhanced core")
Parent: 9f3eb91f56
@@ -95,6 +95,25 @@ class LollmsApplication:
        self.mount_personalities()
        self.mount_extensions()


    def remove_text_from_string(self, string, text_to_find):
        """
        Removes everything from the first occurrence of the specified text to the end of the string (case-insensitive).

        Parameters:
            string (str): The original string.
            text_to_find (str): The text to find in the string.

        Returns:
            str: The updated string.
        """
        index = string.lower().find(text_to_find.lower())

        if index != -1:
            string = string[:index]

        return string

    def safe_generate(self, full_discussion:str, n_predict=None, callback: Callable[[str, int, dict], bool]=None):
        """safe_generate

@@ -111,8 +130,13 @@ class LollmsApplication:
        n_tokens = len(tk)
        fd = self.personality.model.detokenize(tk[-min(self.config.ctx_size-self.n_cond_tk,n_tokens):])
        self.bot_says = ""
        output = self.personality.model.generate(self.personality.personality_conditioning+fd, n_predict=n_predict, callback=callback)
        return output
        if self.personality.processor is not None and self.personality.processor_cfg["custom_workflow"]:
            ASCIIColors.info("processing...")
            generated_text = self.personality.processor.run_workflow(full_discussion.split("!@>")[-1] if "!@>" in full_discussion else full_discussion, previous_discussion_text=self.personality.personality_conditioning+fd, callback=callback)
        else:
            ASCIIColors.info("generating...")
            generated_text = self.personality.model.generate(self.personality.personality_conditioning+fd, n_predict=n_predict, callback=callback)
        return generated_text

    def notify(self, content, is_success, client_id=None):
        if is_success:
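A quick usage sketch of the new helper (illustrative only: the `app` variable and the sample strings are invented here; the method itself is the one added in the hunk above). The Discord bot below uses it to cut a streamed answer off at a detected antiprompt:

    # assuming `app` is an already-initialized LollmsApplication instance
    partial = "Here is the answer you asked for.\n!@>user: can you also..."
    clean = app.remove_text_from_string(partial, "!@>USER:")  # lookup is case-insensitive
    # clean == "Here is the answer you asked for.\n"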
@@ -8,7 +8,9 @@ from pathlib import Path
from ascii_colors import ASCIIColors
from safe_store import TextVectorizer, GenericDataLoader, VisualizationMethod, VectorizationMethod

contexts={}
context={
    "discussion":""
}

client = commands.Bot(command_prefix='!', intents=discord.Intents.all())

@@ -62,31 +64,48 @@ async def on_message(message):
        return
    if message.content.startswith(config["summoning_word"]):
        prompt = message.content[len(config["summoning_word"]):]
        context['discussion']+= message.author.name +":"+ prompt + "\n" + f"{lollms_app.personality.ai_message_prefix}"
        context['current_response']=""
        print("Chatting")
        try:
            docs, _ = text_vectorzer.recover_text(prompt,3)
            docs = "!#>Documentation:\n"+'\n'.join(docs)
            docs = "!@>Documentation:\n"+'\n'.join(docs)
        except:
            docs=""
        context = f"""!#>instruction:
        context_text = f"""!@>instruction:
{lollms_app.personality.personality_conditioning}
!#>Informations:
!@>Informations:
Current model:{lollms_app.config.model_name}
Current personality:{lollms_app.personality.name}
{docs}
!#>{message.author.id}: {prompt}
!#>{lollms_app.personality.ai_message_prefix}: """
        print("Context:"+context)
        out = ['']
{context['discussion']}"""
        print("Context:"+context_text)
        def callback(text, type=None):
            out[0] += text
            antiprompt = lollms_app.personality.detect_antiprompt(context['current_response'])
            if antiprompt:
                ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
                context['current_response'] = lollms_app.remove_text_from_string(context['current_response'],antiprompt)
                return False

            context['current_response'] += text
            print(text,end="")
            return True

        ASCIIColors.green("Warming up")
        lollms_app.safe_generate(context, n_predict=1024, callback=callback)
        lollms_app.safe_generate(context_text, n_predict=1024, callback=callback)

        print()
        await message.channel.send(out[0])
        context['discussion'] += context['current_response']
        await message.channel.send(context['current_response'])
    elif message.content.startswith('!mount'):
        personality_name = message.content[len('!mount')+1:]
        lollms_app.config.personalities.append(personality_name)
        lollms_app.mount_personality(len(lollms_app.config.personalities)-1)
        await message.channel.send(f"Personality {personality_name} mounted successfully")
    elif message.content.startswith('!list'):
        personality_name = message.content[len('!mount')+1:]
        await message.channel.send(f"Mounted personalities:\n{[p.name for p in lollms_app.mounted_personalities]}")

    elif message.content.startswith('!install'):
        response = "To install lollms, make sure you have installed the currently supported python version (consult the repository to verify what version is currently supported, but as of 10/22/2023, the version is 3.10).\nThen you can follow these steps:\n1. Open your command line interface.\n2. Navigate to the directory where you want to install lollms.\n3. Run the following command to clone the lollms repository: `git clone https://github.com/lollms/lollms.git`.\n4. Once the repository is cloned, navigate into the lollms directory.\n5. Run `pip install -r requirements.txt` to install the required dependencies.\n6. You can now use lollms by importing it in your Python code."
        await message.channel.send(response)
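To make the `!@>` separator migration above concrete, the assembled `context_text` ends up looking roughly like this at runtime (all values are invented for illustration; the exact conditioning text and the AI message prefix depend on the mounted personality):

    !@>instruction:
    <personality conditioning text>
    !@>Informations:
    Current model:Airoboros-L2-13B-3.1.1-GPTQ
    Current personality:lollms
    !@>Documentation:
    <chunks recovered by the vectorizer, when available>
    <author name>:<user prompt>
    <ai_message_prefix>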
@@ -178,11 +178,10 @@ class LOLLMSConfig(BaseConfig):
        else:
            # Create folder if it doesn't exist
            folder_path.mkdir(parents=True, exist_ok=True)
            progress_bar = tqdm(total=None, unit="B", unit_scale=True, desc=f"Downloading {url.split('/')[-1]}")
            progress_bar = tqdm(total=100, unit="%", unit_scale=True, desc=f"Downloading {url.split('/')[-1]}")
            # Define callback function for urlretrieve
            def report_progress(block_num, block_size, total_size):
                progress_bar.total=total_size
                progress_bar.update(block_size)
                progress_bar.update(block_size/total_size)
            # Download file from URL to folder
            try:
                rq.urlretrieve(url, folder_path / url.split("/")[-1], reporthook=report_progress if callback is None else callback)
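A side note on the progress-bar change above: with a 100-unit bar, each urlretrieve block has to be converted into a percentage before updating, otherwise the displayed value drifts (the captured notebook output further down shows very large percentage figures). A hedged sketch of one common pattern, not the repository's code; the names are chosen for the example:

    # illustrative reporthook for urllib.request.urlretrieve driving a 0-100 % tqdm bar
    def make_percent_reporthook(progress_bar):
        def report_progress(block_num, block_size, total_size):
            if total_size > 0:
                downloaded = min(block_num * block_size, total_size)
                progress_bar.n = downloaded * 100 / total_size  # absolute percentage so far
                progress_bar.refresh()
        return report_progress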
notebooks/lollms_discord.ipynb (new file, 667 lines)
@@ -0,0 +1,667 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "Lm1ahQJs7lMk",
|
||||
"outputId": "f486f019-ff7e-4971-ab2d-53eddf47cdec"
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"Looking in indexes: https://download.pytorch.org/whl/cu118\n",
|
||||
"Requirement already satisfied: torch in /usr/local/lib/python3.10/dist-packages (2.1.0+cu118)\n",
|
||||
"Requirement already satisfied: torchvision in /usr/local/lib/python3.10/dist-packages (0.16.0+cu118)\n",
|
||||
"Requirement already satisfied: torchaudio in /usr/local/lib/python3.10/dist-packages (2.1.0+cu118)\n",
|
||||
"Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch) (3.12.4)\n",
|
||||
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch) (4.5.0)\n",
|
||||
"Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch) (1.12)\n",
|
||||
"Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch) (3.2)\n",
|
||||
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch) (3.1.2)\n",
|
||||
"Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch) (2023.6.0)\n",
|
||||
"Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch) (2.1.0)\n",
|
||||
"Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torchvision) (1.23.5)\n",
|
||||
"Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from torchvision) (2.31.0)\n",
|
||||
"Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.10/dist-packages (from torchvision) (9.4.0)\n",
|
||||
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch) (2.1.3)\n",
|
||||
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (3.3.1)\n",
|
||||
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (3.4)\n",
|
||||
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (2.0.7)\n",
|
||||
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->torchvision) (2023.7.22)\n",
|
||||
"Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch) (1.3.0)\n",
|
||||
"Collecting lollms\n",
|
||||
" Downloading lollms-6.0.1-py3-none-any.whl (164 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m164.9/164.9 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from lollms) (4.66.1)\n",
|
||||
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from lollms) (6.0.1)\n",
|
||||
"Requirement already satisfied: Pillow in /usr/local/lib/python3.10/dist-packages (from lollms) (9.4.0)\n",
|
||||
"Requirement already satisfied: flask in /usr/local/lib/python3.10/dist-packages (from lollms) (2.2.5)\n",
|
||||
"Collecting flask-socketio (from lollms)\n",
|
||||
" Downloading Flask_SocketIO-5.3.6-py3-none-any.whl (18 kB)\n",
|
||||
"Collecting flask-cors (from lollms)\n",
|
||||
" Downloading Flask_Cors-4.0.0-py2.py3-none-any.whl (14 kB)\n",
|
||||
"Collecting simple-websocket (from lollms)\n",
|
||||
" Downloading simple_websocket-1.0.0-py3-none-any.whl (13 kB)\n",
|
||||
"Collecting eventlet (from lollms)\n",
|
||||
" Downloading eventlet-0.33.3-py2.py3-none-any.whl (226 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m226.8/226.8 kB\u001b[0m \u001b[31m19.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hCollecting wget (from lollms)\n",
|
||||
" Downloading wget-3.2.zip (10 kB)\n",
|
||||
" Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
||||
"Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from lollms) (67.7.2)\n",
|
||||
"Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from lollms) (2.31.0)\n",
|
||||
"Requirement already satisfied: matplotlib in /usr/local/lib/python3.10/dist-packages (from lollms) (3.7.1)\n",
|
||||
"Requirement already satisfied: seaborn in /usr/local/lib/python3.10/dist-packages (from lollms) (0.12.2)\n",
|
||||
"Collecting mplcursors (from lollms)\n",
|
||||
" Downloading mplcursors-0.5.2.tar.gz (89 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m89.0/89.0 kB\u001b[0m \u001b[31m13.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
||||
"Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (from lollms) (1.2.2)\n",
|
||||
"Collecting ascii-colors (from lollms)\n",
|
||||
" Downloading ascii_colors-0.1.3-py3-none-any.whl (6.9 kB)\n",
|
||||
"Collecting safe-store (from lollms)\n",
|
||||
" Downloading safe_store-0.2.6-py3-none-any.whl (19 kB)\n",
|
||||
"Collecting dnspython>=1.15.0 (from eventlet->lollms)\n",
|
||||
" Downloading dnspython-2.4.2-py3-none-any.whl (300 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m300.4/300.4 kB\u001b[0m \u001b[31m38.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: greenlet>=0.3 in /usr/local/lib/python3.10/dist-packages (from eventlet->lollms) (3.0.0)\n",
|
||||
"Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from eventlet->lollms) (1.16.0)\n",
|
||||
"Requirement already satisfied: Werkzeug>=2.2.2 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (3.0.1)\n",
|
||||
"Requirement already satisfied: Jinja2>=3.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (3.1.2)\n",
|
||||
"Requirement already satisfied: itsdangerous>=2.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (2.1.2)\n",
|
||||
"Requirement already satisfied: click>=8.0 in /usr/local/lib/python3.10/dist-packages (from flask->lollms) (8.1.7)\n",
|
||||
"Collecting python-socketio>=5.0.2 (from flask-socketio->lollms)\n",
|
||||
" Downloading python_socketio-5.10.0-py3-none-any.whl (74 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m74.4/74.4 kB\u001b[0m \u001b[31m12.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.1.1)\n",
|
||||
"Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (0.12.1)\n",
|
||||
"Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (4.43.1)\n",
|
||||
"Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.4.5)\n",
|
||||
"Requirement already satisfied: numpy>=1.20 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (1.23.5)\n",
|
||||
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (23.2)\n",
|
||||
"Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (3.1.1)\n",
|
||||
"Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib->lollms) (2.8.2)\n",
|
||||
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (3.3.1)\n",
|
||||
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (3.4)\n",
|
||||
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (2.0.7)\n",
|
||||
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->lollms) (2023.7.22)\n",
|
||||
"Collecting python-docx (from safe-store->lollms)\n",
|
||||
" Downloading python_docx-1.0.1-py3-none-any.whl (237 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m237.4/237.4 kB\u001b[0m \u001b[31m30.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hCollecting python-pptx (from safe-store->lollms)\n",
|
||||
" Downloading python_pptx-0.6.22-py3-none-any.whl (471 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m471.5/471.5 kB\u001b[0m \u001b[31m48.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (1.5.3)\n",
|
||||
"Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (from safe-store->lollms) (4.11.2)\n",
|
||||
"Collecting PyPDF2 (from safe-store->lollms)\n",
|
||||
" Downloading pypdf2-3.0.1-py3-none-any.whl (232 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m232.6/232.6 kB\u001b[0m \u001b[31m27.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (1.11.3)\n",
|
||||
"Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (1.3.2)\n",
|
||||
"Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->lollms) (3.2.0)\n",
|
||||
"Collecting wsproto (from simple-websocket->lollms)\n",
|
||||
" Downloading wsproto-1.2.0-py3-none-any.whl (24 kB)\n",
|
||||
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from Jinja2>=3.0->flask->lollms) (2.1.3)\n",
|
||||
"Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->safe-store->lollms) (2023.3.post1)\n",
|
||||
"Requirement already satisfied: bidict>=0.21.0 in /usr/local/lib/python3.10/dist-packages (from python-socketio>=5.0.2->flask-socketio->lollms) (0.22.1)\n",
|
||||
"Collecting python-engineio>=4.8.0 (from python-socketio>=5.0.2->flask-socketio->lollms)\n",
|
||||
" Downloading python_engineio-4.8.0-py3-none-any.whl (56 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.8/56.8 kB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4->safe-store->lollms) (2.5)\n",
|
||||
"Requirement already satisfied: lxml>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from python-docx->safe-store->lollms) (4.9.3)\n",
|
||||
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from python-docx->safe-store->lollms) (4.5.0)\n",
|
||||
"Collecting XlsxWriter>=0.5.7 (from python-pptx->safe-store->lollms)\n",
|
||||
" Downloading XlsxWriter-3.1.9-py3-none-any.whl (154 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m154.8/154.8 kB\u001b[0m \u001b[31m20.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hCollecting h11<1,>=0.9.0 (from wsproto->simple-websocket->lollms)\n",
|
||||
" Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hBuilding wheels for collected packages: mplcursors, wget\n",
|
||||
" Building wheel for mplcursors (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
||||
" Created wheel for mplcursors: filename=mplcursors-0.5.2-py3-none-any.whl size=21166 sha256=a6f01c9e9b4427e68cb7f0de75bced6c64fe5de7cfab0adcf8c0f693a6a4c357\n",
|
||||
" Stored in directory: /root/.cache/pip/wheels/b5/5b/fb/aed35cc15262c380536820fa3cb2e2d41fb52450de918a6785\n",
|
||||
" Building wheel for wget (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
||||
" Created wheel for wget: filename=wget-3.2-py3-none-any.whl size=9655 sha256=ae368c6811f7d8994dbce2dde0c25049f0961b046500e2051893b74a9a62bd70\n",
|
||||
" Stored in directory: /root/.cache/pip/wheels/8b/f1/7f/5c94f0a7a505ca1c81cd1d9208ae2064675d97582078e6c769\n",
|
||||
"Successfully built mplcursors wget\n",
|
||||
"Installing collected packages: wget, XlsxWriter, python-docx, PyPDF2, h11, dnspython, ascii-colors, wsproto, python-pptx, eventlet, simple-websocket, mplcursors, flask-cors, safe-store, python-engineio, python-socketio, flask-socketio, lollms\n",
|
||||
"Successfully installed PyPDF2-3.0.1 XlsxWriter-3.1.9 ascii-colors-0.1.3 dnspython-2.4.2 eventlet-0.33.3 flask-cors-4.0.0 flask-socketio-5.3.6 h11-0.14.0 lollms-6.0.1 mplcursors-0.5.2 python-docx-1.0.1 python-engineio-4.8.0 python-pptx-0.6.22 python-socketio-5.10.0 safe-store-0.2.6 simple-websocket-1.0.0 wget-3.2 wsproto-1.2.0\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"!pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118\n",
|
||||
"!pip install lollms --upgrade"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "YP6kXgau8Wej",
|
||||
"outputId": "3c7196be-4ce1-4314-f0a0-7738e13eb957"
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"Cloning into 'zoos/bindings_zoo'...\n",
|
||||
"remote: Enumerating objects: 2153, done.\u001b[K\n",
|
||||
"remote: Counting objects: 100% (1095/1095), done.\u001b[K\n",
|
||||
"remote: Compressing objects: 100% (337/337), done.\u001b[K\n",
|
||||
"remote: Total 2153 (delta 832), reused 995 (delta 754), pack-reused 1058\u001b[K\n",
|
||||
"Receiving objects: 100% (2153/2153), 6.34 MiB | 14.18 MiB/s, done.\n",
|
||||
"Resolving deltas: 100% (1577/1577), done.\n",
|
||||
"Cloning into 'zoos/extensions_zoo'...\n",
|
||||
"remote: Enumerating objects: 82, done.\u001b[K\n",
|
||||
"remote: Counting objects: 100% (82/82), done.\u001b[K\n",
|
||||
"remote: Compressing objects: 100% (48/48), done.\u001b[K\n",
|
||||
"remote: Total 82 (delta 31), reused 51 (delta 12), pack-reused 0\u001b[K\n",
|
||||
"Receiving objects: 100% (82/82), 928.75 KiB | 32.03 MiB/s, done.\n",
|
||||
"Resolving deltas: 100% (31/31), done.\n",
|
||||
"Cloning into 'zoos/models_zoo'...\n",
|
||||
"remote: Enumerating objects: 63, done.\u001b[K\n",
|
||||
"remote: Counting objects: 100% (63/63), done.\u001b[K\n",
|
||||
"remote: Compressing objects: 100% (43/43), done.\u001b[K\n",
|
||||
"remote: Total 63 (delta 32), reused 46 (delta 19), pack-reused 0\u001b[K\n",
|
||||
"Receiving objects: 100% (63/63), 198.67 KiB | 9.46 MiB/s, done.\n",
|
||||
"Resolving deltas: 100% (32/32), done.\n",
|
||||
"Cloning into 'zoos/personalities_zoo'...\n",
|
||||
"remote: Enumerating objects: 5991, done.\u001b[K\n",
|
||||
"remote: Counting objects: 100% (1662/1662), done.\u001b[K\n",
|
||||
"remote: Compressing objects: 100% (602/602), done.\u001b[K\n",
|
||||
"remote: Total 5991 (delta 1048), reused 1446 (delta 960), pack-reused 4329\u001b[K\n",
|
||||
"Receiving objects: 100% (5991/5991), 65.03 MiB | 16.39 MiB/s, done.\n",
|
||||
"Resolving deltas: 100% (2747/2747), done.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"!mkdir zoos\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_bindings_zoo.git zoos/bindings_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_extensions_zoo.git zoos/extensions_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/models_zoo.git zoos/models_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_personalities_zoo.git zoos/personalities_zoo\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"!lollms-settings --silent --tool_prefix lollms_discord_ --set_personal_folder_path ./personal_data --install_binding exllama2 --install_model TheBloke/Airoboros-L2-13B-3.1.1-GPTQ"
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "DSsf2L1kPc0K",
|
||||
"outputId": "fe6dc4d1-c652-46bf-ed6e-9c4dfb80dc82"
|
||||
},
|
||||
"execution_count": 7,
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"\u001b[36mTrying to use Configuration at :lollms_discord_global_paths_cfg.yaml\u001b[0m\n",
|
||||
"\u001b[31mlollms_discord_global_paths_cfg.yaml not found! Searching in your home folder.\u001b[0m\n",
|
||||
"Selected: ./personal_data\n",
|
||||
"\u001b[33mExecution path : /content\u001b[0m\n",
|
||||
"\u001b[32mLocal zoos folder found\u001b[0m\n",
|
||||
"\u001b[32m----------------------Paths information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mpersonal_path:\u001b[0m\u001b[33m/content/personal_data\u001b[0m\n",
|
||||
"\u001b[31mpersonal_configuration_path:\u001b[0m\u001b[33m/content/personal_data/configs\u001b[0m\n",
|
||||
"\u001b[31mpersonal_databases_path:\u001b[0m\u001b[33m/content/personal_data/databases\u001b[0m\n",
|
||||
"\u001b[31mpersonal_models_path:\u001b[0m\u001b[33m/content/personal_data/models\u001b[0m\n",
|
||||
"\u001b[31mpersonal_user_infos_path:\u001b[0m\u001b[33m/content/personal_data/user_infos\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers/gptqlora\u001b[0m\n",
|
||||
"\u001b[31mpersonal_data_path:\u001b[0m\u001b[33m/content/personal_data/data\u001b[0m\n",
|
||||
"\u001b[31mcustom_personalities_path:\u001b[0m\u001b[33m/content/personal_data/custom_personalities\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[32m----------------------Zoo information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mbindings_zoo_path:\u001b[0m\u001b[33m/content/zoos/bindings_zoo\u001b[0m\n",
|
||||
"\u001b[31mpersonalities_zoo_path:\u001b[0m\u001b[33m/content/zoos/personalities_zoo\u001b[0m\n",
|
||||
"\u001b[31mextensions_zoo_path:\u001b[0m\u001b[33m/content/zoos/extensions_zoo\u001b[0m\n",
|
||||
"\u001b[31mmodels_zoo_path:\u001b[0m\u001b[33m/content/zoos/models_zoo\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[32mSelected personal path: ./personal_data\u001b[0m\n",
|
||||
"\u001b[36mTrying to use Configuration at :/root/lollms_discord_global_paths_cfg.yaml\u001b[0m\n",
|
||||
"\u001b[32m/root/lollms_discord_global_paths_cfg.yaml found!\u001b[0m\n",
|
||||
"\u001b[33mExecution path : /content\u001b[0m\n",
|
||||
"\u001b[32mLocal zoos folder found\u001b[0m\n",
|
||||
"\u001b[32m----------------------Paths information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mpersonal_path:\u001b[0m\u001b[33m/content/personal_data\u001b[0m\n",
|
||||
"\u001b[31mpersonal_configuration_path:\u001b[0m\u001b[33m/content/personal_data/configs\u001b[0m\n",
|
||||
"\u001b[31mpersonal_databases_path:\u001b[0m\u001b[33m/content/personal_data/databases\u001b[0m\n",
|
||||
"\u001b[31mpersonal_models_path:\u001b[0m\u001b[33m/content/personal_data/models\u001b[0m\n",
|
||||
"\u001b[31mpersonal_user_infos_path:\u001b[0m\u001b[33m/content/personal_data/user_infos\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers/gptqlora\u001b[0m\n",
|
||||
"\u001b[31mpersonal_data_path:\u001b[0m\u001b[33m/content/personal_data/data\u001b[0m\n",
|
||||
"\u001b[31mcustom_personalities_path:\u001b[0m\u001b[33m/content/personal_data/custom_personalities\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[32m----------------------Zoo information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mbindings_zoo_path:\u001b[0m\u001b[33m/content/zoos/bindings_zoo\u001b[0m\n",
|
||||
"\u001b[31mpersonalities_zoo_path:\u001b[0m\u001b[33m/content/zoos/personalities_zoo\u001b[0m\n",
|
||||
"\u001b[31mextensions_zoo_path:\u001b[0m\u001b[33m/content/zoos/extensions_zoo\u001b[0m\n",
|
||||
"\u001b[31mmodels_zoo_path:\u001b[0m\u001b[33m/content/zoos/models_zoo\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[34;1mBindings zoo found in your personal space.\n",
|
||||
"Pulling last personalities zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mPersonalities zoo found in your personal space.\n",
|
||||
"Pulling last personalities zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mExtensions zoo found in your personal space.\n",
|
||||
"Pulling last Extensions zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mModels zoo found in your personal space.\n",
|
||||
"Pulling last Models zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[38;5;208mNo binding selected\u001b[0m\n",
|
||||
"\u001b[32mPersonality lollms mounted successfully but no model is selected\u001b[0m\n",
|
||||
"\u001b[33;1m\n",
|
||||
" ___ ___ ___ ___ ___ ___ \n",
|
||||
" /\\__\\ /\\ \\ /\\__\\ /\\__\\ /\\__\\ /\\ \\ \n",
|
||||
" /:/ / /::\\ \\ /:/ / /:/ / /::| | /::\\ \\ \n",
|
||||
" /:/ / /:/\\:\\ \\ /:/ / /:/ / /:|:| | /:/\\ \\ \\ \n",
|
||||
" /:/ / /:/ \\:\\ \\ /:/ / /:/ / /:/|:|__|__ _\\:\\~\\ \\ \\ \n",
|
||||
" /:/__/ /:/__/ \\:\\__\\ /:/__/ /:/__/ /:/ |::::\\__\\ /\\ \\:\\ \\ \\__\\ \n",
|
||||
" \\:\\ \\ \\:\\ \\ /:/ / \\:\\ \\ \\:\\ \\ \\/__/~~/:/ / \\:\\ \\:\\ \\/__/ \n",
|
||||
" \\:\\ \\ \\:\\ /:/ / \\:\\ \\ \\:\\ \\ /:/ / \\:\\ \\:\\__\\ \n",
|
||||
" \\:\\ \\ \\:\\/:/ / \\:\\ \\ \\:\\ \\ /:/ / \\:\\/:/ / \n",
|
||||
" \\:\\__\\ \\::/ / \\:\\__\\ \\:\\__\\ /:/ / \\::/ / \n",
|
||||
" \\/__/ \\/__/ \\/__/ \\/__/ \\/__/ \\/__/ \n",
|
||||
"\u001b[0m\n",
|
||||
"\u001b[31mVersion: \u001b[32m6.0.1\n",
|
||||
"\u001b[31mBy : \u001b[32mParisNeo\n",
|
||||
"\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[32mCurrent personality : \u001b[0mgeneric/lollms\n",
|
||||
"\u001b[32mVersion : \u001b[0m1.0.0\n",
|
||||
"\u001b[32mAuthor : \u001b[0mParisNeo\n",
|
||||
"\u001b[32mDescription : \u001b[0mThis personality is a helpful and Kind AI ready to help you solve your problems \n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\u001b[32mCurrent binding : \u001b[0mNone\n",
|
||||
"\u001b[32mCurrent model : \u001b[0mNone\n",
|
||||
"\n",
|
||||
"\u001b[32mfreeing memory\u001b[0m\n",
|
||||
"The variable 'ExLlamaGenerator' does not exist in the local or global namespace.\n",
|
||||
"The variable 'ExLlama' does not exist in the local or global namespace.\n",
|
||||
"The variable 'ExLlamaCache' does not exist in the local or global namespace.\n",
|
||||
"The variable 'ExLlamaConfig' does not exist in the local or global namespace.\n",
|
||||
"The variable 'ExLlamaTokenizer' does not exist in the local or global namespace.\n",
|
||||
"The variable 'torch_version' does not exist in the local or global namespace.\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 0, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[31mCleared cache\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 0, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[32mfreed memory\u001b[0m\n",
|
||||
"\u001b[34m*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mInstalling exllama2\u001b[0m\n",
|
||||
"\u001b[34m*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"This is the first time you are using this binding.\n",
|
||||
"\u001b[34;1mChecking pytorch\u001b[0m\n",
|
||||
"\u001b[32mCUDA is supported.\u001b[0m\n",
|
||||
"Collecting exllamav2==0.0.6+cu118\n",
|
||||
" Downloading https://github.com/turboderp/exllamav2/releases/download/v0.0.6/exllamav2-0.0.6+cu118-cp310-cp310-linux_x86_64.whl (12.2 MB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.2/12.2 MB\u001b[0m \u001b[31m100.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (1.5.3)\n",
|
||||
"Collecting ninja (from exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading ninja-1.11.1.1-py2.py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl (307 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m307.2/307.2 kB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hCollecting fastparquet (from exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading fastparquet-2023.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m68.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: torch>=2.0.1 in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2.1.0+cu118)\n",
|
||||
"Collecting safetensors>=0.3.2 (from exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading safetensors-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m82.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hCollecting sentencepiece>=0.1.97 (from exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m88.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2.16.1)\n",
|
||||
"Collecting websockets (from exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 kB\u001b[0m \u001b[31m18.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: regex in /usr/local/lib/python3.10/dist-packages (from exllamav2==0.0.6+cu118) (2023.6.3)\n",
|
||||
"Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.12.4)\n",
|
||||
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (4.5.0)\n",
|
||||
"Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (1.12)\n",
|
||||
"Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.2)\n",
|
||||
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (3.1.2)\n",
|
||||
"Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (2023.6.0)\n",
|
||||
"Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.1->exllamav2==0.0.6+cu118) (2.1.0)\n",
|
||||
"Requirement already satisfied: numpy>=1.20.3 in /usr/local/lib/python3.10/dist-packages (from fastparquet->exllamav2==0.0.6+cu118) (1.23.5)\n",
|
||||
"Collecting cramjam>=2.3 (from fastparquet->exllamav2==0.0.6+cu118)\n",
|
||||
" Downloading cramjam-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.6 MB)\n",
|
||||
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m89.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from fastparquet->exllamav2==0.0.6+cu118) (23.2)\n",
|
||||
"Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas->exllamav2==0.0.6+cu118) (2.8.2)\n",
|
||||
"Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->exllamav2==0.0.6+cu118) (2023.3.post1)\n",
|
||||
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.1->pandas->exllamav2==0.0.6+cu118) (1.16.0)\n",
|
||||
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=2.0.1->exllamav2==0.0.6+cu118) (2.1.3)\n",
|
||||
"Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=2.0.1->exllamav2==0.0.6+cu118) (1.3.0)\n",
|
||||
"Installing collected packages: sentencepiece, ninja, websockets, safetensors, cramjam, fastparquet, exllamav2\n",
|
||||
"Successfully installed cramjam-2.7.0 exllamav2-0.0.6+cu118 fastparquet-2023.10.1 ninja-1.11.1.1 safetensors-0.4.0 sentencepiece-0.1.99 websockets-12.0\n",
|
||||
"\u001b[32mInstalled successfully\u001b[0m\n",
|
||||
"Download Progress: 0%| | 0/100 [00:00<?, ?%/s]\n",
|
||||
"Downloading: 0% 0/100 [00:00<?, ?step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/LICENSE.txt\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:00<?, ?step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/Notice\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:00<00:00, 380.07step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/README.md\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:00<00:00, 380.43step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/USE_POLICY.md\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:01<00:00, 279.10step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/config.json\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:01<00:00, 314.32step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/generation_config.json\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:01<00:00, 337.94step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/model.safetensors\n",
|
||||
"\n",
|
||||
"Download Progress: 373689107480576% [00:16, 44742331215743.43%/s]\n",
|
||||
"Download Progress: 380651962171392% [00:16, 51699614587999.87%/s]\n",
|
||||
"Download Progress: 1079926817816576% [00:25, 118310329425449.44%/s]\n",
|
||||
"Download Progress: 1763470503739392% [00:32, 127034715138359.36%/s]\n",
|
||||
"Download Progress: 2335734770712576% [00:37, 153136886137461.28%/s]\n",
|
||||
"Download Progress: 2801173331435520% [00:42, 111874516041465.06%/s]\n",
|
||||
"Download Progress: 3139287389069312% [00:44, 180111044610096.66%/s]\n",
|
||||
"Download Progress: 3198190232870912% [00:44, 164216355766300.78%/s]\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/quantize_config.json\n",
|
||||
"\n",
|
||||
"Downloading: 100% 100.0/100 [00:44<00:00, 12.99step/s] \u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/special_tokens_map.json\n",
|
||||
"\n",
|
||||
"Downloading: 100% 100.0/100 [00:44<00:00, 25.33step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/tokenizer.json\n",
|
||||
"\n",
|
||||
"Downloading: 16% 16.44830844051364/100 [00:44<00:03, 26.98step/s]\u001b[A\n",
|
||||
"Downloading: 41% 41.34304553966942/100 [00:45<00:01, 31.47step/s]\u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/tokenizer.model\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:45<00:02, 46.52step/s] \u001b[A\n",
|
||||
"Downloading https://huggingface.co/TheBloke/Airoboros-L2-13B-3.1.1-GPTQ/resolve/main/tokenizer_config.json\n",
|
||||
"\n",
|
||||
"Downloading: 0% 0.0/100 [00:45<00:01, 80.93step/s]\u001b[ADone\n",
|
||||
"Downloading: 100% 100.0/100 [00:45<00:00, 2.19step/s]\n",
|
||||
"Download Progress: 3216523167355064% [00:46, 68915229855797.49%/s] \n",
|
||||
"\u001b[32mfreeing memory\u001b[0m\n",
|
||||
"The variable 'settings' does not exist in the instance.\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 3145728, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[31mCleared cache\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 3145728, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[32mfreed memory\u001b[0m\n",
|
||||
"\u001b[31m----------- LOLLMS EXLLAMA2 Model Information -----------------\u001b[0m\n",
|
||||
"\u001b[35mModel name:Airoboros-L2-13B-3.1.1-GPTQ\u001b[0m\n",
|
||||
"\u001b[31mmodel_dir: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ\u001b[0m\n",
|
||||
"\u001b[31mmodel_config: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/config.json\u001b[0m\n",
|
||||
"\u001b[31mbos_token_id: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31meos_token_id: \u001b[0m\u001b[33m2\u001b[0m\n",
|
||||
"\u001b[31mpad_token_id: \u001b[0m\u001b[33m0\u001b[0m\n",
|
||||
"\u001b[31mhidden_size: \u001b[0m\u001b[33m5120\u001b[0m\n",
|
||||
"\u001b[31minitializer_range: \u001b[0m\u001b[33m0.02\u001b[0m\n",
|
||||
"\u001b[31mintermediate_size: \u001b[0m\u001b[33m13824\u001b[0m\n",
|
||||
"\u001b[31mnum_attention_heads: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mnum_hidden_layers: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mrms_norm_eps: \u001b[0m\u001b[33m1e-05\u001b[0m\n",
|
||||
"\u001b[31mvocab_size: \u001b[0m\u001b[33m32000\u001b[0m\n",
|
||||
"\u001b[31mrotary_embedding_base: \u001b[0m\u001b[33m10000.0\u001b[0m\n",
|
||||
"\u001b[31mnum_key_value_heads: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mnum_key_value_groups: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31mmax_seq_len: \u001b[0m\u001b[33m4090\u001b[0m\n",
|
||||
"\u001b[31mtensor_files: \u001b[0m\u001b[33m['/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/model.safetensors']\u001b[0m\n",
|
||||
"\u001b[31mhead_dim: \u001b[0m\u001b[33m128\u001b[0m\n",
|
||||
"\u001b[31mtokenizer_path: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/tokenizer.model\u001b[0m\n",
|
||||
"\u001b[31mmax_input_len: \u001b[0m\u001b[33m2048\u001b[0m\n",
|
||||
"\u001b[31mmax_attention_size: \u001b[0m\u001b[33m4194304\u001b[0m\n",
|
||||
"\u001b[31mscale_pos_emb: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31mscale_alpha_value: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31m--------------------------------------------------------------\u001b[0m\n",
|
||||
"Loading model: /content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ\n",
|
||||
"\u001b[32mGPU split:[24]\u001b[0m\n"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"!pip install discord.py"
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "R3IOhe7KJZQT",
|
||||
"outputId": "95022a48-2e25-479f-aef7-e20f9c3ed1f9"
|
||||
},
|
||||
"execution_count": 8,
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"Collecting discord.py\n",
|
||||
" Downloading discord.py-2.3.2-py3-none-any.whl (1.1 MB)\n",
|
||||
"\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/1.1 MB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[91m━━━━━━━━\u001b[0m\u001b[90m╺\u001b[0m\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.2/1.1 MB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:01\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m20.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
||||
"\u001b[?25hRequirement already satisfied: aiohttp<4,>=3.7.4 in /usr/local/lib/python3.10/dist-packages (from discord.py) (3.8.6)\n",
|
||||
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (23.1.0)\n",
|
||||
"Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (3.3.1)\n",
|
||||
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (6.0.4)\n",
|
||||
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (4.0.3)\n",
|
||||
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (1.9.2)\n",
|
||||
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (1.4.0)\n",
|
||||
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4,>=3.7.4->discord.py) (1.3.1)\n",
|
||||
"Requirement already satisfied: idna>=2.0 in /usr/local/lib/python3.10/dist-packages (from yarl<2.0,>=1.0->aiohttp<4,>=3.7.4->discord.py) (3.4)\n",
|
||||
"Installing collected packages: discord.py\n",
|
||||
"Successfully installed discord.py-2.3.2\n"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"!lollms-discord"
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "x1oO7C_v8oTI",
|
||||
"outputId": "cdca280b-d8bc-4d18-de80-86e4a4637b41"
|
||||
},
|
||||
"execution_count": null,
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"\u001b[36mTrying to use Configuration at :lollms_discord_global_paths_cfg.yaml\u001b[0m\n",
|
||||
"\u001b[31mlollms_discord_global_paths_cfg.yaml not found! Searching in your home folder.\u001b[0m\n",
|
||||
"\u001b[32m/root/lollms_discord_global_paths_cfg.yaml found!\u001b[0m\n",
|
||||
"\u001b[33mExecution path : /content\u001b[0m\n",
|
||||
"\u001b[32mLocal zoos folder found\u001b[0m\n",
|
||||
"\u001b[32m----------------------Paths information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mpersonal_path:\u001b[0m\u001b[33m/content/personal_data\u001b[0m\n",
|
||||
"\u001b[31mpersonal_configuration_path:\u001b[0m\u001b[33m/content/personal_data/configs\u001b[0m\n",
|
||||
"\u001b[31mpersonal_databases_path:\u001b[0m\u001b[33m/content/personal_data/databases\u001b[0m\n",
|
||||
"\u001b[31mpersonal_models_path:\u001b[0m\u001b[33m/content/personal_data/models\u001b[0m\n",
|
||||
"\u001b[31mpersonal_user_infos_path:\u001b[0m\u001b[33m/content/personal_data/user_infos\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers\u001b[0m\n",
|
||||
"\u001b[31mpersonal_trainers_path:\u001b[0m\u001b[33m/content/personal_data/trainers/gptqlora\u001b[0m\n",
|
||||
"\u001b[31mpersonal_data_path:\u001b[0m\u001b[33m/content/personal_data/data\u001b[0m\n",
|
||||
"\u001b[31mcustom_personalities_path:\u001b[0m\u001b[33m/content/personal_data/custom_personalities\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[32m----------------------Zoo information-----------------------\u001b[0m\n",
|
||||
"\u001b[31mbindings_zoo_path:\u001b[0m\u001b[33m/content/zoos/bindings_zoo\u001b[0m\n",
|
||||
"\u001b[31mpersonalities_zoo_path:\u001b[0m\u001b[33m/content/zoos/personalities_zoo\u001b[0m\n",
|
||||
"\u001b[31mextensions_zoo_path:\u001b[0m\u001b[33m/content/zoos/extensions_zoo\u001b[0m\n",
|
||||
"\u001b[31mmodels_zoo_path:\u001b[0m\u001b[33m/content/zoos/models_zoo\u001b[0m\n",
|
||||
"\u001b[32m-------------------------------------------------------------\u001b[0m\n",
|
||||
"\u001b[34;1mBindings zoo found in your personal space.\n",
|
||||
"Pulling last personalities zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mPersonalities zoo found in your personal space.\n",
|
||||
"Pulling last personalities zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mExtensions zoo found in your personal space.\n",
|
||||
"Pulling last Extensions zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1mModels zoo found in your personal space.\n",
|
||||
"Pulling last Models zoo\u001b[0m\n",
|
||||
"Already up to date.\n",
|
||||
"\u001b[34;1m>Loading binding exllama2. Please wait ...\u001b[0m\n",
|
||||
"\u001b[32mBinding exllama2 loaded successfully.\u001b[0m\n",
|
||||
"\u001b[34;1m>Loading model Airoboros-L2-13B-3.1.1-GPTQ. Please wait ...\u001b[0m\n",
|
||||
"\u001b[32mfreeing memory\u001b[0m\n",
|
||||
"The variable 'settings' does not exist in the instance.\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 3145728, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[31mCleared cache\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"\u001b[31mCuda VRAM usage\u001b[0m\n",
|
||||
"\u001b[31m*-*-*-*-*-*-*-*\u001b[0m\n",
|
||||
"{'nb_gpus': 1, 'gpu_0_total_vram': 16106127360, 'gpu_0_used_vram': 3145728, 'gpu_0_model': 'Tesla T4'}\n",
|
||||
"\u001b[32mfreed memory\u001b[0m\n",
|
||||
"\u001b[31m----------- LOLLMS EXLLAMA2 Model Information -----------------\u001b[0m\n",
|
||||
"\u001b[35mModel name:Airoboros-L2-13B-3.1.1-GPTQ\u001b[0m\n",
|
||||
"\u001b[31mmodel_dir: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ\u001b[0m\n",
|
||||
"\u001b[31mmodel_config: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/config.json\u001b[0m\n",
|
||||
"\u001b[31mbos_token_id: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31meos_token_id: \u001b[0m\u001b[33m2\u001b[0m\n",
|
||||
"\u001b[31mpad_token_id: \u001b[0m\u001b[33m0\u001b[0m\n",
|
||||
"\u001b[31mhidden_size: \u001b[0m\u001b[33m5120\u001b[0m\n",
|
||||
"\u001b[31minitializer_range: \u001b[0m\u001b[33m0.02\u001b[0m\n",
|
||||
"\u001b[31mintermediate_size: \u001b[0m\u001b[33m13824\u001b[0m\n",
|
||||
"\u001b[31mnum_attention_heads: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mnum_hidden_layers: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mrms_norm_eps: \u001b[0m\u001b[33m1e-05\u001b[0m\n",
|
||||
"\u001b[31mvocab_size: \u001b[0m\u001b[33m32000\u001b[0m\n",
|
||||
"\u001b[31mrotary_embedding_base: \u001b[0m\u001b[33m10000.0\u001b[0m\n",
|
||||
"\u001b[31mnum_key_value_heads: \u001b[0m\u001b[33m40\u001b[0m\n",
|
||||
"\u001b[31mnum_key_value_groups: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31mmax_seq_len: \u001b[0m\u001b[33m4090\u001b[0m\n",
|
||||
"\u001b[31mtensor_files: \u001b[0m\u001b[33m['/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/model.safetensors']\u001b[0m\n",
|
||||
"\u001b[31mhead_dim: \u001b[0m\u001b[33m128\u001b[0m\n",
|
||||
"\u001b[31mtokenizer_path: \u001b[0m\u001b[33m/content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ/tokenizer.model\u001b[0m\n",
|
||||
"\u001b[31mmax_input_len: \u001b[0m\u001b[33m2048\u001b[0m\n",
|
||||
"\u001b[31mmax_attention_size: \u001b[0m\u001b[33m4194304\u001b[0m\n",
|
||||
"\u001b[31mscale_pos_emb: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31mscale_alpha_value: \u001b[0m\u001b[33m1\u001b[0m\n",
|
||||
"\u001b[31m--------------------------------------------------------------\u001b[0m\n",
|
||||
"Loading model: /content/personal_data/models/gptq/Airoboros-L2-13B-3.1.1-GPTQ\n",
|
||||
"\u001b[32mGPU split:[24]\u001b[0m\n",
|
||||
"\u001b[32mModel Airoboros-L2-13B-3.1.1-GPTQ loaded successfully.\u001b[0m\n",
|
||||
"\u001b[32mPersonality lollms mounted successfully\u001b[0m\n",
|
||||
"\u001b[34;1mNo database file found : /content/personal_data/data/discord_bot/db.json\u001b[0m\n",
|
||||
"\u001b[38;5;202mWarning! Database empty! Coudln't index anything\u001b[0m\n",
|
||||
"Please enter your bot token: MTE2NTQ3Mzc5ODIwNDY5MDQ1NQ.Gv6vqA.ad0FFUp_INFjRnLl8-KeyIO0UZ7mCJRNGEm0Jg\n",
|
||||
"\u001b[30;1m2023-10-30 20:17:38\u001b[0m \u001b[34;1mINFO \u001b[0m \u001b[35mdiscord.client\u001b[0m logging in using static token\n",
|
||||
"\u001b[30;1m2023-10-30 20:17:39\u001b[0m \u001b[34;1mINFO \u001b[0m \u001b[35mdiscord.gateway\u001b[0m Shard ID None has connected to Gateway (Session ID: 68f3868c6225aa18a987dde790241da9).\n",
|
||||
"Logged in as lollms#9232\n",
|
||||
"------\n",
|
||||
"Chatting\n",
|
||||
"Context:!#>instruction:\n",
|
||||
"!@>information:\n",
|
||||
"Assistant's name: LoLLMs\n",
|
||||
"Author : ParisNeo a computer geek pationed by AI\n",
|
||||
"Today's date is Monday, October 30, 2023\n",
|
||||
"!@>instructions:\n",
|
||||
"Your mission is to assist user to perform various tasks and answer his questions\n",
|
||||
"\n",
|
||||
"!#>Informations:\n",
|
||||
"Current model:Airoboros-L2-13B-3.1.1-GPTQ\n",
|
||||
"Current personality:lollms\n",
|
||||
"\n",
|
||||
"!#>1006867658454601770: what do you think about building an AI character to anlyze code and repare bugs?\n",
|
||||
"!#>!@> Assistant:\n",
|
||||
": \n",
|
||||
"\u001b[32mWarming up\u001b[0m\n",
|
||||
"I believe that it could be beneficial as long as the artificial intelligence system has been trained on large amounts of data related to coding languages and debugging processes. However, such systems are still in their infancy stage and may not yet have the sophisticated understanding needed for complex software repair or advanced programming tasks.\n"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"!ip addr show"
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "fbPrg7RH-Ui8",
|
||||
"outputId": "1b82968d-06c6-4a73-8a28-67e8f7619099"
|
||||
},
|
||||
"execution_count": null,
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"text": [
|
||||
"1: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN group default qlen 1000\n",
|
||||
" link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00\n",
|
||||
" inet 127.0.0.1/8 scope host lo\n",
|
||||
" valid_lft forever preferred_lft forever\n",
|
||||
"5: eth0@if6: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc noqueue state UP group default \n",
|
||||
" link/ether 02:42:ac:1c:00:0c brd ff:ff:ff:ff:ff:ff link-netnsid 0\n",
|
||||
" inet 172.28.0.12/16 brd 172.28.255.255 scope global eth0\n",
|
||||
" valid_lft forever preferred_lft forever\n"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"accelerator": "GPU",
|
||||
"colab": {
|
||||
"provenance": []
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"name": "python"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
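For readers who just want the setup without scrolling through the captured output, the notebook's cells boil down to these commands (copied from the cells above; this recap is only a convenience summary):

    pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118
    pip install lollms --upgrade
    mkdir zoos
    git clone https://github.com/ParisNeo/lollms_bindings_zoo.git zoos/bindings_zoo
    git clone https://github.com/ParisNeo/lollms_extensions_zoo.git zoos/extensions_zoo
    git clone https://github.com/ParisNeo/models_zoo.git zoos/models_zoo
    git clone https://github.com/ParisNeo/lollms_personalities_zoo.git zoos/personalities_zoo
    lollms-settings --silent --tool_prefix lollms_discord_ --set_personal_folder_path ./personal_data --install_binding exllama2 --install_model TheBloke/Airoboros-L2-13B-3.1.1-GPTQ
    pip install discord.py
    lollms-discord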
@ -12,8 +12,8 @@
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Looking in indexes: https://download.pytorch.org/whl/cu118\n",
|
||||
"Requirement already satisfied: torch in /usr/local/lib/python3.10/dist-packages (2.1.0+cu118)\n",
|
||||
@ -120,8 +120,8 @@
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Cloning into 'lollms_bindings_zoo'...\n",
|
||||
"remote: Enumerating objects: 2153, done.\u001b[K\n",
|
||||
@ -156,20 +156,16 @@
|
||||
],
|
||||
"source": [
|
||||
"!mkdir zoos\n",
|
||||
"!cd zoos\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_bindings_zoo.git\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_extensions_zoo.git\n",
|
||||
"!git clone https://github.com/ParisNeo/models_zoo.git\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_personalities_zoo.git\n",
|
||||
"!cd ..\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_bindings_zoo.git zoos/bindings_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_extensions_zoo.git zoos/extensions_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/models_zoo.git zoos/models_zoo\n",
|
||||
"!git clone https://github.com/ParisNeo/lollms_personalities_zoo.git zoos/personalities_zoo\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"!lollms-settings --silent --set_personal_folder_path ./personal_data --install_binding exllama2 --install_model TheBloke/Mistral-7B-Instruct-v0.1-GPTQ"
|
||||
],
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
@ -177,11 +173,10 @@
|
||||
"id": "DSsf2L1kPc0K",
|
||||
"outputId": "5a7c6af4-99f0-4c6f-a1cc-cb174b968879"
|
||||
},
|
||||
"execution_count": null,
|
||||
"outputs": [
|
||||
{
|
||||
"output_type": "stream",
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[36mTrying to use Configuration at :lollms_server_global_paths_cfg.yaml\u001b[0m\n",
|
||||
"\u001b[31mlollms_server_global_paths_cfg.yaml not found! Searching in your home folder.\u001b[0m\n",
|
||||
@ -387,6 +382,9 @@
|
||||
"Download Progress: 76488317976576% [00:35, 5160444535118.35%/s]"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"!lollms-settings --silent --set_personal_folder_path ./personal_data --install_binding exllama2 --install_model TheBloke/Airoboros-L2-13B-3.1.1-GPTQ"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
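One detail worth flagging in the cell change above: in Colab each `!` command runs in its own subshell, so the earlier `!cd zoos` / `!cd ..` pair never actually changed the working directory, which is why the updated cell clones directly into the zoos/ subfolders. An equivalent alternative (assumption: running under IPython/Colab, where the `%cd` magic does persist between lines) would be:

    !mkdir zoos
    %cd zoos
    !git clone https://github.com/ParisNeo/lollms_bindings_zoo.git bindings_zoo
    !git clone https://github.com/ParisNeo/lollms_extensions_zoo.git extensions_zoo
    !git clone https://github.com/ParisNeo/models_zoo.git models_zoo
    !git clone https://github.com/ParisNeo/lollms_personalities_zoo.git personalities_zoo
    %cd ..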
personal_data/configs/lollms_discord_local_config.yaml (new file, 48 lines)
@@ -0,0 +1,48 @@
active_personality_id: -1
audio_auto_send_input: true
audio_in_language: en-US
audio_out_voice: null
audio_pitch: 1
audio_silenceTimer: 5000
auto_save: true
auto_show_browser: true
auto_speak: false
auto_update: true
binding_name: null
ctx_size: 4084
data_vectorization_activate: true
data_vectorization_build_keys_words: false
data_vectorization_chunk_size: 512
data_vectorization_method: tfidf_vectorizer
data_vectorization_nb_chunks: 2
data_vectorization_overlap_size: 128
data_vectorization_save_db: false
data_vectorization_visualize_on_vectorization: false
data_visualization_method: PCA
db_path: database.db
debug: false
discussion_prompt_separator: '!@>'
enable_gpu: true
extensions: []
host: localhost
min_n_predict: 256
model_name: null
n_predict: 1024
n_threads: 8
override_personality_model_parameters: false
personalities: []
port: 9600
repeat_last_n: 40
repeat_penalty: 1.2
seed: -1
temperature: 0.9
top_k: 50
top_p: 0.95
use_discussions_history: false
use_files: true
use_user_informations_in_discussion: false
use_user_name_in_discussions: false
user_avatar: default_user.svg
user_description: ''
user_name: user
version: 27
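Since the new file is plain YAML, it can also be inspected with standard tooling. A small sketch (assumption: this is just a generic pyyaml read, not how lollms itself loads its configuration):

    import yaml
    from pathlib import Path

    cfg_path = Path("personal_data/configs/lollms_discord_local_config.yaml")
    with cfg_path.open("r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)                    # dict with the 48 keys listed above
    print(cfg["discussion_prompt_separator"])      # '!@>'
    print(cfg["ctx_size"], cfg["n_predict"])       # 4084 1024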