Mirror of https://github.com/ParisNeo/lollms-webui.git, synced 2025-06-01 23:30:43 +00:00
Repaired Windows issues
This commit is contained in:
parent 0c6a8dd1e7
commit 88c7f68a93

app.py (3 changes)
@@ -5,7 +5,7 @@ import sqlite3
 import traceback
 from datetime import datetime
 from concurrent.futures import ThreadPoolExecutor
-
+import sys
 
 from flask import (
     Flask,
@@ -250,6 +250,7 @@ GPT4All:Welcome! I'm here to assist you with anything you need. What can I do fo
 
     def new_text_callback(self, text: str):
         print(text, end="")
+        sys.stdout.flush()
        self.full_text += text
         if self.is_bot_text_started:
             self.bot_says += text
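The added flush is the point of this hunk: print(text, end="") emits partial tokens without a newline, and Python's stdout is line-buffered on a terminal (block-buffered when redirected), so tokens can sit invisible in the buffer until a newline arrives. Below is a standalone sketch of the same streaming pattern; it is not code from app.py, and the token list and delay are made up for illustration.

    import sys
    import time

    def stream_tokens(tokens):
        """Print tokens as they arrive and flush so each one shows up immediately."""
        for token in tokens:
            print(token, end="")
            sys.stdout.flush()  # without this, partial output may not appear until a newline
            time.sleep(0.05)    # stand-in for model generation latency
        print()

    if __name__ == "__main__":
        stream_tokens(["Hello", ",", " world", "!"])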
(Windows installer batch script — the file-name header for these hunks did not survive the capture, likely install.bat)

@@ -145,7 +145,7 @@ if not exist models/gpt4all-lora-quantized-ggml.bin (
 
 :DOWNLOAD_WITH_BROWSER
 start https://the-eye.eu/public/AI/models/nomic-ai/gpt4all/gpt4all-lora-quantized-ggml.bin
-echo Link has been opened with the default web browser, make sure to save it into the models folder. Press any key to continue.
+echo Link has been opened with the default web browser, make sure to save it into the models folder. When it finishes the download, press any key to continue.
 pause
 goto :CONTINUE
 
@@ -171,6 +171,13 @@ goto :CONTINUE
 :CONTINUE
 echo.
 
+echo Converting the model to the new format
+if not exist tmp/llama.cpp git clone https://github.com/ggerganov/llama.cpp.git tmp\llama.cpp
+move models\gpt4all-lora-quantized-ggml.bin models\gpt4all-lora-quantized-ggml.bin.original
+python tmp\llama.cpp\migrate-ggml-2023-03-30-pr613.py models\gpt4all-lora-quantized-ggml.bin.original models\gpt4all-lora-quantized-ggml.bin
+echo The model file (gpt4all-lora-quantized-ggml.bin) has been fixed.
+
+
 echo Cleaning tmp folder
 rd /s /q "./tmp"
 
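For readers who prefer to run the conversion by hand rather than through the installer, here is a hedged Python sketch of the same three steps: clone llama.cpp, park the original file, and regenerate it under its old name. The script name migrate-ggml-2023-03-30-pr613.py, the repository URL, and the paths come straight from the batch lines above; the error handling and use of subprocess are assumptions, not project code.

    import subprocess
    import sys
    from pathlib import Path

    MODEL = Path("models/gpt4all-lora-quantized-ggml.bin")
    ORIGINAL = MODEL.with_name(MODEL.name + ".original")
    LLAMA_CPP = Path("tmp/llama.cpp")

    # Fetch llama.cpp only if the migration script is not already present.
    if not LLAMA_CPP.exists():
        subprocess.run(
            ["git", "clone", "https://github.com/ggerganov/llama.cpp.git", str(LLAMA_CPP)],
            check=True,
        )

    # Keep the untouched file, then write the converted model under the original name.
    MODEL.rename(ORIGINAL)
    subprocess.run(
        [sys.executable, str(LLAMA_CPP / "migrate-ggml-2023-03-30-pr613.py"), str(ORIGINAL), str(MODEL)],
        check=True,
    )
    print(f"{MODEL} has been converted to the new ggml format.")

Doing this once at install time is what lets run.bat drop its interactive fix loop in the diff below.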
run.bat (24 changes)
@@ -38,32 +38,8 @@ echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
 REM Activate the virtual environment
 call env\Scripts\activate.bat
 
-:RESTART
 REM Run the Python app
 python app.py %*
 set app_result=%errorlevel%
 
-REM Ask if user wants the model fixed
-IF %app_result% EQU 0 (
-    goto END
-) ELSE (
-    echo.
-    choice /C YN /M "The model file (gpt4all-lora-quantized-ggml.bin) appears to be invalid. Do you want to fix it?"
-    if errorlevel 2 goto END
-    if errorlevel 1 goto MODEL_FIX
-)
-
-REM Git Clone, Renames the bad model and fixes it using the same original name
-:MODEL_FIX
-if not exist llama.cpp git clone https://github.com/ggerganov/llama.cpp.git
-move models\gpt4all-lora-quantized-ggml.bin models\gpt4all-lora-quantized-ggml.bin.original
-python llama.cpp\migrate-ggml-2023-03-30-pr613.py models\gpt4all-lora-quantized-ggml.bin.original models\gpt4all-lora-quantized-ggml.bin
-echo The model file (gpt4all-lora-quantized-ggml.bin) has been fixed. Press any key to restart...
-pause >nul
-goto RESTART
-
-:END
-REM Wait for user input before exiting
-echo.
-echo Press any key to exit...
 pause >nul
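The removed block keyed everything off the process exit code: a non-zero %errorlevel% from python app.py was taken to mean the model file was still in the old format, and the user was offered the same migration that the installer now performs up front. For illustration only, a minimal Python sketch of the exit-code contract that logic relied on; the function and messages are hypothetical, not code from app.py.

    import sys

    def load_model(path: str) -> bytes:
        """Hypothetical loader: raises if the file is missing or unreadable."""
        with open(path, "rb") as f:
            return f.read()

    if __name__ == "__main__":
        try:
            load_model("models/gpt4all-lora-quantized-ggml.bin")
        except Exception as err:
            print(f"Model could not be loaded: {err}", file=sys.stderr)
            sys.exit(1)  # non-zero exit is what the old run.bat branched on
        sys.exit(0)      # zero exit meant "nothing to fix" and jumped straight to :END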