This commit is contained in:
Saifeddine ALOUI 2023-08-26 18:01:09 +02:00
parent ea87ea7aea
commit afe72e3898
9 changed files with 72 additions and 46 deletions

View File

@ -1021,8 +1021,8 @@ class LoLLMsAPPI(LollmsApplication):
"content":discussion_messages
}, self.model.tokenize, self.model.detokenize, self.config.ctx_size, place_holders_to_sacrifice=["content"])
tokens = self.model.tokenize(discussion_messages)
if self.config["debug"]:
tokens = self.model.tokenize(discussion_messages)
ASCIIColors.yellow(discussion_messages)
ASCIIColors.info(f"prompt size:{len(tokens)} tokens")

View File

@ -39,8 +39,9 @@ echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
echo "Please specify if you want to use a GPU or CPU. Note that only NVidia GPUs are supported."
echo "A) Enable GPU"
echo "B) Run CPU mode"
echo "A) Enable Cuda (for nvidia GPUS)"
echo "B) Enable ROCm (for AMD GPUs)"
echo "C) Run CPU mode"
echo
read -rp "Input> " gpuchoice
gpuchoice="${gpuchoice:0:1}"
@ -49,6 +50,9 @@ if [[ "${gpuchoice^^}" == "A" ]]; then
PACKAGES_TO_INSTALL="python=3.10 cuda-toolkit ninja git gcc"
CHANNEL="-c nvidia/label/cuda-11.7.0 -c nvidia -c conda-forge"
elif [[ "${gpuchoice^^}" == "B" ]]; then
PACKAGES_TO_INSTALL="python=3.10 rocm-comgr rocm-smi ninja git gcc"
CHANNEL=" -c conda-forge"
elif [[ "${gpuchoice^^}" == "C" ]]; then
PACKAGES_TO_INSTALL="python=3.10 ninja git gcc"
CHANNEL="-c conda-forge"
else
@ -96,6 +100,8 @@ if [ ! -d "$INSTALL_ENV_DIR" ]; then
if [[ "${gpuchoice^^}" == "A" ]]; then
conda run --live-stream -p "$INSTALL_ENV_DIR" python -m pip install torch==2.0.1+cu117 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu117 || ( echo && echo "Pytorch installation failed." && exit 1 )
elif [[ "${gpuchoice^^}" == "B" ]]; then
conda run --live-stream -p "$INSTALL_ENV_DIR" python -m pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.4.2 || ( echo && echo "Pytorch installation failed." && exit 1 )
elif [[ "${gpuchoice^^}" == "C" ]]; then
conda run --live-stream -p "$INSTALL_ENV_DIR" python -m pip install torch torchvision torchaudio || ( echo && echo "Pytorch installation failed." && exit 1 )
fi
fi
@ -143,10 +149,11 @@ else
cp linux_update.sh ../
fi
if [[ "${gpuchoice^^}" == "B" ]]; then
if [[ "${gpuchoice^^}" == "C" ]]; then
echo "This is a .no_gpu file." > .no_gpu
echo "You have chosen to use only CPU on this system."
else
echo "GPU is enabled, no .no_gpu file will be created."
echo "You have chosen to use GPU on this system."
fi
PrintBigMessage() {

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-d19f79bc.js"></script>
<link rel="stylesheet" href="/assets/index-59f224ec.css">
<script type="module" crossorigin src="/assets/index-1bed7c67.js"></script>
<link rel="stylesheet" href="/assets/index-9d30d2d7.css">
</head>
<body>
<div id="app"></div>

View File

@ -122,7 +122,7 @@
</div>
<!-- CHAT BOX -->
<div class="flex flex-row flex-grow items-center gap-2 overflow-visible">
<select v-model="selectedModel" @change="setModel" class="bg-white dark:bg-black m-0 border-2 rounded-md shadow-sm w-3">
<select v-model="selectedModel" @change="setModel" class="bg-white dark:bg-black m-0 border-2 rounded-md shadow-sm w-0">
<option v-for="model in models" :key="model" :value="model">
{{ model }}
</option>

View File

@ -77,20 +77,23 @@ handleClickOutside(event) {
})
},
executeCommand(command) {
if (typeof this[command.value] === 'function') {
this[command.value]();
}
this.isMenuOpen = false;
console.log("Selected")
console.log(command.value)
if (typeof command.value === 'function') {
console.log("Command detected")
command.value();
}
if (this.execute_cmd) {
console.log("executing generic command")
this.execute_cmd(command); // Call the execute_cmd property with the current command
}
},
positionMenu() {
var isMenuAboveButton;
if (this.$refs.menuButton!=undefined){
console.log(this.force_position)
if(this.force_position==0 || this.force_position==undefined){
console.log("auto position")
const buttonRect = this.$refs.menuButton.getBoundingClientRect();
//const menuRect = this.$refs.menu.getBoundingClientRect();
@ -99,11 +102,9 @@ handleClickOutside(event) {
}
else if (this.force_position==1){
console.log("Menu above button")
isMenuAboveButton=true;
}
else{
console.log("Menu below button")
isMenuAboveButton=false;
}
this.menuPosition.top = isMenuAboveButton ? 'auto' : 'calc(100% + 10px)';

View File

@ -1,7 +1,7 @@
<template>
<div
class=" min-w-96 items-start p-4 hover:bg-primary-light rounded-lg mb-2 shadow-lg border-2 cursor-pointer select-none"
tabindex="-1" :class="selected_computed ? 'border-primary-light' : 'border-transparent'"
tabindex="-1" :class="selected_computed ? 'border-2 border-primary-light' : 'border-transparent', isMounted ? 'bg-blue-200 dark:bg-blue-700':''"
:title="!personality.installed ? 'Not installed' : ''">
<div :class="!personality.installed ? 'opacity-50' : ''">
@ -13,19 +13,19 @@
<h3 class="font-bold font-large text-lg line-clamp-3">
{{ personality.name }}
</h3>
<button type="button" title="Talk"
<button v-if="isMounted" type="button" title="Select"
@click="toggleSelected"
class="hover:text-secondary duration-75 active:scale-90 font-medium rounded-lg text-sm p-2 text-center inline-flex items-center " @click.stop="">
<i data-feather="check" class="w-5"></i>
<span class="sr-only">Select</span>
</button>
<button type="button" title="Talk"
<button v-if="isMounted" type="button" title="Talk"
@click="toggleTalk"
class="hover:text-secondary duration-75 active:scale-90 font-medium rounded-lg text-sm p-2 text-center inline-flex items-center " @click.stop="">
<i data-feather="send" class="w-5"></i>
<span class="sr-only">Talk</span>
</button>
<InteractiveMenu :commands="commandsList" :force_position=2>
<InteractiveMenu :commands="commandsList" :force_position=2 title="Menu">
</InteractiveMenu>
@ -147,6 +147,7 @@ export default {
this.onRemount(this)
},
toggleMounted() {
console.log("Mounting/unmounting")
this.onMounted(this)
},
toggleSettings() {

View File

@ -19,6 +19,8 @@ set SPCHARMESSAGE=
pause
cls
md
echo " ___ ___ ___ ___ ___ ___ "
echo " /\__\ /\ \ /\__\ /\__\ /\__\ /\ \ "
echo " /:/ / /::\ \ /:/ / /:/ / /::| | /::\ \ "
@ -30,11 +32,14 @@ echo " \:\ \ \:\ /:/ / \:\ \ \:\ \ /:/ / \:\ \:\__\ "
echo " \:\ \ \:\/:/ / \:\ \ \:\ \ /:/ / \:\/:/ / "
echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
echo
echo By ParisNeo
:retry
echo Please specify if you want to use a GPU or CPU. Note that only NVidia GPUs are supported.
echo A) Enable GPU
echo B) Run CPU mode
echo A) Enable cuda GPU
echo B) Enable ROCm compatible GPU (AMD and other GPUs)
echo C) Run CPU mode
set /p "gpuchoice=Input> "
set gpuchoice=%gpuchoice:~0,1%
@ -42,11 +47,14 @@ if /I "%gpuchoice%" == "A" (
set "PACKAGES_TO_INSTALL=python=3.10 cuda-toolkit ninja git"
set "CHANNEL=-c nvidia/label/cuda-11.7.0 -c nvidia -c conda-forge"
) else if /I "%gpuchoice%" == "B" (
set "PACKAGES_TO_INSTALL=python=3.10 rocm-comgr rocm-smi ninja git"
set "CHANNEL=-c conda-forge"
) else if /I "%gpuchoice%" == "C" (
set "PACKAGES_TO_INSTALL=python=3.10 m2w64-toolchain ninja git"
set "CHANNEL=-c conda-forge"
) else (
echo Invalid choice. Exiting...
exit
echo Invalid choice. Retry
goto retry
)
@rem better isolation for virtual environment
@ -82,7 +90,8 @@ if not exist "%INSTALL_ENV_DIR%" (
echo Packages to install: %PACKAGES_TO_INSTALL%
call conda create --no-shortcuts -y -k -p "%INSTALL_ENV_DIR%" %CHANNEL% %PACKAGES_TO_INSTALL% || ( echo. && echo Conda environment creation failed. && goto end )
if /I "%gpuchoice%" == "A" call conda run --live-stream -p "%INSTALL_ENV_DIR%" python -m pip install torch==2.0.1+cu117 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu117|| ( echo. && echo Pytorch installation failed.&& goto end )
if /I "%gpuchoice%" == "B" call conda run --live-stream -p "%INSTALL_ENV_DIR%" python -m pip install torch torchvision torchaudio|| ( echo. && echo Pytorch installation failed.&& goto end )
if /I "%gpuchoice%" == "B" call conda run --live-stream -p "%INSTALL_ENV_DIR%" python -m pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.4.2|| ( echo. && echo Pytorch installation failed.&& goto end )
if /I "%gpuchoice%" == "C" call conda run --live-stream -p "%INSTALL_ENV_DIR%" python -m pip install torch torchvision torchaudio|| ( echo. && echo Pytorch installation failed.&& goto end )
)
@rem check if conda environment was actually created
@ -125,7 +134,7 @@ if exist ..\win_update.bat (
setlocal enabledelayedexpansion
if /I "%gpuchoice%"=="B" (
if /I "%gpuchoice%"=="C" (
echo This is a .no_gpu file. > .no_gpu
) else (
echo GPU is enabled, no .no_gpu file will be created.
@ -144,4 +153,12 @@ echo. && echo.
exit /b
:end
echo Creating bin folder (needed for ctransformers)
IF EXIST "installer_files\lollms_env\bin" (
echo Folder already existing
) ELSE (
MKDIR "installer_files\lollms_env\bin"
echo Folder created successfully!
)
pause