This commit is contained in:
Saifeddine ALOUI 2023-08-31 22:52:34 +02:00
parent 26ef478582
commit bffd5735c1
8 changed files with 101 additions and 63 deletions

13
app.py
View File

@ -360,6 +360,10 @@ class LoLLMsWebUI(LoLLMsAPPI):
"/extensions", "extensions", self.extensions, methods=["GET"]
)
self.add_endpoint(
"/upgrade_to_gpu", "upgrade_to_gpu", self.upgrade_to_gpu, methods=["GET"]
)
self.add_endpoint(
"/training", "training", self.training, methods=["GET"]
)
@ -715,6 +719,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
data = request.get_json()
setting_name = data['setting_name']
ASCIIColors.info(f"Requested updating of setting {data['setting_name']} to {data['setting_value']}")
if setting_name== "temperature":
self.config["temperature"]=float(data['setting_value'])
elif setting_name== "n_predict":
@ -841,6 +846,14 @@ class LoLLMsWebUI(LoLLMsAPPI):
return jsonify({"status":False,"error":str(ex)})
def upgrade_to_gpu(self):
    """Install the CUDA 11.7 toolkit and a CUDA-enabled torch build, then enable GPU mode.

    Endpoint: GET /upgrade_to_gpu

    Returns:
        flask JSON response {'status': bool} on completion, or
        {'status': False, 'error': str} if the conda toolkit install fails.
    """
    # NOTE: subprocess.check_call raises CalledProcessError on a nonzero exit
    # code and otherwise returns 0, so the original `if res != 0` branch was
    # unreachable (and a failure would bubble up as an unhandled exception).
    # subprocess.call returns the exit code instead, which is what the checks
    # below actually expect.
    res = subprocess.call(["conda", "install", "-c", "nvidia/label/cuda-11.7.0", "-c", "nvidia", "-c", "conda-forge", "cuda-toolkit", "ninja", "git", "--force-reinstall"])
    if res != 0:
        return jsonify({'status':False, "error": "Couldn't install cuda toolkit. Make sure you are running from conda environment"})
    res = subprocess.call(["pip","install","--upgrade","torch==2.0.1+cu117", "torchvision", "torchaudio", "--index-url", "https://download.pytorch.org/whl/cu117"])
    # Only flag GPU support once the torch upgrade actually succeeded;
    # previously the flag was set unconditionally even on failure.
    if res == 0:
        self.config.enable_gpu = True
    return jsonify({'status': res == 0})
def ram_usage(self):
"""
Returns the RAM usage in bytes.

BIN
assets/conda_session.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 113 KiB

BIN
assets/update.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

File diff suppressed because one or more lines are too long

8
web/dist/assets/index-5e1ef413.css vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-04bf888d.js"></script>
<link rel="stylesheet" href="/assets/index-5d272872.css">
<script type="module" crossorigin src="/assets/index-c865d936.js"></script>
<link rel="stylesheet" href="/assets/index-5e1ef413.css">
</head>
<body>
<div id="app"></div>

View File

@ -607,15 +607,20 @@
<td style="min-width: 200px;">
<label for="enable_gpu" class="text-sm font-bold" style="margin-right: 1rem;">Enable GPU:</label>
</td>
<td>
<td class="text-center items-center">
<div class="flex flex-row">
<input
type="checkbox"
id="enable_gpu"
required
v-model="configFile.enable_gpu"
@change="settingsChanged=true"
class="w-full mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600"
class="m-2 h-50 w-50 py-1 border border-gray-300 rounded dark:bg-gray-600 "
>
<button v-if="!configFile.enable_gpu" @click.prevent="upgrade2GPU" class="w-100 text-center rounded m-2 bg-blue-300 hover:bg-blue-200 text-l hover:text-primary p-2 m-2 text-left flex flex-row ">
Upgrade from CPU to GPU
</button>
</div>
</td>
</tr>
<tr>
@ -2521,8 +2526,28 @@ export default {
return { 'status': false }
});
},
upgrade2GPU(){
this.isLoading = true
try{
axios.get('/upgrade_to_gpu').then(res => {
this.isLoading = false
if (res) {
if(res.status){
this.$refs.toast.showToast("Upgraded to GPU", 4, true)
this.configFile.enable_gpu=True
}
else{
this.$refs.toast.showToast("Could not upgrade to GPU. Endpoint error: " + res.error, 4, false)
}
}
})
}
catch (error) {
this.isLoading = false
this.$refs.toast.showToast("Could not open binding settings. Endpoint error: " + error.message, 4, false)
}
},
onSettingsBinding(bindingEntry) {
try {
this.isLoading = true
axios.get('/get_active_binding_settings').then(res => {