Mirror of https://github.com/ParisNeo/lollms-webui.git

Commit 857a97be1c (parent 86f7c651ce): enhanced
@@ -404,9 +404,9 @@ class LoLLMsAPPI(LollmsApplication):
 self.current_discussion = self.db.load_last_discussion()
 
 message = data["prompt"]
-ump = "!@>"+self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
+ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
 message_id = self.current_discussion.add_message(
-ump.replace("!@>","").replace(":",""),
+ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
 message,
 message_type=MSG_TYPE.MSG_TYPE_FULL.value,
 parent=self.message_id
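The hunk above replaces the hard-coded `!@>` token with the configurable `discussion_prompt_separator`. A minimal sketch of the resulting prefix logic, using stand-in objects for the application's config and personality (the classes below are illustrative, not part of the codebase):

```python
# Illustrative stand-ins for self.config and self.personality.
class Config:
    discussion_prompt_separator = "!@>"   # now read from configuration
    user_name = "user"
    use_user_name_in_discussions = True

class Personality:
    user_message_prefix = "!@>user: "

config, personality = Config(), Personality()

# Same expression as the patched line: the prefix is built from the configured separator.
ump = (config.discussion_prompt_separator + config.user_name + ": "
       if config.use_user_name_in_discussions
       else personality.user_message_prefix)

# The sender passed to add_message strips the separator and the colon again.
sender = ump.replace(config.discussion_prompt_separator, "").replace(":", "")
print(repr(ump), "->", repr(sender))   # '!@>user: ' -> 'user '
```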
@@ -650,15 +650,15 @@ class LoLLMsAPPI(LollmsApplication):
 for i, message in enumerate(messages):
 if message["id"]< message_id or (message_id==-1 and i<len(messages)-1):
 if message["type"]<=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message["type"]!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value:
-self.full_message_list.append("\n!@>"+message["sender"]+": "+message["content"].strip())
+self.full_message_list.append("\n"+self.config.discussion_prompt_separator+message["sender"]+": "+message["content"].strip())
 else:
 break
 
 link_text = self.personality.link_text
 if not is_continue:
-self.full_message_list.append("\n!@>"+message["sender"].replace(":","")+": "+message["content"].strip()+self.personality.link_text+self.personality.ai_message_prefix)
+self.full_message_list.append("\n"+self.config.discussion_prompt_separator +message["sender"].replace(":","")+": "+message["content"].strip()+self.personality.link_text+self.personality.ai_message_prefix)
 else:
-self.full_message_list.append("\n!@>"+message["sender"].replace(":","")+": "+message["content"].strip())
+self.full_message_list.append("\n"+self.config.discussion_prompt_separator +message["sender"].replace(":","")+": "+message["content"].strip())
 
 
 composed_messages = link_text.join(self.full_message_list)
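For context, a small self-contained sketch of how this loop assembles the discussion prompt once the separator comes from configuration; `messages` and `link_text` below are hypothetical stand-ins for the discussion data and the personality link text:

```python
# Sketch of the prompt composition with a configurable separator (stand-in data).
discussion_prompt_separator = "!@>"
link_text = "\n"
messages = [
    {"sender": "user", "content": "Hello"},
    {"sender": "lollms", "content": "Hi! How can I help?"},
]

full_message_list = []
for message in messages:
    full_message_list.append(
        "\n" + discussion_prompt_separator + message["sender"] + ": " + message["content"].strip()
    )

composed_messages = link_text.join(full_message_list)
print(composed_messages)
```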
@@ -682,7 +682,7 @@ class LoLLMsAPPI(LollmsApplication):
 def get_discussion_to(self, message_id=-1):
 messages = self.current_discussion.get_messages()
 self.full_message_list = []
-ump = "!@>"+self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
+ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
 
 for message in messages:
 if message["id"]<= message_id or message_id==-1:
@@ -744,18 +744,21 @@ class LoLLMsAPPI(LollmsApplication):
 sys.stdout.flush()
 detected_anti_prompt = False
 anti_prompt_to_remove=""
-for prompt in self.personality.anti_prompts:
-if prompt.lower() in self.connections[client_id]["generated_text"].lower():
+for anti_prompt in self.personality.anti_prompts:
+if anti_prompt.lower() in self.connections[client_id]["generated_text"].lower():
 detected_anti_prompt=True
-anti_prompt_to_remove = prompt.lower()
+anti_prompt_to_remove = anti_prompt.lower()
 
 if not detected_anti_prompt:
 self.socketio.emit('message', {
-'data': self.connections[client_id]["generated_text"],
+'data': chunk,# self.connections[client_id]["generated_text"],
 'user_message_id':self.current_user_message_id,
 'ai_message_id':self.current_ai_message_id,
 'discussion_id':self.current_discussion.discussion_id,
-'message_type': MSG_TYPE.MSG_TYPE_FULL.value
+'message_type': MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
+"user_message_id": self.current_user_message_id,
+"ai_message_id": self.current_ai_message_id,
+'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
 }, room=client_id
 )
 self.socketio.sleep(0.01)
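A minimal sketch of the two behaviours this hunk introduces: the anti-prompt scan over the accumulated text, and the switch from re-sending the whole text to streaming only the latest chunk. Plain variables stand in for the application state, and the `find`-based cut stands in for `remove_text_from_string`:

```python
# Stand-in state for self.connections[client_id]["generated_text"] and friends.
anti_prompts = ["!@>user:"]                 # hypothetical personality anti-prompts
generated_text = "Sure, here it is.\n!@>user:"
chunk = "!@>user:"                          # latest text received from the binding
nb_received_tokens = 5

detected_anti_prompt = False
anti_prompt_to_remove = ""
for anti_prompt in anti_prompts:
    if anti_prompt.lower() in generated_text.lower():
        detected_anti_prompt = True
        anti_prompt_to_remove = anti_prompt.lower()

if not detected_anti_prompt:
    # After the first token only the delta is emitted (CHUNK); the very first
    # emission is FULL so the client can create the message bubble.
    message_type = "MSG_TYPE_CHUNK" if nb_received_tokens > 1 else "MSG_TYPE_FULL"
    payload = {"data": chunk, "message_type": message_type}
else:
    # Cut the text at the anti-prompt (stand-in for remove_text_from_string)
    # and send the cleaned text as a FULL message, then stop generating.
    generated_text = generated_text[: generated_text.lower().find(anti_prompt_to_remove)]
    payload = {"data": generated_text, "message_type": "MSG_TYPE_FULL"}

print(payload)
```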
@@ -769,6 +772,17 @@ class LoLLMsAPPI(LollmsApplication):
 return False
 else:
 self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"], anti_prompt_to_remove)
+self.socketio.emit('message', {
+'data': self.connections[client_id]["generated_text"],
+'user_message_id':self.current_user_message_id,
+'ai_message_id':self.current_ai_message_id,
+'discussion_id':self.current_discussion.discussion_id,
+'message_type': MSG_TYPE.MSG_TYPE_FULL.value,
+"user_message_id": self.current_user_message_id,
+"ai_message_id": self.current_ai_message_id,
+'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
+}, room=client_id
+)
 ASCIIColors.warning("The model is halucinating")
 return False
 
@@ -782,7 +796,8 @@ class LoLLMsAPPI(LollmsApplication):
 'user_message_id':self.current_user_message_id,
 'ai_message_id':self.current_ai_message_id,
 'discussion_id':self.current_discussion.discussion_id,
-'message_type': message_type.value
+'message_type': message_type.value,
+'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
 }, room=client_id
 )
 self.socketio.sleep(0.01)
@@ -794,7 +809,8 @@ class LoLLMsAPPI(LollmsApplication):
 'user_message_id':self.current_user_message_id,
 'ai_message_id':self.current_ai_message_id,
 'discussion_id':self.current_discussion.discussion_id,
-'message_type': message_type.value
+'message_type': message_type.value,
+'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
 }, room=client_id
 )
 self.socketio.sleep(0.01)
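For reference, the fields carried by the socketio `message` event after these hunks, written as a hypothetical TypedDict (a documentation aid only, not code from the repository):

```python
from typing import Optional, TypedDict

class MessagePayload(TypedDict, total=False):
    data: str                               # full text or incremental chunk
    user_message_id: int
    ai_message_id: int
    discussion_id: int
    message_type: int                       # MSG_TYPE value (FULL, CHUNK, STEP_*, EXCEPTION, ...)
    finished_generating_at: Optional[str]   # timestamp field added by this commit

example: MessagePayload = {
    "data": "Hello",
    "message_type": 0,                      # placeholder value, not the real enum member
    "finished_generating_at": None,
}
```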
@@ -873,7 +889,7 @@ class LoLLMsAPPI(LollmsApplication):
 else:
 self.connections[client_id]["generated_text"] = ""
 self.current_ai_message_id = self.current_discussion.add_message(
-self.personality.name,
+self.personality.name,
 "",
 message_type = MSG_TYPE.MSG_TYPE_FULL.value,
 parent = self.current_user_message_id,
@@ -890,6 +906,7 @@ class LoLLMsAPPI(LollmsApplication):
 "message":message,#markdown.markdown(message),
 "user_message_id": self.current_user_message_id,
 "ai_message_id": self.current_ai_message_id,
+"content":"✍ please stand by ...",
 
 'binding': self.current_discussion.current_message_binding,
 'model': self.current_discussion.current_message_model,
@@ -910,7 +927,6 @@ class LoLLMsAPPI(LollmsApplication):
 n_predict = self.config.ctx_size-len(tokens)-1,
 client_id=client_id,
 callback=partial(self.process_chunk,client_id = client_id)
-
 )
 print()
 print("## Done Generation ##")
@@ -942,20 +958,21 @@ class LoLLMsAPPI(LollmsApplication):
 
 }, room=client_id
 )
 
 self.socketio.sleep(0.01)
 
 ASCIIColors.success(f" ╔══════════════════════════════════════════════════╗ ")
 ASCIIColors.success(f" ║ Done ║ ")
 ASCIIColors.success(f" ╚══════════════════════════════════════════════════╝ ")
 else:
-ump = "!@>"+self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
+ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
 
 self.cancel_gen = False
 #No discussion available
 ASCIIColors.warning("No discussion selected!!!")
 self.socketio.emit('message', {
 'data': "No discussion selected!!!",
-'user_message_id':ump.replace("!@>","").replace(":",""),
+'user_message_id':ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
 'ai_message_id':self.current_ai_message_id,
 'discussion_id':0,
 'message_type': MSG_TYPE.MSG_TYPE_EXCEPTION.value
app.py (21 lines changed)
@@ -142,6 +142,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
 
 self.add_endpoint("/send_file", "send_file", self.send_file, methods=["POST"])
 self.add_endpoint("/upload_model", "upload_model", self.upload_model, methods=["POST"])
+self.add_endpoint("/upload_avatar", "upload_avatar", self.upload_avatar, methods=["POST"])
 
 
 self.add_endpoint("/list_mounted_personalities", "list_mounted_personalities", self.list_mounted_personalities, methods=["POST"])
@@ -692,13 +693,19 @@ class LoLLMsWebUI(LoLLMsAPPI):
 return jsonify(personalities_languages)
 
 def list_personalities_categories(self):
-personalities_categories_dir = self.lollms_paths.personalities_zoo_path/f'{self.personality_language}' # replace with the actual path to the models folder
+language = request.args.get('language')
+personalities_categories_dir = self.lollms_paths.personalities_zoo_path/f'{language}' # replace with the actual path to the models folder
 personalities_categories = [f.stem for f in personalities_categories_dir.iterdir() if f.is_dir() and not f.name.startswith(".")]
 return jsonify(personalities_categories)
 
 def list_personalities(self):
+language = request.args.get('language')
+category = request.args.get('category')
+if not category:
+return jsonify([])
+
 try:
-personalities_dir = self.lollms_paths.personalities_zoo_path/f'{self.personality_language}/{self.personality_category}' # replace with the actual path to the models folder
+personalities_dir = self.lollms_paths.personalities_zoo_path/f'{language}/{category}' # replace with the actual path to the models folder
 personalities = [f.stem for f in personalities_dir.iterdir() if f.is_dir() and not f.name.startswith(".")]
 except Exception as ex:
 personalities=[]
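With the endpoints now reading `language` and `category` from the query string, they can be exercised directly. A hedged usage sketch: the host and port match the default config shown later in this diff, while the language value is a placeholder:

```python
import requests

base = "http://localhost:9600"   # default host/port from the configuration file

categories = requests.get(
    f"{base}/list_personalities_categories", params={"language": "english"}
).json()

personalities = requests.get(
    f"{base}/list_personalities",
    params={"language": "english", "category": categories[0] if categories else ""},
).json()

print(categories)
print(personalities)
```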
@@ -1302,6 +1309,13 @@ class LoLLMsWebUI(LoLLMsAPPI):
 file.save(self.lollms_paths.personal_models_path/self.config.binding_name/file.filename)
 return jsonify({"status": True})
 
+def upload_avatar(self):
+file = request.files['avatar']
+file.save(self.lollms_paths.personal_user_infos_path/file.filename)
+return jsonify({"status": True})
+
+
+
 def rename(self):
 data = request.get_json()
 title = data["title"]
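The new `upload_avatar` handler expects a multipart form field named `avatar` (matching the `FormData` key used by the frontend change further down) and answers with `{"status": true}`. A sketch of calling it from a script; the file path and server address are assumptions:

```python
import requests

with open("my_avatar.png", "rb") as f:          # hypothetical local image
    res = requests.post(
        "http://localhost:9600/upload_avatar",
        files={"avatar": ("my_avatar.png", f, "image/png")},
    )

print(res.json())   # expected: {'status': True}
```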
@@ -1557,7 +1571,10 @@ if __name__ == "__main__":
 lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
 db_folder = lollms_paths.personal_path/"databases"
 db_folder.mkdir(parents=True, exist_ok=True)
+
+# Parsong parameters
 parser = argparse.ArgumentParser(description="Start the chatbot Flask app.")
+
 parser.add_argument(
 "-c", "--config", type=str, default="local_config", help="Sets the configuration file to be used."
 )
@@ -1,13 +1,17 @@
 # =================== Lord Of Large Language Models Configuration file ===========================
-version: 9
+version: 11
 binding_name: null
 model_name: null
 
+# Enables gpu usage
+enable_gpu: true
+
 # Host information
 host: localhost
 port: 9600
 
 # Genreration parameters
+discussion_prompt_separator: "!@>"
 seed: -1
 n_predict: 1024
 ctx_size: 2048
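The configuration hunk above bumps the version from 9 to 11 and introduces two keys, `enable_gpu` and `discussion_prompt_separator`. A small sketch of reading them, assuming PyYAML and the default template path referenced in the `__main__` block above:

```python
import yaml

with open("configs/config.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg.get("version"))                       # 11 after this commit
print(cfg.get("enable_gpu"))                    # True
print(cfg.get("discussion_prompt_separator"))   # "!@>"
```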
web/dist/assets/index-1d739553.css (vendored, new file, 8 lines): file diff suppressed because one or more lines are too long
web/dist/assets/index-bec8b3bc.css (vendored, 8 lines): file diff suppressed because one or more lines are too long
web/dist/index.html (vendored, 4 lines changed)
@@ -6,8 +6,8 @@
 
 <meta name="viewport" content="width=device-width, initial-scale=1.0">
 <title>LoLLMS WebUI - Welcome</title>
-<script type="module" crossorigin src="/assets/index-5eee0659.js"></script>
-<link rel="stylesheet" href="/assets/index-bec8b3bc.css">
+<script type="module" crossorigin src="/assets/index-cefbd10a.js"></script>
+<link rel="stylesheet" href="/assets/index-1d739553.css">
 </head>
 <body>
 <div id="app"></div>
@@ -237,17 +237,8 @@ export default {
 this.$emit('continueMessage', this.message.id, this.new_message_content)
 },
 getImgUrl() {
-
-if (this.message.sender == "user") {
-if (this.avatar) {
-
-return this.avatar
-}
-
-return userImgPlaceholder;
-
-}
 if (this.avatar) {
+console.log("Avatar",this.avatar)
 return bUrl + this.avatar
 }
 return botImgPlaceholder;
@@ -347,7 +347,7 @@ export default {
 
 } catch (error) {
 console.log(error.message, 'getFileSize')
-this.linkNotValid = true
+//this.linkNotValid = true
 return 'Could not be determined'
 
 }
@@ -1,6 +1,6 @@
 <template>
 <div class="step flex items-center mb-4">
-<div class="flex items-center justify-center w-6 h-6 rounded border border-gray-300 mr-2">
+<div class="flex items-center justify-center w-6 h-6 mr-2">
 <div v-if="!done">
 <i
 data-feather="square"
@@ -14,8 +14,18 @@
 ></i>
 </div>
 </div>
+<div v-if="!done" role="status">
+<svg aria-hidden="true" class="w-6 h-6 animate-spin fill-secondary" viewBox="0 0 100 101"
+fill="none" xmlns="http://www.w3.org/2000/svg">
+<path
+d="M100 50.5908C100 78.2051 77.6142 100.591 50 100.591C22.3858 100.591 0 78.2051 0 50.5908C0 22.9766 22.3858 0.59082 50 0.59082C77.6142 0.59082 100 22.9766 100 50.5908ZM9.08144 50.5908C9.08144 73.1895 27.4013 91.5094 50 91.5094C72.5987 91.5094 90.9186 73.1895 90.9186 50.5908C90.9186 27.9921 72.5987 9.67226 50 9.67226C27.4013 9.67226 9.08144 27.9921 9.08144 50.5908Z"
+fill="currentColor" />
+<path
+d="M93.9676 39.0409C96.393 38.4038 97.8624 35.9116 97.0079 33.5539C95.2932 28.8227 92.871 24.3692 89.8167 20.348C85.8452 15.1192 80.8826 10.7238 75.2124 7.41289C69.5422 4.10194 63.2754 1.94025 56.7698 1.05124C51.7666 0.367541 46.6976 0.446843 41.7345 1.27873C39.2613 1.69328 37.813 4.19778 38.4501 6.62326C39.0873 9.04874 41.5694 10.4717 44.0505 10.1071C47.8511 9.54855 51.7191 9.52689 55.5402 10.0491C60.8642 10.7766 65.9928 12.5457 70.6331 15.2552C75.2735 17.9648 79.3347 21.5619 82.5849 25.841C84.9175 28.9121 86.7997 32.2913 88.1811 35.8758C89.083 38.2158 91.5421 39.6781 93.9676 39.0409Z"
+fill="currentFill" />
+</svg>
+</div>
 <div class="content flex-1" :class="{'text-green-500': done, 'text-yellow-500': !done}">{{ message }}</div>
-<div class="loader w-6 h-6 border-t-4 border-b-4 border-blue-500 rounded-full animate-spin ml-2" v-if="!done"></div>
 </div>
 </template>
 
@@ -438,8 +438,8 @@ export default {
 if (res) {
 // Filter out the user and bot entries
 this.discussionArr = res.data.filter((item) =>
-item.type == this.msgTypes.MSG_TYPE_CHUNK ||
-item.type == this.msgTypes.MSG_TYPE_FULL
+item.type == this.msgTypes.MSG_TYPE_FULL ||
+item.type == this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI
 )
 console.log("this.discussionArr")
 console.log(this.discussionArr)
@@ -808,14 +808,19 @@ export default {
 // Create response message
 
 let responseMessage = {
-content: "✍ please stand by ...",//msgObj.message,
+content:msgObj.data, //content: "✍ please stand by ...",//msgObj.message,
+created_at:msgObj.created_at,
+binding:msgObj.binding,
+model:msgObj.model,
 id: msgObj.ai_message_id,
 parent: msgObj.user_message_id,
 personality:msgObj.personality,
 rank: 0,
 sender: msgObj.bot,
 created_at: msgObj.created_at,
-type:msgObj.type,
+steps: []
+//type: msgObj.type
 
 }
 this.discussionArr.push(responseMessage)
 // nextTick(() => {
@@ -916,12 +921,17 @@ export default {
 this.isGenerating = true;
 const index = this.discussionArr.findIndex((x) => x.parent == parent && x.id == msgObj.ai_message_id)
 const messageItem = this.discussionArr[index]
-if (messageItem && msgObj.message_type<this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_USER) {
+if (
+messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_FULL ||
+messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI
+) {
 messageItem.content = msgObj.data
+messageItem.finished_generating_at = msgObj.finished_generating_at
 }
-else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_START){
+else if(messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_CHUNK){
+messageItem.content += msgObj.data
+} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_START){
 messageItem.steps.push({"message":msgObj.data,"done":false})
 
 } else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_END) {
 // Find the step with the matching message and update its 'done' property to true
 const matchingStep = messageItem.steps.find(step => step.message === msgObj.data);
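The handler above implements the client side of the new streaming protocol: `MSG_TYPE_FULL` (and `MSG_TYPE_FULL_INVISIBLE_TO_AI`) payloads replace the message content, while `MSG_TYPE_CHUNK` payloads are appended. A minimal Python sketch of that accumulation rule with stand-in events:

```python
# Stand-in for the discussion message being updated and the incoming socket events.
message_item = {"content": "", "finished_generating_at": None}

events = [
    {"message_type": "MSG_TYPE_FULL", "data": "Hello"},   # first emission: full text
    {"message_type": "MSG_TYPE_CHUNK", "data": ", wor"},  # later emissions: deltas
    {"message_type": "MSG_TYPE_CHUNK", "data": "ld!"},
]

for msg in events:
    if msg["message_type"] in ("MSG_TYPE_FULL", "MSG_TYPE_FULL_INVISIBLE_TO_AI"):
        message_item["content"] = msg["data"]                     # replace
        message_item["finished_generating_at"] = msg.get("finished_generating_at")
    elif msg["message_type"] == "MSG_TYPE_CHUNK":
        message_item["content"] += msg["data"]                    # append

print(message_item["content"])   # Hello, world!
```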
@@ -610,7 +610,10 @@
 <label for="user_name" class="text-sm font-bold" style="margin-right: 1rem;">User avatar:</label>
 </td>
 <td style="width: 100%;">
-<img :src="user_avatar">
+<label for="avatar-upload">
+<img :src="user_avatar" class="w-50 h-50" style="max-width: 50px; max-height: 50px; cursor: pointer;">
+</label>
+<input type="file" id="avatar-upload" style="display: none" @change="uploadAvatar">
 </td>
 <td style="min-width: 300px;">
 <button
@@ -1546,7 +1549,27 @@ export default {
 socket.on('loading_text',this.on_loading_text);
 //await socket.on('install_progress', this.progressListener);
 
-}, methods: {
+},
+methods: {
+uploadAvatar(event){
+console.log("here")
+const file = event.target.files[0]; // Get the selected file
+const formData = new FormData(); // Create a FormData object
+formData.append('avatar', file); // Add the file to the form data with the key 'avatar'
+console.log("Uploading avatar")
+// Make an API request to upload the avatar
+axios.post('/upload_avatar', formData)
+.then(response => {
+console.log("Avatar uploaded successfully")
+// Assuming the server responds with the file name after successful upload
+const fileName = response.data.fileName;
+this.user_avatar = fileName; // Update the user_avatar value with the file name
+this.update_setting("user_avatar",fileName).then(()=>{})
+})
+.catch(error => {
+console.error('Error uploading avatar:', error);
+});
+},
 async update_software() {
 console.log("Posting")
 const res = await this.api_get_req('update_software')
@@ -1580,8 +1603,8 @@ export default {
 this.isModelSelected = true
 }
 this.persLangArr = await this.api_get_req("list_personalities_languages")
-this.persCatgArr = await this.api_get_req("list_personalities_categories")
-this.persArr = await this.api_get_req("list_personalities")
+this.persCatgArr = await this.api_get_req("list_personalities_categories?language="+this.configFile.personality_language)
+this.persArr = await this.api_get_req("list_personalities?language="+this.configFile.personality_language+"&category"+this.configFile.personality_category)
 this.langArr = await this.api_get_req("list_languages")
 
 this.bindingsArr.sort((a, b) => a.name.localeCompare(b.name))
@@ -2265,12 +2288,16 @@ export default {
 // Refresh stuff
 refresh() {
 console.log("Refreshing")
-this.$store.dispatch('refreshConfig').then(() => {
+this.persCatgArr = this.api_get_req("list_personalities_categories?language="+this.configFile.personality_language).then(()=>{
+this.$store.dispatch('refreshConfig').then(() => {
 console.log(this.personality_language)
 console.log(this.personality_category)
 
 this.personalitiesFiltered = this.personalities.filter((item) => item.category === this.personality_category && item.language === this.personality_language)
 this.personalitiesFiltered.sort()
 });
+})
+
+
 //this.fetchMainConfig();
 //this.fetchBindings();