Commit f83a079

26 - added warning if download model is not an LLM

Signed-off-by: julianbollig <[email protected]>
julianbollig authored and marijnvg-tng committed Nov 21, 2024
1 parent ef9a0f7

Showing 7 changed files with 120 additions and 27 deletions.
18 changes: 13 additions & 5 deletions WebUI/src/App.vue
@@ -71,7 +71,8 @@
 <app-settings v-if="showSetting" @close="hideAppSettings" @show-download-model-confirm="showDownloadModelConfirm"></app-settings>
 </div>
 <download-dialog v-show="showDowloadDlg" ref="downloadDigCompt" @close="showDowloadDlg = false"></download-dialog>
-<add-l-l-m-dialog v-show="showModelRequestDialog" ref="addLLMCompt" @close="showModelRequestDialog = false" @call-check-model="callCheckModel"></add-l-l-m-dialog>
+<add-l-l-m-dialog v-show="showModelRequestDialog" ref="addLLMCompt" @close="showModelRequestDialog = false" @call-check-model="callCheckModel" @show-warning="showWarning"></add-l-l-m-dialog>
+<warning-dialog v-show="showWarningDialog" ref="warningCompt" @close="showWarningDialog = false"></warning-dialog>
 </main>
 <footer class="flex-none px-4 flex justify-between items-center select-none" :class="{'bg-black bg-opacity-50': theme.active === 'lnl', 'bg-black bg-opacity-80': theme.active === 'bmg', 'border-t border-color-spilter': theme.active === 'dark'}">
 <div>
@@ -117,6 +118,7 @@ import DownloadDialog from '@/components/DownloadDialog.vue';
 import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
 import { useTheme } from "./assets/js/store/theme.ts";
 import AddLLMDialog from "@/components/AddLLMDialog.vue";
+import WarningDialog from "@/components/WarningDialog.vue";
 const isOpen = ref(false);
@@ -137,10 +139,14 @@ const showDowloadDlg = ref(false);
 const showModelRequestDialog = ref(false);
+const showWarningDialog = ref(false);
 const downloadDigCompt = ref<InstanceType<typeof DownloadDialog>>();
 const addLLMCompt = ref<InstanceType<typeof AddLLMDialog>>();
+const warningCompt = ref<InstanceType<typeof WarningDialog>>();
 const fullscreen = ref(false);
 const platformTitle = window.envVars.platformTitle;
@@ -169,7 +175,6 @@ onBeforeMount(async () => {
   })
 })
 function showAppSettings() {
   if (showSetting.value === false) {
     showSetting.value = true;
@@ -227,20 +232,23 @@ function postImageToEnhance(imageUrl: string) {
 function showDownloadModelConfirm(downList: DownloadModelParam[], success?: () => void, fail?: () => void) {
   showDowloadDlg.value = true;
-  console.log(downList)
   nextTick(() => {
     downloadDigCompt.value!.showConfirm(downList, success, fail);
-    console.log(showDowloadDlg.value)
   });
 }
-function showModelRequest(success?: () => void, fail?: () => void) {
+function showModelRequest() {
   showModelRequestDialog.value = true;
 }
 function callCheckModel(){
   answer.value!.checkModel();
 }
+function showWarning(message : string, func : () => void) {
+  warningCompt.value!.warningMessage = message;
+  showWarningDialog.value = true;
+  warningCompt.value!.confirmFunction = func;
+}
 </script>
5 changes: 3 additions & 2 deletions WebUI/src/assets/i18n/en-US.json
@@ -125,7 +125,7 @@
"DECREASE_FONT_SIZE": "Shrink Text",
"ANSWER_RAG_ENABLE":"Enable File Query",
"ANSWER_RAG_OPEN_DIALOG":"Open File Uploader",
"REQUEST_LLM_MODEL_NAME":"Add a model of your choice from huggingface.co,<br />e.g. meta-llama/Llama-3.2-11B-Vision-Instruct: ",
"REQUEST_LLM_MODEL_NAME":"Add a model of your choice from huggingface.co,<br />for example: <i>meta-llama/Llama-3.2-1B </i>",
"DOWNLOADER_CONFRIM_TIP":"You are missing one or more models needed to run. Would you like to download the model(s) listed below?",
"DOWNLOADER_MODEL":"Model",
"DOWNLOADER_INFO":"Info",
@@ -169,5 +169,6 @@
 "ERROR_PYTHON_BACKEND_INIT": "Backend initialization failed",
 "ERROR_PYTHON_BACKEND_INIT_DETAILS_TEXT": "The AI inference backend failed to initialize. Please try restarting the application. If the problem persists, you can check the Details for additional information about the error.",
 "ERROR_PYTHON_BACKEND_INIT_DETAILS": "Details",
-"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log"
+"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log",
+"WARNING_MODEL_TYPE_WRONG": "The model type doesn't seem to fit the requirements. Are you sure, you want to continue?"
 }
2 changes: 1 addition & 1 deletion WebUI/src/assets/js/store/models.ts
@@ -1,6 +1,6 @@
 import { defineStore } from "pinia";

-type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae";
+export type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae" | "undefined";

 export type Model = {
   name: string;
58 changes: 39 additions & 19 deletions WebUI/src/components/AddLLMDialog.vue
@@ -18,7 +18,8 @@
 <script setup lang="ts">
 import { useGlobalSetup } from '@/assets/js/store/globalSetup';
 import { useI18N } from '@/assets/js/store/i18n';
-import { useModels, userModels } from '@/assets/js/store/models';
+import { useModels, userModels , ModelType} from '@/assets/js/store/models';
 const i18nState = useI18N().state;
 const globalSetup = useGlobalSetup();
@@ -29,16 +30,15 @@ const addModelError = ref(false);
 const animate = ref(false);
 const emits = defineEmits<{
   (e: "close"): void,
-  (e: "callCheckModel"): void
+  (e: "callCheckModel"): void,
+  (e: "showWarning", warning : string, func : () => void): void
 }>();
 onDeactivated(() => {
   animate.value = false;
 })
 function fastGenerate(e: KeyboardEvent) {
-  // ToDo: Live-Check if model available
   if (e.code == "Enter") {
     if (e.ctrlKey || e.shiftKey || e.altKey) {
       modelRequest.value += "\n";
@@ -51,28 +51,38 @@ function fastGenerate(e: KeyboardEvent) {
 async function addModel() {
   const previousModel = globalSetup.modelSettings.llm_model
-  const url_exists = await urlExists(modelRequest.value);
   const is_in_models = models.models.some((model) => model.name === modelRequest.value)
-  if (url_exists && !is_in_models) {
-    userModels.push({ name: modelRequest.value, type: 'llm', downloaded: false })
-    await models.refreshModels()
-    console.log(models.models)
-    globalSetup.modelSettings.llm_model = modelRequest.value;
-    emits("callCheckModel");
-    closeAdd()
-  } else if (is_in_models) {
-    globalSetup.modelSettings.llm_model = previousModel
-    addModelErrorMessage.value = i18nState.ERROR_ALREADY_IN_MODELS
-    addModelError.value = true;
+  if (!is_in_models) {
+    const url_exists = await urlExists(modelRequest.value);
+    if (url_exists) {
+      addModelError.value = false
+      const is_llm = await isLLM(modelRequest.value);
+      if (!is_llm) {
+        emits("showWarning", i18nState.WARNING_MODEL_TYPE_WRONG, () => {performDownload()});
+      } else {
+        await performDownload()
+      }
+    } else {
+      globalSetup.modelSettings.llm_model = previousModel
+      addModelErrorMessage.value = i18nState.ERROR_REPO_NOT_EXISTS
+      addModelError.value = true;
+      console.log("Hey")
+    }
   } else {
     globalSetup.modelSettings.llm_model = previousModel
-    addModelErrorMessage.value = i18nState.ERROR_REPO_NOT_EXISTS
+    addModelErrorMessage.value = i18nState.ERROR_ALREADY_IN_MODELS
     addModelError.value = true;
   }
 }
+async function performDownload() {
+  userModels.push({name: modelRequest.value, type: 'llm', downloaded: false})
+  await models.refreshModels()
+  globalSetup.modelSettings.llm_model = modelRequest.value;
+  emits("callCheckModel");
+  closeAdd()
+}
 async function urlExists(repo_id: string) {
   const response = await fetch(`${globalSetup.apiHost}/api/checkURLExists`, {
@@ -81,11 +91,21 @@ async function urlExists(repo_id: string) {
     headers: {
       "Content-Type": "application/json"
     }})
   const data = await response.json()
   return data.exists;
 }
+async function isLLM(repo_id: string) {
+  const response = await fetch(`${globalSetup.apiHost}/api/isLLM`, {
+    method: "POST",
+    body: JSON.stringify(repo_id),
+    headers: {
+      "Content-Type": "application/json"
+    }})
+  const data = await response.json()
+  return data.isllm
+}
 function closeAdd() {
   addModelError.value = false;
   modelRequest.value = "";
46 changes: 46 additions & 0 deletions WebUI/src/components/WarningDialog.vue
@@ -0,0 +1,46 @@
+<template>
+  <div class="dialog-container z-10">
+    <div class="dialog-mask absolute left-0 top-0 w-full h-full bg-black/55 flex justify-center items-center">
+      <div class="py-10 px-20 w-500px flex flex-col items-center justify-center bg-gray-600 rounded-3xl gap-6 text-white"
+        :class="{ 'animate-scale-in': animate }">
+        <p v-html= "warningMessage"></p>
+        <div class="flex justify-center items-center gap-9">
+          <button @click="cancelConfirm" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CANCEL}}</button>
+          <button @click="confirmAdd" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CONFIRM}}</button>
+        </div>
+      </div>
+    </div>
+  </div>
+</template>
+<script setup lang="ts">
+import { useI18N } from '@/assets/js/store/i18n.ts';
+const i18nState = useI18N().state;
+const confirmFunction = ref(() => {})
+const warningMessage = ref("")
+const animate = ref(false);
+const emits = defineEmits<{
+  (e: "close"): void
+}>();
+onDeactivated(() => {
+  animate.value = false;
+})
+async function confirmAdd() {
+  confirmFunction.value()
+  emits("close");
+}
+function cancelConfirm() {
+  emits("close");
+}
+defineExpose({warningMessage, confirmFunction});
+</script>
+<style scoped>
+table {
+  border-collapse: separate;
+  border-spacing: 10px;
+}
+</style>
3 changes: 3 additions & 0 deletions service/model_downloader.py
@@ -86,6 +86,9 @@ def __init__(self, hf_token=None) -> None:
     def probe_url(self, repo_id: str):
         model_info(repo_id)

+    def probe_type(self, repo_id : str):
+        return model_info(repo_id).pipeline_tag
+
     def is_gated(self, repo_id: str):
         try:
             info = model_info(repo_id)
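For reference, `probe_type` simply surfaces the `pipeline_tag` that the Hugging Face Hub stores in a repo's metadata. A minimal standalone sketch of the same lookup — the repo IDs are illustrative, and gated repos may require an HF token:

    # Sketch of what probe_type relies on, assuming huggingface_hub is installed.
    from huggingface_hub import model_info

    print(model_info("meta-llama/Llama-3.2-1B").pipeline_tag)           # "text-generation"
    print(model_info("stabilityai/stable-diffusion-2-1").pipeline_tag)  # "text-to-image"
    # Untagged repos yield None, which the new endpoint below counts as "not an LLM".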
15 changes: 15 additions & 0 deletions service/web_api.py
@@ -211,6 +211,21 @@ def check_url_exists():
         }
     )

+@app.route("/api/isLLM", methods=["POST"])
+def is_llm():
+    address = request.get_json()
+    downloader = HFPlaygroundDownloader()
+    try:
+        model_type_hf = downloader.probe_type(address)
+    except:
+        model_type_hf = "undefined"
+    return jsonify(
+        {
+            "isllm": model_type_hf == "text-generation"
+        }
+    )
+
+
 size_cache = dict()
 lock = threading.Lock()

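To exercise the new route by hand, a hypothetical smoke test; the host and port here are assumptions that depend on how the backend is launched:

    # Hypothetical smoke test for /api/isLLM; adjust host/port to your setup.
    import requests

    # The frontend sends JSON.stringify(repo_id), i.e. a bare JSON string body.
    resp = requests.post("http://127.0.0.1:9999/api/isLLM", json="meta-llama/Llama-3.2-1B")
    print(resp.json())  # expected: {'isllm': True} for a text-generation repo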
