From 5f167ff6c8cbeae19620c5e77ca43660444d5888 Mon Sep 17 00:00:00 2001 From: marijnvg-tng Date: Mon, 25 Nov 2024 16:48:49 +0100 Subject: [PATCH 1/5] 53 - Adapted download functionality to also accept file instances instead of only full repositories. Signed-off-by: marijnvg-tng --- WebUI/src/components/DownloadDialog.vue | 4 ++-- service/model_downloader.py | 15 ++++++++++----- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/WebUI/src/components/DownloadDialog.vue b/WebUI/src/components/DownloadDialog.vue index 81a9c3ce..11fee262 100644 --- a/WebUI/src/components/DownloadDialog.vue +++ b/WebUI/src/components/DownloadDialog.vue @@ -197,8 +197,8 @@ async function showConfirm(downList: DownloadModelParam[], success?: () => void, const sizeData = (await sizeResponse.json()) as ApiResponse & { sizeList: StringKV }; const gatedData = (await gatedResponse.json()) as ApiResponse & { gatedList: Record }; for (const item of downloadList.value) { - item.size = sizeData.sizeList[`${item.repo_id}_${item.type}`] || ""; - item.gated = gatedData.gatedList[item.repo_id] || false; + item.size = sizeData.sizeList[`${item.repo_id}_${item.type}`] || ""; + item.gated = gatedData.gatedList[item.repo_id] || false; } downloadList.value = downloadList.value; sizeRequesting.value = false; diff --git a/service/model_downloader.py b/service/model_downloader.py index a9df3b9d..4f20baa8 100644 --- a/service/model_downloader.py +++ b/service/model_downloader.py @@ -84,19 +84,22 @@ def __init__(self, hf_token=None) -> None: self.thread_lock = Lock() self.hf_token = hf_token + def trim_repo(self, repo_id): + return "/".join(repo_id.split("/")[:2]) + def hf_url_exists(self, repo_id: str): try: - model_info(repo_id) + model_info(self.trim_repo(repo_id)) return True except RepositoryNotFoundError: return False def probe_type(self, repo_id : str): - return model_info(repo_id).pipeline_tag + return model_info(self.trim_repo(repo_id)).pipeline_tag def is_gated(self, repo_id: str): try: - info = model_info(repo_id) + info = model_info(self.trim_repo(repo_id)) return info.gated except Exception as ex: print(f"Error while trying to determine whether {repo_id} is gated: {ex}") @@ -179,6 +182,7 @@ def get_model_total_size(self, repo_id: str, model_type: int): def enum_file_list( self, file_list: List, enum_path: str, model_type: int, is_root=True ): + # repo = "/".join(enum_path.split("/")[:2]) list = self.fs.ls(enum_path, detail=True) if model_type == 1 and enum_path == self.repo_id + "/unet": list = self.enum_sd_unet(list) @@ -216,14 +220,15 @@ def enum_file_list( continue self.total_size += size - relative_path = path.relpath(name, self.repo_id) + relative_path = path.relpath(name, self.trim_repo(self.repo_id)) subfolder = path.dirname(relative_path).replace("\\", "/") filename = path.basename(relative_path) url = hf_hub_url( - repo_id=self.repo_id, subfolder=subfolder, filename=filename + repo_id=self.trim_repo(self.repo_id), subfolder=subfolder, filename=filename ) file_list.append(HFFileItem(relative_path, size, url)) + def enum_sd_unet(self, file_list: List[str | Dict[str, Any]]): cur_level = 0 first_model = None From e50f62de1db2ef097e6eda75255767413647a527 Mon Sep 17 00:00:00 2001 From: Florian Esser Date: Wed, 27 Nov 2024 12:15:46 +0100 Subject: [PATCH 2/5] Enable download dialog in principle switch model loading depending on backend type from settings In comfyUI case, return empty as mock implementation Signed-off-by: Florian Esser --- WebUI/src/assets/js/store/imageGeneration.ts | 34 ++++++++++++-------- 
WebUI/src/views/Create.vue | 25 ++++++++++++-- 2 files changed, 42 insertions(+), 17 deletions(-) diff --git a/WebUI/src/assets/js/store/imageGeneration.ts b/WebUI/src/assets/js/store/imageGeneration.ts index c85abb60..84048a17 100644 --- a/WebUI/src/assets/js/store/imageGeneration.ts +++ b/WebUI/src/assets/js/store/imageGeneration.ts @@ -254,22 +254,28 @@ export const useImageGeneration = defineStore("imageGeneration", () => { } async function getMissingModels() { - const checkList: CheckModelExistParam[] = [{ repo_id: imageModel.value, type: Const.MODEL_TYPE_STABLE_DIFFUSION }]; - if (lora.value !== "None") { - checkList.push({ repo_id: lora.value, type: Const.MODEL_TYPE_LORA }) - } - if (imagePreview.value) { - checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW }) - checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW }) - } - const result = await globalSetup.checkModelExists(checkList); - const downloadList: CheckModelExistParam[] = []; - for (const item of result) { - if (!item.exist) { - downloadList.push({ repo_id: item.repo_id, type: item.type }) + if (activeWorkflow.value.backend === 'default') { + const checkList: CheckModelExistParam[] = [{ repo_id: imageModel.value, type: Const.MODEL_TYPE_STABLE_DIFFUSION }]; + if (lora.value !== "None") { + checkList.push({ repo_id: lora.value, type: Const.MODEL_TYPE_LORA }) + } + if (imagePreview.value) { + checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW }) + checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW }) } + const result = await globalSetup.checkModelExists(checkList); + const downloadList: CheckModelExistParam[] = []; + for (const item of result) { + if (!item.exist) { + downloadList.push({ repo_id: item.repo_id, type: item.type }) + } + } + return downloadList; + } else { + console.error(`missing models for backend ${activeWorkflow.value.backend} cannot be loaded.`) + return [] } - return downloadList; + } function generate() { diff --git a/WebUI/src/views/Create.vue b/WebUI/src/views/Create.vue index 0ff1f4de..1fe2ed0a 100644 --- a/WebUI/src/views/Create.vue +++ b/WebUI/src/views/Create.vue @@ -102,11 +102,30 @@ const emits = defineEmits<{ (e: "postImageToEnhance", url: string): void }>(); -async function generateImage() { - reset(); - await imageGeneration.generate(); +async function generateImage() { + await ensureModelsAreAvailable(); + reset(); + await imageGeneration.generate(); } +async function ensureModelsAreAvailable() { + return new Promise(async (resolve, reject) => { + const downloadList = await imageGeneration.getMissingModels(); + if (downloadList.length > 0) { + emits( + "showDownloadModelConfirm", + downloadList, + resolve, + reject + ); + } else { + resolve && resolve(); + } + }); +} + + + function postImageToEnhance() { emits("postImageToEnhance", imageGeneration.imageUrls[imageGeneration.previewIdx]); } From 2e2a952f52c881ff53cce608f05b04792e615ce4 Mon Sep 17 00:00:00 2001 From: Florian Esser Date: Wed, 27 Nov 2024 16:53:00 +0100 Subject: [PATCH 3/5] Provide downloading functionality for comfyUI models expect users to declare them in the workflow file declaring the comfyUI type (vae/unet/...) along with the downloading coordinates. 
Download into separate dir (not service dir) as a POC Signed-off-by: Florian Esser --- WebUI/external/workflows/fluxQ4.json | 14 +++-- WebUI/src/assets/js/const.ts | 3 + WebUI/src/assets/js/store/imageGeneration.ts | 66 ++++++++++++++------ WebUI/src/assets/js/store/stableDiffusion.ts | 10 +-- WebUI/src/components/DownloadDialog.vue | 2 +- WebUI/src/env.d.ts | 9 ++- WebUI/src/views/Answer.vue | 4 +- WebUI/src/views/AppSettings.vue | 2 +- WebUI/src/views/Enhance.vue | 16 ++--- service/aipg_utils.py | 52 ++++++++++++--- service/model_config.py | 9 ++- service/model_download_adpater.py | 2 +- service/model_downloader.py | 40 +++++++----- service/web_api.py | 5 +- service/web_request_bodies.py | 4 ++ 15 files changed, 165 insertions(+), 73 deletions(-) create mode 100644 service/web_request_bodies.py diff --git a/WebUI/external/workflows/fluxQ4.json b/WebUI/external/workflows/fluxQ4.json index d5b4205e..78daa129 100644 --- a/WebUI/external/workflows/fluxQ4.json +++ b/WebUI/external/workflows/fluxQ4.json @@ -5,6 +5,12 @@ "Q4", "Fast" ], + "requiredModels": [ + "unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf", + "clip:city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf", + "clip:comfyanonymous/flux_text_encoders/clip_l.safetensors", + "vae:black-forest-labs/FLUX.1-schnell/ae.safetensors" + ], "requirements": [ "high-vram" ], @@ -72,7 +78,7 @@ }, "170": { "inputs": { - "unet_name": "flux1-schnell-Q4_K_S.gguf" + "unet_name": "city96---FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf" }, "class_type": "UnetLoaderGGUF", "_meta": { @@ -81,7 +87,7 @@ }, "171": { "inputs": { - "vae_name": "ae.safetensors" + "vae_name": "vae:black-forest-labs/FLUX.1-schnell/ae.safetensors" }, "class_type": "VAELoader", "_meta": { @@ -192,8 +198,8 @@ }, "188": { "inputs": { - "clip_name1": "t5-v1_1-xxl-encoder-Q3_K_M.gguf", - "clip_name2": "clip_l.safetensors", + "clip_name1": "city96---t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf", + "clip_name2": "comfyanonymous---flux_text_encoders/clip_l.safetensors", "type": "flux" }, "class_type": "DualCLIPLoaderGGUF", diff --git a/WebUI/src/assets/js/const.ts b/WebUI/src/assets/js/const.ts index 11cf75ad..f404c1db 100644 --- a/WebUI/src/assets/js/const.ts +++ b/WebUI/src/assets/js/const.ts @@ -7,4 +7,7 @@ export module Const { export const MODEL_TYPE_EMBEDDING = 5; export const MODEL_TYPE_INPAINT = 6; export const MODEL_TYPE_PREVIEW = 7; + export const MODEL_TYPE_COMFY_UNET = 100; + export const MODEL_TYPE_COMFY_CLIP = 101; + export const MODEL_TYPE_COMFY_VAE = 102; } \ No newline at end of file diff --git a/WebUI/src/assets/js/store/imageGeneration.ts b/WebUI/src/assets/js/store/imageGeneration.ts index 84048a17..127f181c 100644 --- a/WebUI/src/assets/js/store/imageGeneration.ts +++ b/WebUI/src/assets/js/store/imageGeneration.ts @@ -5,6 +5,7 @@ import { useStableDiffusion } from "./stableDiffusion"; import { useI18N } from "./i18n"; import { Const } from "../const"; import { useGlobalSetup } from "./globalSetup"; +import {toast} from "@/assets/js/toast.ts"; export type StableDiffusionSettings = { resolution: 'standard' | 'hd' | 'manual', // ~ modelSettings.resolution 0, 1, 3 @@ -59,6 +60,7 @@ const WorkflowSchema = z.object({ name: z.string(), backend: z.enum(['default', 'comfyui']), tags: z.array(z.string()), + requiredModels: z.array(z.string()).optional(), requirements: z.array(WorkflowRequirementSchema), inputs: z.array(z.object({ name: z.string(), @@ -103,7 +105,7 @@ export const useImageGeneration = defineStore("imageGeneration", () => { 
const stableDiffusion = useStableDiffusion(); const globalSetup = useGlobalSetup(); const i18nState = useI18N().state; - + const hdWarningDismissed = ref(false); const workflows = ref(predefinedWorkflows); @@ -255,29 +257,57 @@ export const useImageGeneration = defineStore("imageGeneration", () => { async function getMissingModels() { if (activeWorkflow.value.backend === 'default') { - const checkList: CheckModelExistParam[] = [{ repo_id: imageModel.value, type: Const.MODEL_TYPE_STABLE_DIFFUSION }]; - if (lora.value !== "None") { - checkList.push({ repo_id: lora.value, type: Const.MODEL_TYPE_LORA }) - } - if (imagePreview.value) { - checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW }) - checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW }) - } - const result = await globalSetup.checkModelExists(checkList); - const downloadList: CheckModelExistParam[] = []; - for (const item of result) { - if (!item.exist) { - downloadList.push({ repo_id: item.repo_id, type: item.type }) - } - } - return downloadList; + return getMissingDefaultBackendModels() } else { - console.error(`missing models for backend ${activeWorkflow.value.backend} cannot be loaded.`) + return getMissingComfyuiBackendModels() + } + } + + async function getMissingComfyuiBackendModels() { + if (activeWorkflow.value?.requiredModels === undefined) { + toast.error('"Defined workflow did not specify required models. Please add "requiredModels" to workflowfile.'); return [] + } else { + function extractDownloadModelParamsFromString(modelParamString: string): CheckModelExistParam { + const [modelType, repoAddress] = modelParamString.split(":") + function modelTypeToId(type: string) { + switch (type) { + case "unet" : return Const.MODEL_TYPE_COMFY_UNET + case "clip" : return Const.MODEL_TYPE_COMFY_CLIP + case "vae" : return Const.MODEL_TYPE_COMFY_VAE + default: + console.warn("received unknown comfyUI type: ", type) + return -1 + } + } + return {type: modelTypeToId(modelType), repo_id: repoAddress, backend: "comfyui"} + } + const checkList: CheckModelExistParam[] = activeWorkflow.value.requiredModels.map( extractDownloadModelParamsFromString) + const result: CheckModelExistResult[] = await globalSetup.checkModelExists(checkList); + return result + .filter(checkModelExistsResult => !checkModelExistsResult.exist) + .map(item => ({ repo_id: item.repo_id, type: item.type, backend: item.backend })) } } + async function getMissingDefaultBackendModels() { + const checkList: CheckModelExistParam[] = [{ repo_id: imageModel.value, type: Const.MODEL_TYPE_STABLE_DIFFUSION, backend: activeWorkflow.value.backend }]; + if (lora.value !== "None") { + checkList.push({ repo_id: lora.value, type: Const.MODEL_TYPE_LORA, backend: activeWorkflow.value.backend }) + } + if (imagePreview.value) { + checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW , backend: activeWorkflow.value.backend}) + checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW , backend: activeWorkflow.value.backend}) + } + + const result = await globalSetup.checkModelExists(checkList); + return result + .filter(checkModelExistsResult => !checkModelExistsResult.exist) + .map(item => ({ repo_id: item.repo_id, type: item.type, backend: item.backend })) + + } + function generate() { generateIdx.value = 0; previewIdx.value = 0; diff --git a/WebUI/src/assets/js/store/stableDiffusion.ts b/WebUI/src/assets/js/store/stableDiffusion.ts index cae46041..9b298099 100644 --- 
a/WebUI/src/assets/js/store/stableDiffusion.ts +++ b/WebUI/src/assets/js/store/stableDiffusion.ts @@ -68,19 +68,19 @@ export const useStableDiffusion = defineStore("stableDiffusion", () => { async function checkModel() { return new Promise(async (resolve, reject) => { - const checkList: CheckModelExistParam[] = [{ repo_id: globalSetup.modelSettings.sd_model, type: Const.MODEL_TYPE_STABLE_DIFFUSION }]; + const checkList: CheckModelExistParam[] = [{ repo_id: globalSetup.modelSettings.sd_model, type: Const.MODEL_TYPE_STABLE_DIFFUSION, backend: "default" }]; if (globalSetup.modelSettings.lora != "None") { - checkList.push({ repo_id: globalSetup.modelSettings.lora, type: Const.MODEL_TYPE_LORA }) + checkList.push({ repo_id: globalSetup.modelSettings.lora, type: Const.MODEL_TYPE_LORA , backend: "default"}) } if (globalSetup.modelSettings.imagePreview) { - checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW }) - checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW }) + checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW , backend: "default"}) + checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW , backend: "default"}) } const result = await globalSetup.checkModelExists(checkList); const downloadList: CheckModelExistParam[] = []; for (const item of result) { if (!item.exist) { - downloadList.push({ repo_id: item.repo_id, type: item.type }) + downloadList.push({ repo_id: item.repo_id, type: item.type, backend: "default" }) } } await models.download(downloadList); diff --git a/WebUI/src/components/DownloadDialog.vue b/WebUI/src/components/DownloadDialog.vue index 11fee262..10581abf 100644 --- a/WebUI/src/components/DownloadDialog.vue +++ b/WebUI/src/components/DownloadDialog.vue @@ -174,7 +174,7 @@ async function showConfirm(downList: DownloadModelParam[], success?: () => void, hashError.value = false; percent.value = 0; downloadList.value = downList.map((item) => { - return { repo_id: item.repo_id, type: item.type, size: "???" 
} + return { repo_id: item.repo_id, type: item.type, size: "???", backend: item.backend } }); readTerms.value = false; downloadResolve = success; diff --git a/WebUI/src/env.d.ts b/WebUI/src/env.d.ts index 41b2f508..58795263 100644 --- a/WebUI/src/env.d.ts +++ b/WebUI/src/env.d.ts @@ -295,9 +295,12 @@ type DownloadFailedParams = { type: "error" | "cancelConfrim" | "cancelDownload" type CheckModelExistParam = { repo_id: string; - type: number + type: number; + backend: BackendType; } +type BackendType = "comfyui" | "default" + type DownloadModelParam = CheckModelExistParam type DownloadModelRender = { size: string, gated?: boolean } & CheckModelExistParam @@ -306,8 +309,4 @@ type CheckModelExistResult = { exist: boolean } & CheckModelExistParam -type CheckModelSizeResult = { - size: string -} & CheckModelExistParam - type SDGenerateState = "no_start" | "input_image" | "load_model" | "load_model_components" | "generating" | "image_out" | "error" \ No newline at end of file diff --git a/WebUI/src/views/Answer.vue b/WebUI/src/views/Answer.vue index 56c60963..a8412505 100644 --- a/WebUI/src/views/Answer.vue +++ b/WebUI/src/views/Answer.vue @@ -421,7 +421,7 @@ async function newPromptGenerate() { async function checkModel() { return new Promise(async (resolve, reject) => { - const checkList: CheckModelExistParam[] = [{ repo_id: globalSetup.modelSettings.llm_model, type: Const.MODEL_TYPE_LLM }]; + const checkList: CheckModelExistParam[] = [{ repo_id: globalSetup.modelSettings.llm_model, type: Const.MODEL_TYPE_LLM, backend: "default" }]; if (!(await globalSetup.checkModelExists(checkList))[0].exist) { emits( "showDownloadModelConfirm", @@ -550,7 +550,7 @@ async function toggleRag(value: boolean) { ragData.processEnable = true; try { if (value) { - var checkList = [{ repo_id: globalSetup.modelSettings.embedding, type: Const.MODEL_TYPE_EMBEDDING }]; + var checkList: CheckModelExistParam[] = [{ repo_id: globalSetup.modelSettings.embedding, type: Const.MODEL_TYPE_EMBEDDING, backend: "default" }]; if (!(await globalSetup.checkModelExists(checkList))[0].exist) { emits("showDownloadModelConfirm", checkList, diff --git a/WebUI/src/views/AppSettings.vue b/WebUI/src/views/AppSettings.vue index 54b3d98a..9ede71f5 100644 --- a/WebUI/src/views/AppSettings.vue +++ b/WebUI/src/views/AppSettings.vue @@ -391,7 +391,7 @@ function restorePresetModelSettings() { } function downloadModel(model_repo_id: string, type: number) { - const params = [{ repo_id: model_repo_id, type: type }]; + const params: CheckModelExistParam[] = [{ repo_id: model_repo_id, type: type, backend: "default" }]; globalSetup.checkModelExists(params) .then(exits => { if (exits[0].exist) { diff --git a/WebUI/src/views/Enhance.vue b/WebUI/src/views/Enhance.vue index eee90926..ef702ff6 100644 --- a/WebUI/src/views/Enhance.vue +++ b/WebUI/src/views/Enhance.vue @@ -437,25 +437,25 @@ async function checkModel() { return new Promise(async (resolve, reject) => { const checkList: CheckModelExistParam[] = []; if ([3, 4].includes(mode.value) && imageGeneration.inpaintModel != i18nState.ENHANCE_INPAINT_USE_IMAGE_MODEL) { - checkList.push({ repo_id: imageGeneration.inpaintModel, type: Const.MODEL_TYPE_INPAINT }); + checkList.push({ repo_id: imageGeneration.inpaintModel, type: Const.MODEL_TYPE_INPAINT, backend: imageGeneration.activeWorkflow.backend}); } else { - checkList.push({ repo_id: imageGeneration.imageModel, type: Const.MODEL_TYPE_STABLE_DIFFUSION }); + checkList.push({ repo_id: imageGeneration.imageModel, type: Const.MODEL_TYPE_STABLE_DIFFUSION, 
backend: imageGeneration.activeWorkflow.backend }); } if ([1, 3, 4].includes(mode.value)) { - checkList.push({ repo_id: "RealESRGAN_x2plus", type: Const.MODEL_TYPE_ESRGAN }) + checkList.push({ repo_id: "RealESRGAN_x2plus", type: Const.MODEL_TYPE_ESRGAN, backend: imageGeneration.activeWorkflow.backend }) } if (imageGeneration.lora != "None") { - checkList.push({ repo_id: imageGeneration.lora, type: Const.MODEL_TYPE_LORA }) + checkList.push({ repo_id: imageGeneration.lora, type: Const.MODEL_TYPE_LORA, backend: imageGeneration.activeWorkflow.backend }) } if (imageGeneration.imagePreview) { - checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW }) - checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW }) + checkList.push({ repo_id: "madebyollin/taesd", type: Const.MODEL_TYPE_PREVIEW, backend: imageGeneration.activeWorkflow.backend }) + checkList.push({ repo_id: "madebyollin/taesdxl", type: Const.MODEL_TYPE_PREVIEW, backend: imageGeneration.activeWorkflow.backend }) } const result = await globalSetup.checkModelExists(checkList); - const downloadList: CheckModelExistParam[] = []; + const downloadList: DownloadModelParam[] = []; for (const item of result) { if (!item.exist) { - downloadList.push({ repo_id: item.repo_id, type: item.type }) + downloadList.push({ repo_id: item.repo_id, type: item.type, backend: imageGeneration.activeWorkflow.backend }) } } if (downloadList.length > 0) { diff --git a/service/aipg_utils.py b/service/aipg_utils.py index 0ed506c6..6094f566 100644 --- a/service/aipg_utils.py +++ b/service/aipg_utils.py @@ -1,11 +1,14 @@ import base64 -import math -from typing import IO -from PIL import Image +import hashlib import io +import math import os -import hashlib +from typing import IO + import torch +from PIL import Image + +import model_config def image_to_base64(image: Image.Image): @@ -35,10 +38,31 @@ def get_image_shape_ceil(image: Image.Image): return get_shape_ceil(H, W) -def check_mmodel_exist(type: int, repo_id: str): +def check_mmodel_exist(type: int, repo_id: str, backend: str) -> bool: + match(backend): + case "default": return check_defaultbackend_mmodel_exist(type, repo_id) + case "comfyui": return check_comfyui_model_exists(type, repo_id) + case _: raise NameError("Unknown Backend") + +def check_comfyui_model_exists(type, repo_id) -> bool: + model_dir = model_config.comfyUIConfig.get(convert_model_type(type)) + dir_to_look_for = os.path.join(model_dir, repo_local_root_dir_name(repo_id), extract_model_id_pathsegments(repo_id)) + print(dir_to_look_for) + return os.path.exists(dir_to_look_for) + +def trim_repo(repo_id): + return "/".join(repo_id.split("/")[:2]) + +def extract_model_id_pathsegments(repo_id) -> str: + return "/".join(repo_id.split("/")[2:]) + +def repo_local_root_dir_name(repo_id): + return "---".join(repo_id.split("/")[:2]) + +def check_defaultbackend_mmodel_exist(type: int, repo_id: str) -> bool: import model_config - folder_name = repo_id.replace("/", "---") + folder_name = repo_local_root_dir_name(repo_id) if type == 0: dir = model_config.config.get("llm") return os.path.exists(os.path.join(dir, folder_name, "config.json")) @@ -105,14 +129,24 @@ def convert_model_type(type: int): return "inpaint" elif type == 7: return "preview" + + elif type == 100: + return "unet" + elif type == 101: + return "clip" + elif type == 102: + return "vae" else: raise Exception(f"uwnkown model type value {type}") -def get_model_path(type: int): - import model_config +def get_model_path(type: int, backend: str): + 
match backend: + case "default": + return model_config.config.get(convert_model_type(type)) + case "comfyui": + return model_config.comfyUIConfig.get(convert_model_type(type)) - return model_config.config.get(convert_model_type(type)) def calculate_md5(file_path: str): diff --git a/service/model_config.py b/service/model_config.py index a50221c0..5a1eb900 100644 --- a/service/model_config.py +++ b/service/model_config.py @@ -1,3 +1,4 @@ + # CONFIG_PATH = "./model_config.json" config = { @@ -11,5 +12,11 @@ "preview": "./models/stable_diffusion/preview", } +comfyUIConfig = { + "unet": "../ComfyUI/models/unet", + "clip": "../ComfyUI/models/clip", + "vae": "../ComfyUI/models/vae", +} + device = "xpu" -env_type = "arc" +env_type = "arc" \ No newline at end of file diff --git a/service/model_download_adpater.py b/service/model_download_adpater.py index 15430c8a..1a761ce1 100644 --- a/service/model_download_adpater.py +++ b/service/model_download_adpater.py @@ -124,7 +124,7 @@ def __start_download(self, list: list): ), ) else: - self.hf_downloader.download(item["repo_id"], item["type"]) + self.hf_downloader.download(item["repo_id"], item["type"], item["backend"]) self.put_msg({"type": "allComplete"}) self.finish = True except Exception as ex: diff --git a/service/model_downloader.py b/service/model_downloader.py index 4f20baa8..2efab81e 100644 --- a/service/model_downloader.py +++ b/service/model_downloader.py @@ -1,3 +1,5 @@ +import os + from huggingface_hub import HfFileSystem, hf_hub_url, model_info from huggingface_hub.utils import RepositoryNotFoundError from typing import Any, Callable, Dict, List @@ -84,28 +86,26 @@ def __init__(self, hf_token=None) -> None: self.thread_lock = Lock() self.hf_token = hf_token - def trim_repo(self, repo_id): - return "/".join(repo_id.split("/")[:2]) - def hf_url_exists(self, repo_id: str): try: - model_info(self.trim_repo(repo_id)) + model_info(utils.trim_repo(repo_id)) return True except RepositoryNotFoundError: return False def probe_type(self, repo_id : str): - return model_info(self.trim_repo(repo_id)).pipeline_tag + return model_info(utils.trim_repo(repo_id)).pipeline_tag def is_gated(self, repo_id: str): try: - info = model_info(self.trim_repo(repo_id)) + info = model_info(utils.trim_repo(repo_id)) return info.gated except Exception as ex: print(f"Error while trying to determine whether {repo_id} is gated: {ex}") return False - def download(self, repo_id: str, model_type: int, thread_count: int = 4): + def download(self, repo_id: str, model_type: int, backend: str, thread_count: int = 4): + print(f"at download {backend}") self.repo_id = repo_id self.total_size = 0 self.download_size = 0 @@ -113,7 +113,7 @@ def download(self, repo_id: str, model_type: int, thread_count: int = 4): self.download_stop = False self.completed = False self.error = None - self.save_path = path.join(utils.get_model_path(model_type)) + self.save_path = path.join(utils.get_model_path(model_type, backend)) self.save_path_tmp = path.abspath( path.join(self.save_path, repo_id.replace("/", "---") + "_tmp") ) @@ -220,11 +220,11 @@ def enum_file_list( continue self.total_size += size - relative_path = path.relpath(name, self.trim_repo(self.repo_id)) + relative_path = path.relpath(name, utils.trim_repo(self.repo_id)) subfolder = path.dirname(relative_path).replace("\\", "/") filename = path.basename(relative_path) url = hf_hub_url( - repo_id=self.trim_repo(self.repo_id), subfolder=subfolder, filename=filename + repo_id=utils.trim_repo(self.repo_id), subfolder=subfolder, 
filename=filename ) file_list.append(HFFileItem(relative_path, size, url)) @@ -268,16 +268,24 @@ def multiple_thread_downlod(self, thread_count: int): if self.on_download_completed is not None: self.on_download_completed(self.repo_id, self.error) if not self.download_stop and self.error is None: - rename( - self.save_path_tmp, - path.abspath( - path.join(self.save_path, self.repo_id.replace("/", "---")) - ), - ) + self.move_to_desired_position() else: # Download aborted shutil.rmtree(self.save_path_tmp) + def move_to_desired_position(self): + desired_repo_root_dir_name = os.path.join(self.save_path, utils.repo_local_root_dir_name(self.repo_id)) + if os.path.exists(desired_repo_root_dir_name): + for item in os.listdir(self.save_path_tmp): + shutil.move(os.path.join(self.save_path_tmp, item), desired_repo_root_dir_name) + shutil.rmtree(self.save_path_tmp) + else: + rename( + self.save_path_tmp, + path.abspath(desired_repo_root_dir_name) + ) + + def start_report_download_progress(self): thread = Thread(target=self.report_download_progress) thread.start() diff --git a/service/web_api.py b/service/web_api.py index d59c0868..785f607f 100644 --- a/service/web_api.py +++ b/service/web_api.py @@ -159,8 +159,9 @@ def check_model_exist(): for item in list: repo_id = item["repo_id"] type = item["type"] - exist = utils.check_mmodel_exist(type, repo_id) - result_list.append({"repo_id": repo_id, "type": type, "exist": exist}) + backend = item["backend"] + exist = utils.check_mmodel_exist(type, repo_id, backend) + result_list.append({"repo_id": repo_id, "type": type, "backend": backend, "exist": exist}) return jsonify({"code": 0, "message": "success", "exists": result_list}) diff --git a/service/web_request_bodies.py b/service/web_request_bodies.py new file mode 100644 index 00000000..f5e38cbf --- /dev/null +++ b/service/web_request_bodies.py @@ -0,0 +1,4 @@ +class DownloadList: + type: int + repo_id: str + invalidKey: str \ No newline at end of file From 48344ce2a6213c554636b337d77f5596529afd34 Mon Sep 17 00:00:00 2001 From: Florian Esser Date: Wed, 27 Nov 2024 17:26:05 +0100 Subject: [PATCH 4/5] Adjust alternative comfyUI workflow files to new downloading logic Signed-off-by: Florian Esser --- WebUI/external/workflows/fluxQ4.json | 8 +- WebUI/external/workflows/fluxQ8.json | 14 ++- WebUI/external/workflows/sd15.json | 139 --------------------------- 3 files changed, 14 insertions(+), 147 deletions(-) delete mode 100644 WebUI/external/workflows/sd15.json diff --git a/WebUI/external/workflows/fluxQ4.json b/WebUI/external/workflows/fluxQ4.json index 78daa129..bcd5dd91 100644 --- a/WebUI/external/workflows/fluxQ4.json +++ b/WebUI/external/workflows/fluxQ4.json @@ -78,7 +78,7 @@ }, "170": { "inputs": { - "unet_name": "city96---FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf" + "unet_name": "city96---FLUX.1-schnell-gguf\\flux1-schnell-Q4_K_S.gguf" }, "class_type": "UnetLoaderGGUF", "_meta": { @@ -87,7 +87,7 @@ }, "171": { "inputs": { - "vae_name": "vae:black-forest-labs/FLUX.1-schnell/ae.safetensors" + "vae_name": "black-forest-labs/FLUX.1-schnell\\ae.safetensors" }, "class_type": "VAELoader", "_meta": { @@ -198,8 +198,8 @@ }, "188": { "inputs": { - "clip_name1": "city96---t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf", - "clip_name2": "comfyanonymous---flux_text_encoders/clip_l.safetensors", + "clip_name1": "city96---t5-v1_1-xxl-encoder-gguf\\t5-v1_1-xxl-encoder-Q3_K_M.gguf", + "clip_name2": "comfyanonymous---flux_text_encoders\\clip_l.safetensors", "type": "flux" }, "class_type": 
"DualCLIPLoaderGGUF", diff --git a/WebUI/external/workflows/fluxQ8.json b/WebUI/external/workflows/fluxQ8.json index 29f28752..d99e1013 100644 --- a/WebUI/external/workflows/fluxQ8.json +++ b/WebUI/external/workflows/fluxQ8.json @@ -5,6 +5,12 @@ "Q8", "Fast" ], + "requiredModels": [ + "unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q8_0.gguf", + "clip:city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf", + "clip:comfyanonymous/flux_text_encoders/clip_l.safetensors", + "vae:black-forest-labs/FLUX.1-schnell/ae.safetensors" + ], "requirements": [ "high-vram" ], @@ -72,7 +78,7 @@ }, "170": { "inputs": { - "unet_name": "flux1-schnell-Q8_0.gguf" + "unet_name": "city96---FLUX.1-schnell-gguf\\flux1-schnell-Q8_0.gguf" }, "class_type": "UnetLoaderGGUF", "_meta": { @@ -81,7 +87,7 @@ }, "171": { "inputs": { - "vae_name": "ae.safetensors" + "vae_name": "black-forest-labs---FLUX.1-schnell\\ae.safetensors" }, "class_type": "VAELoader", "_meta": { @@ -192,8 +198,8 @@ }, "188": { "inputs": { - "clip_name1": "t5-v1_1-xxl-encoder-Q3_K_M.gguf", - "clip_name2": "clip_l.safetensors", + "clip_name1": "city96---t5-v1_1-xxl-encoder-gguf\\t5-v1_1-xxl-encoder-Q3_K_M.gguf", + "clip_name2": "comfyanonymous---flux_text_encoders\\clip_l.safetensors", "type": "flux" }, "class_type": "DualCLIPLoaderGGUF", diff --git a/WebUI/external/workflows/sd15.json b/WebUI/external/workflows/sd15.json deleted file mode 100644 index f4c7abee..00000000 --- a/WebUI/external/workflows/sd15.json +++ /dev/null @@ -1,139 +0,0 @@ -{ - "name": "SD1.5", - "backend": "comfyui", - "tags": [ - "comfyui", - "sd1.5" - ], - "requirements": [], - "inputs": [ - { - "name": "positive_prompt", - "type": "text" - } - ], - "outputs": [ - { - "name": "output_image", - "type": "image" - } - ], - "displayedSettings": [], - "modifiableSettings": [ - "seed", - "negativePrompt", - "batchSize", - "imagePreview", - "resolution", - "guidanceScale", - "scheduler", - "inferenceSteps" - ], - "comfyUiApiWorkflow": { - "3": { - "inputs": { - "seed": 1111600661499464, - "steps": 20, - "cfg": 8, - "sampler_name": "euler", - "scheduler": "normal", - "denoise": 1, - "model": [ - "4", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "5", - 0 - ] - }, - "class_type": "KSampler", - "_meta": { - "title": "KSampler" - } - }, - "4": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple", - "_meta": { - "title": "Load Checkpoint" - } - }, - "5": { - "inputs": { - "width": 512, - "height": 512, - "batch_size": 1 - }, - "class_type": "EmptyLatentImage", - "_meta": { - "title": "Empty Latent Image" - } - }, - "6": { - "inputs": { - "text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode", - "_meta": { - "title": "prompt" - } - }, - "7": { - "inputs": { - "text": "text, watermark", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode", - "_meta": { - "title": "negativePrompt" - } - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "4", - 2 - ] - }, - "class_type": "VAEDecode", - "_meta": { - "title": "VAE Decode" - } - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage", - "_meta": { - "title": "Save Image" - } - } - } -} \ No newline at end of file From 71b4f0902817a88f140eb87c877fd4224b2b6cc0 Mon Sep 17 00:00:00 2001 From: Florian Esser Date: Tue, 26 Nov 2024 18:03:33 +0100 Subject: [PATCH 
5/5] provide comfyUI build persist exact config and commands that lead to offline installer creation Only exception: do not check in intel_extension_for_pytorch-2.3.110+xpu-cp311-cp311-win_amd64.whl in requirementst-arc.txt, as this will only work on intel machines and not for developers. Instead, in dev build, reference a public intel_extension_for_pytorch Signed-off-by: Florian Esser fixup --- .gitignore | 3 ++- WebUI/build/arc-settings.json | 4 ++-- WebUI/build/build-config-offline.json | 21 ++++++++++++++++ .../scripts/fetch-python-package-resources.js | 24 +++++++++++++++++++ .../build/scripts/install-full-python-env.js | 13 +++++++--- WebUI/build/scripts/prepare-python-env.js | 1 + .../provide-electron-build-resources.js | 17 ++++++++++--- WebUI/package.json | 2 +- service/requirements-arc.txt | 1 - service/web_request_bodies.py | 4 ---- 10 files changed, 75 insertions(+), 15 deletions(-) delete mode 100644 service/web_request_bodies.py diff --git a/.gitignore b/.gitignore index 633c2e4b..29870908 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,8 @@ .vscode/ env/ package_res/ -python_package_res/ +build_resources/ +build_envs/ release/ WebUI/external/service/ diff --git a/WebUI/build/arc-settings.json b/WebUI/build/arc-settings.json index dc319c74..58077c22 100644 --- a/WebUI/build/arc-settings.json +++ b/WebUI/build/arc-settings.json @@ -1,5 +1,5 @@ { "envType": "arc", - "availableThemes": ["dark"], - "currentTheme": "dark" + "availableThemes": ["dark","lnl","bmg"], + "currentTheme": "bmg" } \ No newline at end of file diff --git a/WebUI/build/build-config-offline.json b/WebUI/build/build-config-offline.json index dda33d15..b49d5649 100644 --- a/WebUI/build/build-config-offline.json +++ b/WebUI/build/build-config-offline.json @@ -21,6 +21,27 @@ "*.bat" ] }, + { + "from": "external/ComfyUI", + "to": "ComfyUI", + "filter": [ + "**/*", + "!models/", + "!.vscode/", + "!__pycache__/", + "!test/", + "!db/", + "!cache/", + "!static/", + "!temp/", + "!dist/", + "*.bat" + ] + }, + { + "from": "external/workflows", + "to": "workflows" + }, { "from": "external/service/models/stable_diffusion/ESRGAN", "to": "service/models/stable_diffusion/ESRGAN" diff --git a/WebUI/build/scripts/fetch-python-package-resources.js b/WebUI/build/scripts/fetch-python-package-resources.js index e7847a41..7c45eca9 100644 --- a/WebUI/build/scripts/fetch-python-package-resources.js +++ b/WebUI/build/scripts/fetch-python-package-resources.js @@ -2,6 +2,7 @@ const https = require('https'); const fs = require('fs'); const path = require('path'); +const childProcess = require('child_process'); const argv = require('minimist')(process.argv.slice(2)); @@ -78,12 +79,35 @@ function copyLibuvDllsIfNotPresent() { } } +function fetchComfyUIIfNotPresent() { + const comfyUICloneDir = path.join(targetDir, 'ComfyUI') + if (fs.existsSync(comfyUICloneDir)) { + console.log(`omitting fetching of comfyUI as ${comfyUICloneDir} already exists`) + } else { + gitClone("https://github.com/comfyanonymous/ComfyUI.git", comfyUICloneDir) + gitClone("https://github.com/city96/ComfyUI-GGUF.git", path.join(comfyUICloneDir, 'custom_nodes', 'ComfyUI-GGUF')) + } +} + + +function gitClone(repoURL, targetDir) { + const gitClone = childProcess.spawnSync("git", ["clone", repoURL, targetDir]); + console.log(gitClone.stdout.toString()); + console.error(gitClone.stderr.toString()); + if (gitClone.status!== 0) { + console.error('Failed to clone repo: ', repoURL); + process.exit(1); + } + console.log('Successfully fetched: ', repoURL); +} + function main() 
{ prepareTargetPath() fetchFileIfNotPresent(embeddablePythonUrl) fetchFileIfNotPresent(getPipScriptUrl) fetchFileIfNotPresent(sevenZrExeUrl) + fetchComfyUIIfNotPresent() copyLibuvDllsIfNotPresent() } diff --git a/WebUI/build/scripts/install-full-python-env.js b/WebUI/build/scripts/install-full-python-env.js index 8a97028a..79154d85 100644 --- a/WebUI/build/scripts/install-full-python-env.js +++ b/WebUI/build/scripts/install-full-python-env.js @@ -8,13 +8,15 @@ const childProcess = require('child_process'); const argv = require('minimist')(process.argv.slice(2)); const envDirArg = argv.env_dir const platformArg = argv.platform +const comfyUIDIrArg = argv.comfy_ui_dir -if (!envDirArg || !platformArg) { +if (!envDirArg || !platformArg || !comfyUIDIrArg) { console.error('Usage: node install-full-python-env.js --env_dir=$DIR ---platform=arc|ultra|ultra2\n'); process.exit(1); } const envDir = existingFileOrExit(path.resolve(envDirArg)); +const comfyUIDIr = existingFileOrExit(path.resolve(comfyUIDIrArg)); const platform = platformArg; function existingFileOrExit(filePath) { @@ -29,7 +31,7 @@ function existingFileOrExit(filePath) { function installPip(pythonExe, getPipFilePath) { const runGetPip = childProcess.spawnSync(pythonExe, [getPipFilePath]); console.log(runGetPip.stdout.toString()); - //console.error(runGetPip.stderr.toString()); + console.error(runGetPip.stderr.toString()); if (runGetPip.status!== 0) { console.error('Failed to install requirements'); process.exit(1); @@ -56,7 +58,7 @@ function copyToTargetDir(sourceDir, targetDir) { function prepareTargetDir(targetDir) { if (fs.existsSync(targetDir)) { - console.log("clearing previous env dir ${targetDir}") + console.log(`clearing previous env dir ${targetDir}`) fs.rmSync(targetDir, { recursive: true }); } } @@ -72,10 +74,15 @@ function main() { const platformSpecificRequirementsTxt = existingFileOrExit(path.join(__dirname, '..', '..','..', 'service', `requirements-${platform}.txt`)); const requirementsTxt = existingFileOrExit(path.join(__dirname, '..', '..', '..', 'service', `requirements.txt`)); + const comfyUiRequirementsTxt = existingFileOrExit(path.join(comfyUIDIr, `requirements.txt`)); + const ggufCostomNoderequirementsTxt = existingFileOrExit(path.join(comfyUIDIr, 'custom_nodes', 'ComfyUI-GGUF', `requirements.txt`)); + installPip(pythonExe, getPipFile) runPipInstall(pythonExe, platformSpecificRequirementsTxt) runPipInstall(pythonExe, requirementsTxt) + runPipInstall(pythonExe, comfyUiRequirementsTxt) + runPipInstall(pythonExe, ggufCostomNoderequirementsTxt) } main(); diff --git a/WebUI/build/scripts/prepare-python-env.js b/WebUI/build/scripts/prepare-python-env.js index 517e5509..1f14f8f4 100644 --- a/WebUI/build/scripts/prepare-python-env.js +++ b/WebUI/build/scripts/prepare-python-env.js @@ -71,6 +71,7 @@ function createPythonEnvFromEmbedabblePythonZip(targetDir, pythonEmbedZipFile) { python311.zip . 
../service +../ComfyUI # Uncomment to run site.main() automatically import site diff --git a/WebUI/build/scripts/provide-electron-build-resources.js b/WebUI/build/scripts/provide-electron-build-resources.js index 268ef2bb..4e46f74b 100644 --- a/WebUI/build/scripts/provide-electron-build-resources.js +++ b/WebUI/build/scripts/provide-electron-build-resources.js @@ -48,14 +48,20 @@ function zipPythonEnv(sevenZipExe, pythonEnvDir, targetPath) { console.log('Offline env has been compressed to:', targetPath); } -function copyResources(targetDir, ...files) { +function copyFiles(targetDir, ...files) { for (const file of files) { - console.log(file) fs.copyFileSync(file, path.join(targetDir, path.basename(file))); console.log('Copied:', file, 'to:', path.join(targetDir, path.basename(file))); } } +function copyDirectories(targetDir, ...dirs) { + for (const dir of dirs) { + fs.cpSync(dir, path.join(targetDir, path.basename(dir)), { recursive: true }); + console.log('Copied:', dir, 'to:', path.join(targetDir, path.basename(dir))); + } +} + function clearPreviousZip(zipFilePath) { if (fs.existsSync(zipFilePath)) { @@ -66,14 +72,19 @@ function clearPreviousZip(zipFilePath) { function main() { const sevenZipExe = path.join(buildResourcesDir, '7zr.exe'); + const comfyUI = path.join(buildResourcesDir, 'ComfyUI'); clearPreviousZip(path.join(targetDir, `env.7z`)); zipPythonEnv(sevenZipExe, pythenEnvDir, path.join(targetDir, `env.7z`)); symlinkBackendDir(backendDir, path.join(targetDir, 'service')) - copyResources(targetDir, + copyFiles(targetDir, sevenZipExe ) + + copyDirectories(targetDir, + comfyUI + ) } main() diff --git a/WebUI/package.json b/WebUI/package.json index 5c8477ab..cac1a0ac 100644 --- a/WebUI/package.json +++ b/WebUI/package.json @@ -9,7 +9,7 @@ "fetch-build-resources": "cross-env node ./build/scripts/fetch-python-package-resources.js --target_dir=../build_resources", "prepare-python-env": "cross-env node ./build/scripts/prepare-python-env.js --build_resources_dir=../build_resources --target_dir=../build-envs/online/env", - "install-full-python-env": "cross-env node ./build/scripts/install-full-python-env.js --env_dir=../build-envs/online/env", + "install-full-python-env": "cross-env node ./build/scripts/install-full-python-env.js --env_dir=../build-envs/online/env --comfy_ui_dir=../build_resources/ComfyUI", "provide-electron-build-resources": "cross-env node build/scripts/provide-electron-build-resources.js --build_resources_dir=../build_resources --backend_dir=../service --target_dir=./external", "prepare-build:arc": "cross-env npm run prepare-python-env && npm run provide-electron-build-resources -- --python_env_dir=../build-envs/online/env && node ./build/scripts/render-template.js --online_installer --platform=arc", diff --git a/service/requirements-arc.txt b/service/requirements-arc.txt index 6b933d39..3a54b1c7 100644 --- a/service/requirements-arc.txt +++ b/service/requirements-arc.txt @@ -6,7 +6,6 @@ torch==2.3.1.post0+cxx11.abi torchvision==0.18.1.post0+cxx11.abi torchaudio==2.3.1.post0+cxx11.abi intel-extension-for-pytorch==2.3.110.post0+xpu -mkl-dpcpp==2024.2.1 # IPEX-LLM --pre diff --git a/service/web_request_bodies.py b/service/web_request_bodies.py deleted file mode 100644 index f5e38cbf..00000000 --- a/service/web_request_bodies.py +++ /dev/null @@ -1,4 +0,0 @@ -class DownloadList: - type: int - repo_id: str - invalidKey: str \ No newline at end of file
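
The patches above establish one shared convention for model coordinates: a ComfyUI entry such as "unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf" carries the ComfyUI model type, the Hugging Face repo id, and the file path inside that repo. For Hub queries the coordinate is trimmed back to "owner/repo", and on disk the repo root is flattened to "owner---repo" so it stays a single directory name under the ComfyUI model folder. The sketch below illustrates that convention; the helper names mirror those added to service/aipg_utils.py, but the standalone module, the demo config dict, and the Python port of the workflow-entry parser (which lives in imageGeneration.ts in the patches) are illustrative assumptions rather than project code.

    import os

    # ComfyUI model-type ids as declared in WebUI/src/assets/js/const.ts.
    COMFY_TYPE_BY_ID = {100: "unet", 101: "clip", 102: "vae"}
    COMFY_ID_BY_TYPE = {name: type_id for type_id, name in COMFY_TYPE_BY_ID.items()}

    def parse_required_model(entry: str) -> tuple[int, str]:
        """Split a workflow 'requiredModels' entry such as
        'unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf'
        into (ComfyUI type id, model coordinate)."""
        model_type, coordinate = entry.split(":", 1)
        return COMFY_ID_BY_TYPE[model_type], coordinate

    def trim_repo(repo_id: str) -> str:
        """Keep only the 'owner/repo' prefix that Hub lookups understand."""
        return "/".join(repo_id.split("/")[:2])

    def extract_model_id_pathsegments(repo_id: str) -> str:
        """Return the optional file path inside the repo (may be empty)."""
        return "/".join(repo_id.split("/")[2:])

    def repo_local_root_dir_name(repo_id: str) -> str:
        """Flatten 'owner/repo' into a single local directory name."""
        return "---".join(repo_id.split("/")[:2])

    def comfyui_model_path(model_type: int, repo_id: str, comfyui_config: dict) -> str:
        """Where a downloaded ComfyUI model file is expected on disk."""
        model_dir = comfyui_config[COMFY_TYPE_BY_ID[model_type]]
        return os.path.join(model_dir,
                            repo_local_root_dir_name(repo_id),
                            extract_model_id_pathsegments(repo_id))

    if __name__ == "__main__":
        type_id, coordinate = parse_required_model(
            "unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf")
        demo_config = {"unet": "../ComfyUI/models/unet"}  # mirrors model_config.comfyUIConfig
        print(trim_repo(coordinate))          # city96/FLUX.1-schnell-gguf
        print(comfyui_model_path(type_id, coordinate, demo_config))
        # ../ComfyUI/models/unet/city96---FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf
        # (path separators depend on the OS)

Under this convention, a completed download of the entry above should leave the file at <comfyui>/models/unet/city96---FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf, which is the same path that check_comfyui_model_exists in service/aipg_utils.py probes before a ComfyUI workflow is allowed to run, and the same "owner---repo/file" form the workflow JSON references in its loader nodes.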