mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-21 21:22:11 -03:00
Compare commits
20 Commits
feature/la
...
v1.0.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2c810306fb | ||
|
|
dd94c6b31a | ||
|
|
1a0edec712 | ||
|
|
7ba9b998d3 | ||
|
|
8c5d5a8ca0 | ||
|
|
672e4cff90 | ||
|
|
c2716e3c39 | ||
|
|
b72cf7ba98 | ||
|
|
bde11b153f | ||
|
|
8b924b1551 | ||
|
|
ce08935b1e | ||
|
|
24fcbeaf76 | ||
|
|
c9e5ea42cb | ||
|
|
b005961ee5 | ||
|
|
ce03bbbc4e | ||
|
|
78b55d10ba | ||
|
|
77a2215e62 | ||
|
|
31901f1f0e | ||
|
|
12a789ef96 | ||
|
|
d50bbe71c2 |
31
.github/workflows/update-supporters.yml
vendored
Normal file
31
.github/workflows/update-supporters.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Update Supporters in README
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'data/supporters.json'
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch: # Allow manual trigger
|
||||
|
||||
jobs:
|
||||
update-readme:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Update README
|
||||
run: python scripts/update_supporters.py
|
||||
|
||||
- name: Commit and push changes
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "docs: auto-update supporters list in README"
|
||||
file_pattern: "README.md"
|
||||
627
data/supporters.json
Normal file
627
data/supporters.json
Normal file
@@ -0,0 +1,627 @@
|
||||
{
|
||||
"specialThanks": [
|
||||
"dispenser",
|
||||
"EbonEagle",
|
||||
"DanielMagPizza",
|
||||
"Scott R"
|
||||
],
|
||||
"allSupporters": [
|
||||
"Insomnia Art Designs",
|
||||
"megakirbs",
|
||||
"Brennok",
|
||||
"wackop",
|
||||
"2018cfh",
|
||||
"Takkan",
|
||||
"stone9k",
|
||||
"$MetaSamsara",
|
||||
"itismyelement",
|
||||
"onesecondinosaur",
|
||||
"Carl G.",
|
||||
"Rosenthal",
|
||||
"Francisco Tatis",
|
||||
"Tobi_Swagg",
|
||||
"Andrew Wilson",
|
||||
"Greybush",
|
||||
"Gooohokrbe",
|
||||
"Ricky Carter",
|
||||
"JongWon Han",
|
||||
"OldBones",
|
||||
"VantAI",
|
||||
"runte3221",
|
||||
"FreelancerZ",
|
||||
"Julian V",
|
||||
"Edgar Tejeda",
|
||||
"Birdy",
|
||||
"Liam MacDougal",
|
||||
"Fraser Cross",
|
||||
"Polymorphic Indeterminate",
|
||||
"Marc Whiffen",
|
||||
"Kiba",
|
||||
"Jorge Hussni",
|
||||
"Reno Lam",
|
||||
"Skalabananen",
|
||||
"esthe",
|
||||
"sig",
|
||||
"Christian Byrne",
|
||||
"DM",
|
||||
"Sen314",
|
||||
"Estragon",
|
||||
"J\\B/ 8r0wns0n",
|
||||
"Snaggwort",
|
||||
"Arlecchino Shion",
|
||||
"ClockDaemon",
|
||||
"KD",
|
||||
"Omnidex",
|
||||
"Tyler Trebuchon",
|
||||
"Release Cabrakan",
|
||||
"confiscated Zyra",
|
||||
"SG",
|
||||
"carozzz",
|
||||
"James Dooley",
|
||||
"zenbound",
|
||||
"Buzzard",
|
||||
"jmack",
|
||||
"Adam Shaw",
|
||||
"Tee Gee",
|
||||
"Mark Corneglio",
|
||||
"SarcasticHashtag",
|
||||
"Anthony Rizzo",
|
||||
"tarek helmi",
|
||||
"Cosmosis",
|
||||
"iamresist",
|
||||
"RedrockVP",
|
||||
"Wolffen",
|
||||
"FloPro4Sho",
|
||||
"James Todd",
|
||||
"Steven Pfeiffer",
|
||||
"Tim",
|
||||
"Timmy",
|
||||
"Johnny",
|
||||
"Lisster",
|
||||
"Michael Wong",
|
||||
"Illrigger",
|
||||
"whudunit",
|
||||
"Tom Corrigan",
|
||||
"JackieWang",
|
||||
"fnkylove",
|
||||
"Steven Owens",
|
||||
"Yushio",
|
||||
"Vik71it",
|
||||
"lh qwe",
|
||||
"Echo",
|
||||
"Lilleman",
|
||||
"Robert Stacey",
|
||||
"PM",
|
||||
"Todd Keck",
|
||||
"Briton Heilbrun",
|
||||
"Mozzel",
|
||||
"Gingko Biloba",
|
||||
"Felipe dos Santos",
|
||||
"Penfore",
|
||||
"BadassArabianMofo",
|
||||
"Sterilized",
|
||||
"Pascal Dahle",
|
||||
"Markus",
|
||||
"quarz",
|
||||
"Greg",
|
||||
"Douglas Gaspar",
|
||||
"JSST",
|
||||
"AlexDuKaNa",
|
||||
"George",
|
||||
"lmsupporter",
|
||||
"Phil",
|
||||
"Charles Blakemore",
|
||||
"IamAyam",
|
||||
"wfpearl",
|
||||
"Rob Williams",
|
||||
"Baekdoosixt",
|
||||
"Jonathan Ross",
|
||||
"Jack B Nimble",
|
||||
"Nazono_hito",
|
||||
"Melville Parrish",
|
||||
"daniel dove",
|
||||
"Lustre",
|
||||
"JW Sin",
|
||||
"contrite831",
|
||||
"Alex",
|
||||
"bh",
|
||||
"Marlon Daniels",
|
||||
"Starkselle",
|
||||
"Aaron Bleuer",
|
||||
"LacesOut!",
|
||||
"Graham Colehour",
|
||||
"M Postkasse",
|
||||
"Tomohiro Baba",
|
||||
"David Ortega",
|
||||
"ASLPro3D",
|
||||
"Jacob Hoehler",
|
||||
"FinalyFree",
|
||||
"Weasyl",
|
||||
"Lex Song",
|
||||
"Cory Paza",
|
||||
"Tak",
|
||||
"Gonzalo Andre Allendes Lopez",
|
||||
"Zach Gonser",
|
||||
"Big Red",
|
||||
"Jimmy Ledbetter",
|
||||
"Luc Job",
|
||||
"dl0901dm",
|
||||
"Philip Hempel",
|
||||
"corde",
|
||||
"Nick Walker",
|
||||
"Bishoujoker",
|
||||
"conner",
|
||||
"aai",
|
||||
"Yaboi",
|
||||
"Tori",
|
||||
"wildnut",
|
||||
"Princess Bright Eyes",
|
||||
"Damon Cunliffe",
|
||||
"CryptoTraderJK",
|
||||
"Davaitamin",
|
||||
"AbstractAss",
|
||||
"ViperC",
|
||||
"Aleksander Wujczyk",
|
||||
"AM Kuro",
|
||||
"jean jahren",
|
||||
"Ran C",
|
||||
"tedcor",
|
||||
"S Sang",
|
||||
"MagnaInsomnia",
|
||||
"Akira_HentAI",
|
||||
"Karl P.",
|
||||
"Gordon Cole",
|
||||
"yuxz69",
|
||||
"MadSpin",
|
||||
"andrew.tappan",
|
||||
"dw",
|
||||
"N/A",
|
||||
"The Spawn",
|
||||
"graysock",
|
||||
"Greenmoustache",
|
||||
"zounic",
|
||||
"Gamalonia",
|
||||
"fancypants",
|
||||
"Vir",
|
||||
"Joboshy",
|
||||
"Digital",
|
||||
"JaxMax",
|
||||
"takyamtom",
|
||||
"Bohemian Corporal",
|
||||
"奚明 刘",
|
||||
"Dan",
|
||||
"Seth Christensen",
|
||||
"Jwk0205",
|
||||
"Bro Xie",
|
||||
"Draven T",
|
||||
"yer fey",
|
||||
"batblue",
|
||||
"carey6409",
|
||||
"Olive",
|
||||
"太郎 ゲーム",
|
||||
"Some Guy Named Barry",
|
||||
"jinxedx",
|
||||
"Aquatic Coffee",
|
||||
"Max Marklund",
|
||||
"AELOX",
|
||||
"Dankin",
|
||||
"Nicfit23",
|
||||
"Noora",
|
||||
"ethanfel",
|
||||
"wamekukyouzin",
|
||||
"drum matthieu",
|
||||
"Dogmaster",
|
||||
"Matt Wenzel",
|
||||
"Mattssn",
|
||||
"Frank Nitty",
|
||||
"John Saveas",
|
||||
"Focuschannel",
|
||||
"Christopher Michel",
|
||||
"Serge Bekenkamp",
|
||||
"LeoZero",
|
||||
"Antonio Pontes",
|
||||
"ApathyJones",
|
||||
"nahinahi9",
|
||||
"Anthony Faxlandez",
|
||||
"Dustin Chen",
|
||||
"dan",
|
||||
"Blackfish95",
|
||||
"Mouthlessman",
|
||||
"Steam Steam",
|
||||
"Paul Kroll",
|
||||
"otaku fra",
|
||||
"semicolon drainpipe",
|
||||
"Thesharingbrother",
|
||||
"Fotek Design",
|
||||
"Bas Imagineer",
|
||||
"Pat Hen",
|
||||
"ResidentDeviant",
|
||||
"Adam Taylor",
|
||||
"JC",
|
||||
"Weird_With_A_Beard",
|
||||
"Prompt Pirate",
|
||||
"Pozadine1",
|
||||
"uwutismxd",
|
||||
"Qarob",
|
||||
"AIGooner",
|
||||
"inbijiburu",
|
||||
"decoy",
|
||||
"Luc",
|
||||
"ProtonPrince",
|
||||
"DiffDuck",
|
||||
"elu3199",
|
||||
"Nick “Loadstone” D",
|
||||
"Hasturkun",
|
||||
"Jon Sandman",
|
||||
"Ubivis",
|
||||
"CloudValley",
|
||||
"thesoftwaredruid",
|
||||
"wundershark",
|
||||
"mr_dinosaur",
|
||||
"Tyrswood",
|
||||
"linnfrey",
|
||||
"zenobeus",
|
||||
"Jackthemind",
|
||||
"Stryker",
|
||||
"Pkrsky",
|
||||
"raf8osz",
|
||||
"blikkies",
|
||||
"Josef Lanzl",
|
||||
"Griffin Dahlberg",
|
||||
"준희 김",
|
||||
"Error_Rule34_Not_found",
|
||||
"Gerald Welly",
|
||||
"Shock Shockor",
|
||||
"Roslynd",
|
||||
"Geolog",
|
||||
"Goldwaters",
|
||||
"Neco28",
|
||||
"Zude",
|
||||
"Cristian Vazquez",
|
||||
"Kyler",
|
||||
"Magic Noob",
|
||||
"aRtFuL_DodGeR",
|
||||
"X",
|
||||
"DougPeterson",
|
||||
"Jeff",
|
||||
"Bruce",
|
||||
"CrimsonDX",
|
||||
"Kevin John Duck",
|
||||
"Kevin Christopher",
|
||||
"Ouro Boros",
|
||||
"DarkSunset",
|
||||
"dd",
|
||||
"Billy Gladky",
|
||||
"Probis",
|
||||
"shrshpp",
|
||||
"Dušan Ryban",
|
||||
"ItsGeneralButtNaked",
|
||||
"sjon kreutz",
|
||||
"Nimess",
|
||||
"John Statham",
|
||||
"Youguang",
|
||||
"Nihongasuki",
|
||||
"Metryman55",
|
||||
"andrewzpong",
|
||||
"FrxzenSnxw",
|
||||
"BossGame",
|
||||
"Ray Wing",
|
||||
"Ranzitho",
|
||||
"Gus",
|
||||
"地獄の禄",
|
||||
"MJG",
|
||||
"David LaVallee",
|
||||
"ae",
|
||||
"Tr4shP4nda",
|
||||
"WRL_SPR",
|
||||
"capn",
|
||||
"Joseph",
|
||||
"lrdchs",
|
||||
"Mirko Katzula",
|
||||
"dan",
|
||||
"Piccio08",
|
||||
"kumakichi",
|
||||
"cppbel",
|
||||
"starbugx",
|
||||
"Moon Knight",
|
||||
"몽타주",
|
||||
"Kland",
|
||||
"Hailshem",
|
||||
"ryoma",
|
||||
"John Martin",
|
||||
"Chris",
|
||||
"Brian M",
|
||||
"Nerezza",
|
||||
"sanborondon",
|
||||
"moranqianlong",
|
||||
"Taylor Funk",
|
||||
"aezin",
|
||||
"Thought2Form",
|
||||
"jcay015",
|
||||
"Kevin Picco",
|
||||
"Erik Lopez",
|
||||
"Mateo Curić",
|
||||
"Haru Yotu",
|
||||
"Eris3D",
|
||||
"m",
|
||||
"Pierce McBride",
|
||||
"Joshua Gray",
|
||||
"Mikko Hemilä",
|
||||
"Matura Arbeit",
|
||||
"Jamie Ogletree",
|
||||
"TBitz33",
|
||||
"Emil Bernhoff",
|
||||
"a _",
|
||||
"SendingRavens",
|
||||
"James Coleman",
|
||||
"Martial",
|
||||
"battu",
|
||||
"Emil Andersson",
|
||||
"Chad Idk",
|
||||
"Michael Docherty",
|
||||
"Yuji Kaneko",
|
||||
"elitassj",
|
||||
"Jacob Winter",
|
||||
"Jordan Shaw",
|
||||
"Sam",
|
||||
"Rops Alot",
|
||||
"SRDB",
|
||||
"g unit",
|
||||
"Ace Ventura",
|
||||
"David",
|
||||
"Meilo",
|
||||
"Pen Bouryoung",
|
||||
"shinonomeiro",
|
||||
"Snille",
|
||||
"MaartenAlbers",
|
||||
"khanh duy",
|
||||
"xybrightsummer",
|
||||
"jreedatchison",
|
||||
"PhilW",
|
||||
"momokai",
|
||||
"Janik",
|
||||
"kudari",
|
||||
"Naomi Hale Danchi",
|
||||
"dc7431",
|
||||
"ken",
|
||||
"Inversity",
|
||||
"Crocket",
|
||||
"AIVORY3D",
|
||||
"epicgamer0020690",
|
||||
"Joshua Porrata",
|
||||
"Cruel",
|
||||
"keemun",
|
||||
"SuBu",
|
||||
"RedPIXel",
|
||||
"MRBlack",
|
||||
"Kevinj",
|
||||
"Wind",
|
||||
"Nexus",
|
||||
"Mitchell Robson",
|
||||
"Ramneek“Guy”Ashok",
|
||||
"squid_actually",
|
||||
"Nat_20",
|
||||
"Kiyoe",
|
||||
"Edward Weeks",
|
||||
"kyoumei",
|
||||
"RadStorm04",
|
||||
"JohnDoe42054",
|
||||
"BillyHill",
|
||||
"humptynutz",
|
||||
"emyth",
|
||||
"michael.isaza",
|
||||
"Kalnei",
|
||||
"chriphost",
|
||||
"KitKatM",
|
||||
"socrasteeze",
|
||||
"ResidentDeviant",
|
||||
"Scott",
|
||||
"gzmzmvp",
|
||||
"Welkor",
|
||||
"hayden",
|
||||
"Richard",
|
||||
"ahoystan",
|
||||
"Leland Saunders",
|
||||
"Andrew",
|
||||
"Bob Barker",
|
||||
"Robert Wegemund",
|
||||
"Littlehuggy",
|
||||
"Gregory Kozhemiak",
|
||||
"mrjuan",
|
||||
"Aeternyx",
|
||||
"Brian Buie",
|
||||
"YOU SINWOO",
|
||||
"Sadlip",
|
||||
"ja s",
|
||||
"Eric Whitney",
|
||||
"Doug Mason",
|
||||
"Joey Callahan",
|
||||
"Ivan Tadic",
|
||||
"y2Rxy7FdXzWo",
|
||||
"Jeremy Townsend",
|
||||
"Mike Simone",
|
||||
"Sean voets",
|
||||
"Owen Gwosdz",
|
||||
"Morgandel",
|
||||
"Thomas Wanner",
|
||||
"Kyron Mahan",
|
||||
"Theerat Jiramate",
|
||||
"Noah",
|
||||
"Jacob McDaniel",
|
||||
"kevin stoddard",
|
||||
"Sloan Steddy",
|
||||
"Jack Dole",
|
||||
"Ezokewn",
|
||||
"Temikus",
|
||||
"Artokun",
|
||||
"Michael Taylor",
|
||||
"Derek Baker",
|
||||
"Michael Anthony Scott",
|
||||
"Atilla Berke Pekduyar",
|
||||
"Maso",
|
||||
"Nathan",
|
||||
"Decx _",
|
||||
"Kevin Wallace",
|
||||
"Matheus Couto",
|
||||
"Paul Hartsuyker",
|
||||
"ChicRic",
|
||||
"mercur",
|
||||
"J C",
|
||||
"Distortik",
|
||||
"Yves Poezevara",
|
||||
"Teriak47",
|
||||
"Just me",
|
||||
"Raf Stahelin",
|
||||
"Вячеслав Маринин",
|
||||
"Cola Matthew",
|
||||
"OniNoKen",
|
||||
"Iain Wisely",
|
||||
"Zertens",
|
||||
"NOHOW",
|
||||
"Apo",
|
||||
"nekotxt",
|
||||
"choowkee",
|
||||
"Clusters",
|
||||
"ibrahim",
|
||||
"Highlandrise",
|
||||
"philcoraz",
|
||||
"mztn",
|
||||
"ImagineerNL",
|
||||
"MrAcrtosSursus",
|
||||
"al300680",
|
||||
"pixl",
|
||||
"Robin",
|
||||
"chahknoir",
|
||||
"Marcus thronico",
|
||||
"nd",
|
||||
"keno94d",
|
||||
"James Melzer",
|
||||
"Bartleby",
|
||||
"Renvertere",
|
||||
"Rahuy",
|
||||
"Hermann003",
|
||||
"D",
|
||||
"Foolish",
|
||||
"RevyHiep",
|
||||
"Captain_Swag",
|
||||
"obkircher",
|
||||
"Tree Tagger",
|
||||
"gwyar",
|
||||
"D",
|
||||
"edgecase",
|
||||
"Neoxena",
|
||||
"mrmhalo",
|
||||
"dg",
|
||||
"Whitepinetrader",
|
||||
"Maarten Harms",
|
||||
"OrganicArtifact",
|
||||
"四糸凜音",
|
||||
"MudkipMedkitz",
|
||||
"Israel",
|
||||
"deanbrian",
|
||||
"POPPIN",
|
||||
"Muratoraccio",
|
||||
"SelfishMedic",
|
||||
"Ginnie",
|
||||
"Alex Wortman",
|
||||
"Cody",
|
||||
"adderleighn",
|
||||
"Raku",
|
||||
"smart.edge5178",
|
||||
"emadsultan",
|
||||
"InformedViewz",
|
||||
"CHKeeho80",
|
||||
"Bubbafett",
|
||||
"leaf",
|
||||
"Menard",
|
||||
"Skyfire83",
|
||||
"Adam Rinehart",
|
||||
"D",
|
||||
"Pitpe11",
|
||||
"TheD1rtyD03",
|
||||
"EnragedAntelope",
|
||||
"moonpetal",
|
||||
"SomeDude",
|
||||
"g9p0o",
|
||||
"nanana",
|
||||
"TheHolySheep",
|
||||
"Monte Won",
|
||||
"SpringBootisTrash",
|
||||
"carsten",
|
||||
"ikok",
|
||||
"Buecyb99",
|
||||
"4IXplr0r3r",
|
||||
"Coeur+de+cochon",
|
||||
"David Schenck",
|
||||
"han b",
|
||||
"Nico",
|
||||
"Wolfe7D1",
|
||||
"Banana Joe",
|
||||
"_ G3n",
|
||||
"Donovan Jenkins",
|
||||
"Ink Temptation",
|
||||
"edk",
|
||||
"Michael Eid",
|
||||
"beersandbacon",
|
||||
"Maximilian Pyko",
|
||||
"Invis",
|
||||
"Kalli Core",
|
||||
"Justin Houston",
|
||||
"james",
|
||||
"elleshar666",
|
||||
"OrochiNights",
|
||||
"Michael Zhu",
|
||||
"ACTUALLY_the_Real_Willem_Dafoe",
|
||||
"gonzalo",
|
||||
"Seraphy",
|
||||
"雨の心 落",
|
||||
"AllTimeNoobie",
|
||||
"jumpd",
|
||||
"John C",
|
||||
"Kauffy",
|
||||
"Rim",
|
||||
"Dismem",
|
||||
"EpicElric",
|
||||
"John J Linehan",
|
||||
"Xan Dionysus",
|
||||
"Nathan lee",
|
||||
"Mewtora",
|
||||
"Elliot E",
|
||||
"Middo",
|
||||
"Forbidden Atelier",
|
||||
"Edward Kennedy",
|
||||
"Justin Blaylock",
|
||||
"Adictedtohumping",
|
||||
"Devil Lude",
|
||||
"Nick Kage",
|
||||
"Towelie",
|
||||
"Vane Holzer",
|
||||
"psytrax",
|
||||
"Cyrus Fett",
|
||||
"Jean-françois SEMA",
|
||||
"Kurt",
|
||||
"hexxish",
|
||||
"giani kidd",
|
||||
"CptNeo",
|
||||
"notedfakes",
|
||||
"Chase Kwon",
|
||||
"Goober719",
|
||||
"Eric Ketchum",
|
||||
"Chad Barnes",
|
||||
"NICHOLAS BAXLEY",
|
||||
"Michael Scott",
|
||||
"James Ming",
|
||||
"vanditking",
|
||||
"kripitonga",
|
||||
"Rizzi",
|
||||
"nimin",
|
||||
"OMAR LUCIANO",
|
||||
"Jo+Example",
|
||||
"BrentBertram",
|
||||
"eumelzocker",
|
||||
"dxjaymz",
|
||||
"L C",
|
||||
"Dude"
|
||||
],
|
||||
"totalCount": 620
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "Abbrechen",
|
||||
"confirm": "Bestätigen",
|
||||
"actions": {
|
||||
"save": "Speichern",
|
||||
"cancel": "Abbrechen",
|
||||
"confirm": "Bestätigen",
|
||||
"delete": "Löschen",
|
||||
"move": "Verschieben",
|
||||
"refresh": "Aktualisieren",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "Legen Sie den Standard-Embedding-Stammordner für Downloads, Importe und Verschiebungen fest",
|
||||
"noDefault": "Kein Standard"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Zusätzliche Ordnerpfade",
|
||||
"help": "Fügen Sie zusätzliche Modellordner außerhalb der Standardpfade von ComfyUI hinzu. Diese Pfade werden separat gespeichert und zusammen mit den Standardordnern gescannt.",
|
||||
"description": "Konfigurieren Sie zusätzliche Ordner zum Scannen von Modellen. Diese Pfade sind spezifisch für LoRA Manager und werden mit den Standardpfaden von ComfyUI zusammengeführt.",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA-Pfade",
|
||||
"checkpoint": "Checkpoint-Pfade",
|
||||
"unet": "Diffusionsmodell-Pfade",
|
||||
"embedding": "Embedding-Pfade"
|
||||
},
|
||||
"pathPlaceholder": "/pfad/zu/extra/modellen",
|
||||
"saveSuccess": "Zusätzliche Ordnerpfade aktualisiert.",
|
||||
"saveError": "Fehler beim Aktualisieren der zusätzlichen Ordnerpfade: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Dieser Pfad ist bereits konfiguriert"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Prioritäts-Tags",
|
||||
"description": "Passen Sie die Tag-Prioritätsreihenfolge für jeden Modelltyp an (z. B. character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "Passwort (optional)",
|
||||
"proxyPasswordPlaceholder": "passwort",
|
||||
"proxyPasswordHelp": "Passwort für die Proxy-Authentifizierung (falls erforderlich)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Zusätzliche Ordnerpfade",
|
||||
"help": "Fügen Sie zusätzliche Modellordner außerhalb der Standardpfade von ComfyUI hinzu. Diese Pfade werden separat gespeichert und zusammen mit den Standardordnern gescannt.",
|
||||
"description": "Konfigurieren Sie zusätzliche Ordner zum Scannen von Modellen. Diese Pfade sind spezifisch für LoRA Manager und werden mit den Standardpfaden von ComfyUI zusammengeführt.",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA-Pfade",
|
||||
"checkpoint": "Checkpoint-Pfade",
|
||||
"unet": "Diffusionsmodell-Pfade",
|
||||
"embedding": "Embedding-Pfade"
|
||||
},
|
||||
"pathPlaceholder": "/pfad/zu/extra/modellen",
|
||||
"saveSuccess": "Zusätzliche Ordnerpfade aktualisiert.",
|
||||
"saveError": "Fehler beim Aktualisieren der zusätzlichen Ordnerpfade: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Dieser Pfad ist bereits konfiguriert"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "Im Listenmodus nicht verfügbar",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Verschieben wird für dieses Element nicht unterstützt.",
|
||||
"createFolderHint": "Loslassen, um einen neuen Ordner zu erstellen",
|
||||
"newFolderName": "Neuer Ordnername",
|
||||
"folderNameHint": "Eingabetaste zum Bestätigen, Escape zum Abbrechen",
|
||||
"emptyFolderName": "Bitte geben Sie einen Ordnernamen ein",
|
||||
"invalidFolderName": "Ordnername enthält ungültige Zeichen",
|
||||
"noDragState": "Kein ausstehender Ziehvorgang gefunden"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "Keine Ordner gefunden",
|
||||
"dragHint": "Elemente hierher ziehen, um Ordner zu erstellen"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "WeChat QR-Code anzeigen",
|
||||
"hideWechatQR": "WeChat QR-Code ausblenden"
|
||||
},
|
||||
"footer": "Vielen Dank, dass Sie LoRA Manager verwenden! ❤️"
|
||||
"footer": "Vielen Dank, dass Sie LoRA Manager verwenden! ❤️",
|
||||
"supporters": {
|
||||
"title": "Danke an alle Unterstützer",
|
||||
"subtitle": "Danke an {count} Unterstützer, die dieses Projekt möglich gemacht haben",
|
||||
"specialThanks": "Besonderer Dank",
|
||||
"allSupporters": "Alle Unterstützer",
|
||||
"totalCount": "{count} Unterstützer insgesamt"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "Wiederholen"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "Cancel",
|
||||
"confirm": "Confirm",
|
||||
"actions": {
|
||||
"save": "Save",
|
||||
"cancel": "Cancel",
|
||||
"confirm": "Confirm",
|
||||
"delete": "Delete",
|
||||
"move": "Move",
|
||||
"refresh": "Refresh",
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "Not available in list view",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "Unable to determine destination path for move.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Move is not supported for this item.",
|
||||
"createFolderHint": "Release to create new folder",
|
||||
"newFolderName": "New folder name",
|
||||
"folderNameHint": "Press Enter to confirm, Escape to cancel",
|
||||
"emptyFolderName": "Please enter a folder name",
|
||||
"invalidFolderName": "Folder name contains invalid characters",
|
||||
"noDragState": "No pending drag operation found"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "No folders found",
|
||||
"dragHint": "Drag items here to create folders"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "Show WeChat QR Code",
|
||||
"hideWechatQR": "Hide WeChat QR Code"
|
||||
},
|
||||
"footer": "Thank you for using LoRA Manager! ❤️"
|
||||
"footer": "Thank you for using LoRA Manager! ❤️",
|
||||
"supporters": {
|
||||
"title": "Thank You To Our Supporters",
|
||||
"subtitle": "Thanks to {count} supporters who made this project possible",
|
||||
"specialThanks": "Special Thanks",
|
||||
"allSupporters": "All Supporters",
|
||||
"totalCount": "{count} supporters in total"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "Cancelar",
|
||||
"confirm": "Confirmar",
|
||||
"actions": {
|
||||
"save": "Guardar",
|
||||
"cancel": "Cancelar",
|
||||
"confirm": "Confirmar",
|
||||
"delete": "Eliminar",
|
||||
"move": "Mover",
|
||||
"refresh": "Actualizar",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "Establecer el directorio raíz predeterminado de embedding para descargas, importaciones y movimientos",
|
||||
"noDefault": "Sin predeterminado"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Rutas de carpetas adicionales",
|
||||
"help": "Agregue carpetas de modelos adicionales fuera de las rutas estándar de ComfyUI. Estas rutas se almacenan por separado y se escanean junto con las carpetas predeterminadas.",
|
||||
"description": "Configure carpetas adicionales para escanear modelos. Estas rutas son específicas de LoRA Manager y se fusionarán con las rutas predeterminadas de ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Rutas de LoRA",
|
||||
"checkpoint": "Rutas de Checkpoint",
|
||||
"unet": "Rutas de modelo de difusión",
|
||||
"embedding": "Rutas de Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/ruta/a/modelos/extra",
|
||||
"saveSuccess": "Rutas de carpetas adicionales actualizadas.",
|
||||
"saveError": "Error al actualizar las rutas de carpetas adicionales: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Esta ruta ya está configurada"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Etiquetas prioritarias",
|
||||
"description": "Personaliza el orden de prioridad de etiquetas para cada tipo de modelo (p. ej., character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "Contraseña (opcional)",
|
||||
"proxyPasswordPlaceholder": "contraseña",
|
||||
"proxyPasswordHelp": "Contraseña para autenticación de proxy (si es necesario)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Rutas de carpetas adicionales",
|
||||
"help": "Agregue carpetas de modelos adicionales fuera de las rutas estándar de ComfyUI. Estas rutas se almacenan por separado y se escanean junto con las carpetas predeterminadas.",
|
||||
"description": "Configure carpetas adicionales para escanear modelos. Estas rutas son específicas de LoRA Manager y se fusionarán con las rutas predeterminadas de ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Rutas de LoRA",
|
||||
"checkpoint": "Rutas de Checkpoint",
|
||||
"unet": "Rutas de modelo de difusión",
|
||||
"embedding": "Rutas de Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/ruta/a/modelos/extra",
|
||||
"saveSuccess": "Rutas de carpetas adicionales actualizadas.",
|
||||
"saveError": "Error al actualizar las rutas de carpetas adicionales: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Esta ruta ya está configurada"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "No disponible en vista de lista",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "El movimiento no es compatible con este elemento.",
|
||||
"createFolderHint": "Suelta para crear una nueva carpeta",
|
||||
"newFolderName": "Nombre de la nueva carpeta",
|
||||
"folderNameHint": "Presiona Enter para confirmar, Escape para cancelar",
|
||||
"emptyFolderName": "Por favor, introduce un nombre de carpeta",
|
||||
"invalidFolderName": "El nombre de la carpeta contiene caracteres no válidos",
|
||||
"noDragState": "No se encontró ninguna operación de arrastre pendiente"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "No se encontraron carpetas",
|
||||
"dragHint": "Arrastra elementos aquí para crear carpetas"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "Mostrar código QR de WeChat",
|
||||
"hideWechatQR": "Ocultar código QR de WeChat"
|
||||
},
|
||||
"footer": "¡Gracias por usar el gestor de LoRA! ❤️"
|
||||
"footer": "¡Gracias por usar el gestor de LoRA! ❤️",
|
||||
"supporters": {
|
||||
"title": "Gracias a todos los seguidores",
|
||||
"subtitle": "Gracias a {count} seguidores que hicieron este proyecto posible",
|
||||
"specialThanks": "Agradecimientos especiales",
|
||||
"allSupporters": "Todos los seguidores",
|
||||
"totalCount": "{count} seguidores en total"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "Reintentar"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "Annuler",
|
||||
"confirm": "Confirmer",
|
||||
"actions": {
|
||||
"save": "Enregistrer",
|
||||
"cancel": "Annuler",
|
||||
"confirm": "Confirmer",
|
||||
"delete": "Supprimer",
|
||||
"move": "Déplacer",
|
||||
"refresh": "Actualiser",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "Définir le répertoire racine embedding par défaut pour les téléchargements, imports et déplacements",
|
||||
"noDefault": "Aucun par défaut"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Chemins de dossiers supplémentaires",
|
||||
"help": "Ajoutez des dossiers de modèles supplémentaires en dehors des chemins standard de ComfyUI. Ces chemins sont stockés séparément et analysés aux côtés des dossiers par défaut.",
|
||||
"description": "Configurez des dossiers supplémentaires pour l'analyse de modèles. Ces chemins sont spécifiques à LoRA Manager et seront fusionnés avec les chemins par défaut de ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Chemins LoRA",
|
||||
"checkpoint": "Chemins Checkpoint",
|
||||
"unet": "Chemins de modèle de diffusion",
|
||||
"embedding": "Chemins Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/chemin/vers/modèles/supplémentaires",
|
||||
"saveSuccess": "Chemins de dossiers supplémentaires mis à jour.",
|
||||
"saveError": "Échec de la mise à jour des chemins de dossiers supplémentaires: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Ce chemin est déjà configuré"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Étiquettes prioritaires",
|
||||
"description": "Personnalisez l'ordre de priorité des étiquettes pour chaque type de modèle (par ex. : character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "Mot de passe (optionnel)",
|
||||
"proxyPasswordPlaceholder": "mot_de_passe",
|
||||
"proxyPasswordHelp": "Mot de passe pour l'authentification proxy (si nécessaire)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Chemins de dossiers supplémentaires",
|
||||
"help": "Ajoutez des dossiers de modèles supplémentaires en dehors des chemins standard de ComfyUI. Ces chemins sont stockés séparément et analysés aux côtés des dossiers par défaut.",
|
||||
"description": "Configurez des dossiers supplémentaires pour l'analyse de modèles. Ces chemins sont spécifiques à LoRA Manager et seront fusionnés avec les chemins par défaut de ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Chemins LoRA",
|
||||
"checkpoint": "Chemins Checkpoint",
|
||||
"unet": "Chemins de modèle de diffusion",
|
||||
"embedding": "Chemins Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/chemin/vers/modèles/supplémentaires",
|
||||
"saveSuccess": "Chemins de dossiers supplémentaires mis à jour.",
|
||||
"saveError": "Échec de la mise à jour des chemins de dossiers supplémentaires: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Ce chemin est déjà configuré"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "Non disponible en vue liste",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Le déplacement n'est pas pris en charge pour cet élément.",
|
||||
"createFolderHint": "Relâcher pour créer un nouveau dossier",
|
||||
"newFolderName": "Nom du nouveau dossier",
|
||||
"folderNameHint": "Appuyez sur Entrée pour confirmer, Échap pour annuler",
|
||||
"emptyFolderName": "Veuillez saisir un nom de dossier",
|
||||
"invalidFolderName": "Le nom du dossier contient des caractères invalides",
|
||||
"noDragState": "Aucune opération de glissement en attente trouvée"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "Aucun dossier trouvé",
|
||||
"dragHint": "Faites glisser des éléments ici pour créer des dossiers"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "Afficher le QR Code WeChat",
|
||||
"hideWechatQR": "Masquer le QR Code WeChat"
|
||||
},
|
||||
"footer": "Merci d'utiliser le Gestionnaire LoRA ! ❤️"
|
||||
"footer": "Merci d'utiliser le Gestionnaire LoRA ! ❤️",
|
||||
"supporters": {
|
||||
"title": "Merci à tous les supporters",
|
||||
"subtitle": "Merci aux {count} supporters qui ont rendu ce projet possible",
|
||||
"specialThanks": "Remerciements spéciaux",
|
||||
"allSupporters": "Tous les supporters",
|
||||
"totalCount": "{count} supporters au total"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "Réessayer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "ביטול",
|
||||
"confirm": "אישור",
|
||||
"actions": {
|
||||
"save": "שמור",
|
||||
"save": "שמירה",
|
||||
"cancel": "ביטול",
|
||||
"delete": "מחק",
|
||||
"move": "העבר",
|
||||
"refresh": "רענן",
|
||||
"back": "חזור",
|
||||
"confirm": "אישור",
|
||||
"delete": "מחיקה",
|
||||
"move": "העברה",
|
||||
"refresh": "רענון",
|
||||
"back": "חזרה",
|
||||
"next": "הבא",
|
||||
"backToTop": "חזור למעלה",
|
||||
"backToTop": "חזרה למעלה",
|
||||
"settings": "הגדרות",
|
||||
"help": "עזרה",
|
||||
"add": "הוסף"
|
||||
"add": "הוספה"
|
||||
},
|
||||
"status": {
|
||||
"loading": "טוען...",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות",
|
||||
"noDefault": "אין ברירת מחדל"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "נתיבי תיקיות נוספים",
|
||||
"help": "הוסף תיקיות מודלים נוספות מחוץ לנתיבים הסטנדרטיים של ComfyUI. נתיבים אלה נשמרים בנפרד ונסרקים לצד תיקיות ברירת המחדל.",
|
||||
"description": "הגדר תיקיות נוספות לסריקת מודלים. נתיבים אלה ספציפיים ל-LoRA Manager וימוזגו עם נתיבי ברירת המחדל של ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "נתיבי LoRA",
|
||||
"checkpoint": "נתיבי Checkpoint",
|
||||
"unet": "נתיבי מודל דיפוזיה",
|
||||
"embedding": "נתיבי Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/נתיב/למודלים/נוספים",
|
||||
"saveSuccess": "נתיבי תיקיות נוספים עודכנו.",
|
||||
"saveError": "נכשל בעדכון נתיבי תיקיות נוספים: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "נתיב זה כבר מוגדר"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "תגיות עדיפות",
|
||||
"description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "סיסמה (אופציונלי)",
|
||||
"proxyPasswordPlaceholder": "password",
|
||||
"proxyPasswordHelp": "סיסמה לאימות מול הפרוקסי (אם נדרש)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "נתיבי תיקיות נוספים",
|
||||
"help": "הוסף תיקיות מודלים נוספות מחוץ לנתיבים הסטנדרטיים של ComfyUI. נתיבים אלה נשמרים בנפרד ונסרקים לצד תיקיות ברירת המחדל.",
|
||||
"description": "הגדר תיקיות נוספות לסריקת מודלים. נתיבים אלה ספציפיים ל-LoRA Manager וימוזגו עם נתיבי ברירת המחדל של ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "נתיבי LoRA",
|
||||
"checkpoint": "נתיבי Checkpoint",
|
||||
"unet": "נתיבי מודל דיפוזיה",
|
||||
"embedding": "נתיבי Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/נתיב/למודלים/נוספים",
|
||||
"saveSuccess": "נתיבי תיקיות נוספים עודכנו.",
|
||||
"saveError": "נכשל בעדכון נתיבי תיקיות נוספים: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "נתיב זה כבר מוגדר"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "לא זמין בתצוגת רשימה",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "העברה אינה נתמכת עבור פריט זה.",
|
||||
"createFolderHint": "שחרר כדי ליצור תיקייה חדשה",
|
||||
"newFolderName": "שם תיקייה חדשה",
|
||||
"folderNameHint": "הקש Enter לאישור, Escape לביטול",
|
||||
"emptyFolderName": "אנא הזן שם תיקייה",
|
||||
"invalidFolderName": "שם התיקייה מכיל תווים לא חוקיים",
|
||||
"noDragState": "לא נמצאה פעולת גרירה ממתינה"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "לא נמצאו תיקיות",
|
||||
"dragHint": "גרור פריטים לכאן כדי ליצור תיקיות"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "הצג קוד QR של WeChat",
|
||||
"hideWechatQR": "הסתר קוד QR של WeChat"
|
||||
},
|
||||
"footer": "תודה על השימוש במנהל LoRA! ❤️"
|
||||
"footer": "תודה על השימוש במנהל LoRA! ❤️",
|
||||
"supporters": {
|
||||
"title": "תודה לכל התומכים",
|
||||
"subtitle": "תודה ל־{count} תומכים שהפכו את הפרויקט הזה לאפשרי",
|
||||
"specialThanks": "תודה מיוחדת",
|
||||
"allSupporters": "כל התומכים",
|
||||
"totalCount": "{count} תומכים בסך הכל"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "נסה שוב"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "キャンセル",
|
||||
"confirm": "確認",
|
||||
"actions": {
|
||||
"save": "保存",
|
||||
"cancel": "キャンセル",
|
||||
"confirm": "確認",
|
||||
"delete": "削除",
|
||||
"move": "移動",
|
||||
"refresh": "更新",
|
||||
"back": "戻る",
|
||||
"next": "次へ",
|
||||
"backToTop": "トップに戻る",
|
||||
"backToTop": "トップへ戻る",
|
||||
"settings": "設定",
|
||||
"help": "ヘルプ",
|
||||
"add": "追加"
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "ダウンロード、インポート、移動用のデフォルトembeddingルートディレクトリを設定",
|
||||
"noDefault": "デフォルトなし"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "追加フォルダーパス",
|
||||
"help": "ComfyUIの標準パスの外部に追加のモデルフォルダを追加します。これらのパスは別々に保存され、デフォルトのフォルダと一緒にスキャンされます。",
|
||||
"description": "モデルをスキャンするための追加フォルダを設定します。これらのパスはLoRA Manager固有であり、ComfyUIのデフォルトパスとマージされます。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRAパス",
|
||||
"checkpoint": "Checkpointパス",
|
||||
"unet": "Diffusionモデルパス",
|
||||
"embedding": "Embeddingパス"
|
||||
},
|
||||
"pathPlaceholder": "/追加モデルへのパス",
|
||||
"saveSuccess": "追加フォルダーパスを更新しました。",
|
||||
"saveError": "追加フォルダーパスの更新に失敗しました: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "このパスはすでに設定されています"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先タグ",
|
||||
"description": "各モデルタイプのタグ優先順位をカスタマイズします (例: character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "パスワード(任意)",
|
||||
"proxyPasswordPlaceholder": "パスワード",
|
||||
"proxyPasswordHelp": "プロキシ認証用のパスワード(必要な場合)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "追加フォルダーパス",
|
||||
"help": "ComfyUIの標準パスの外部に追加のモデルフォルダを追加します。これらのパスは別々に保存され、デフォルトのフォルダと一緒にスキャンされます。",
|
||||
"description": "モデルをスキャンするための追加フォルダを設定します。これらのパスはLoRA Manager固有であり、ComfyUIのデフォルトパスとマージされます。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRAパス",
|
||||
"checkpoint": "Checkpointパス",
|
||||
"unet": "Diffusionモデルパス",
|
||||
"embedding": "Embeddingパス"
|
||||
},
|
||||
"pathPlaceholder": "/追加モデルへのパス",
|
||||
"saveSuccess": "追加フォルダーパスを更新しました。",
|
||||
"saveError": "追加フォルダーパスの更新に失敗しました: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "このパスはすでに設定されています"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "リストビューでは利用できません",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "移動先のパスを特定できません。",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "この項目の移動はサポートされていません。",
|
||||
"createFolderHint": "放して新しいフォルダを作成",
|
||||
"newFolderName": "新しいフォルダ名",
|
||||
"folderNameHint": "Enterで確定、Escでキャンセル",
|
||||
"emptyFolderName": "フォルダ名を入力してください",
|
||||
"invalidFolderName": "フォルダ名に無効な文字が含まれています",
|
||||
"noDragState": "保留中のドラッグ操作が見つかりません"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "フォルダが見つかりません",
|
||||
"dragHint": "ここへアイテムをドラッグしてフォルダを作成します"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "WeChat QRコードを表示",
|
||||
"hideWechatQR": "WeChat QRコードを非表示"
|
||||
},
|
||||
"footer": "LoRA Managerをご利用いただきありがとうございます! ❤️"
|
||||
"footer": "LoRA Managerをご利用いただきありがとうございます! ❤️",
|
||||
"supporters": {
|
||||
"title": "サポーターの皆様に感謝",
|
||||
"subtitle": "{count} 名のサポーターの皆様に、このプロジェクトを実現していただきありがとうございます",
|
||||
"specialThanks": "特別感謝",
|
||||
"allSupporters": "全サポーター",
|
||||
"totalCount": "サポーター {count} 名"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "再試行"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "취소",
|
||||
"confirm": "확인",
|
||||
"actions": {
|
||||
"save": "저장",
|
||||
"cancel": "취소",
|
||||
"confirm": "확인",
|
||||
"delete": "삭제",
|
||||
"move": "이동",
|
||||
"refresh": "새로고침",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 Embedding 루트 디렉토리를 설정합니다",
|
||||
"noDefault": "기본값 없음"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "추가 폴다 경로",
|
||||
"help": "ComfyUI의 표준 경로 외부에 추가 모델 폴드를 추가하세요. 이러한 경로는 별도로 저장되며 기본 폴와 함께 스캔됩니다.",
|
||||
"description": "모델을 스캔하기 위한 추가 폴를 설정하세요. 이러한 경로는 LoRA Manager 특유의 것이며 ComfyUI의 기본 경로와 병합됩니다.",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 경로",
|
||||
"checkpoint": "Checkpoint 경로",
|
||||
"unet": "Diffusion 모델 경로",
|
||||
"embedding": "Embedding 경로"
|
||||
},
|
||||
"pathPlaceholder": "/추가/모델/경로",
|
||||
"saveSuccess": "추가 폴다 경로가 업데이트되었습니다.",
|
||||
"saveError": "추가 폴다 경로 업데이트 실패: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "이 경로는 이미 구성되어 있습니다"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "우선순위 태그",
|
||||
"description": "모델 유형별 태그 우선순위를 사용자 지정합니다(예: character, concept, style(toon|toon_style)).",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "비밀번호 (선택사항)",
|
||||
"proxyPasswordPlaceholder": "password",
|
||||
"proxyPasswordHelp": "프록시 인증에 필요한 비밀번호 (필요한 경우)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "추가 폴다 경로",
|
||||
"help": "ComfyUI의 표준 경로 외부에 추가 모델 폴드를 추가하세요. 이러한 경로는 별도로 저장되며 기본 폴와 함께 스캔됩니다.",
|
||||
"description": "모델을 스캔하기 위한 추가 폴를 설정하세요. 이러한 경로는 LoRA Manager 특유의 것이며 ComfyUI의 기본 경로와 병합됩니다.",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 경로",
|
||||
"checkpoint": "Checkpoint 경로",
|
||||
"unet": "Diffusion 모델 경로",
|
||||
"embedding": "Embedding 경로"
|
||||
},
|
||||
"pathPlaceholder": "/추가/모델/경로",
|
||||
"saveSuccess": "추가 폴다 경로가 업데이트되었습니다.",
|
||||
"saveError": "추가 폴다 경로 업데이트 실패: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "이 경로는 이미 구성되어 있습니다"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "목록 보기에서는 사용할 수 없습니다",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "이 항목은 이동을 지원하지 않습니다.",
|
||||
"createFolderHint": "놓아서 새 폴더 만들기",
|
||||
"newFolderName": "새 폴더 이름",
|
||||
"folderNameHint": "Enter를 눌러 확인, Escape를 눌러 취소",
|
||||
"emptyFolderName": "폴더 이름을 입력하세요",
|
||||
"invalidFolderName": "폴더 이름에 잘못된 문자가 포함되어 있습니다",
|
||||
"noDragState": "보류 중인 드래그 작업을 찾을 수 없습니다"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "폴더를 찾을 수 없습니다",
|
||||
"dragHint": "항목을 여기로 드래그하여 폴더를 만듭니다"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "WeChat QR 코드 표시",
|
||||
"hideWechatQR": "WeChat QR 코드 숨기기"
|
||||
},
|
||||
"footer": "LoRA Manager를 사용해주셔서 감사합니다! ❤️"
|
||||
"footer": "LoRA Manager를 사용해주셔서 감사합니다! ❤️",
|
||||
"supporters": {
|
||||
"title": "후원자 분들께 감사드립니다",
|
||||
"subtitle": "이 프로젝트를 가능하게 해준 {count}명의 후원자분들께 감사드립니다",
|
||||
"specialThanks": "특별 감사",
|
||||
"allSupporters": "모든 후원자",
|
||||
"totalCount": "총 {count}명의 후원자"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "다시 시도"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "Отмена",
|
||||
"confirm": "Подтвердить",
|
||||
"actions": {
|
||||
"save": "Сохранить",
|
||||
"cancel": "Отмена",
|
||||
"confirm": "Подтвердить",
|
||||
"delete": "Удалить",
|
||||
"move": "Переместить",
|
||||
"refresh": "Обновить",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "Установить корневую папку embedding по умолчанию для загрузок, импорта и перемещений",
|
||||
"noDefault": "Не задано"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Дополнительные пути к папкам",
|
||||
"help": "Добавьте дополнительные папки моделей за пределами стандартных путей ComfyUI. Эти пути хранятся отдельно и сканируются вместе с папками по умолчанию.",
|
||||
"description": "Настройте дополнительные папки для сканирования моделей. Эти пути специфичны для LoRA Manager и будут объединены с путями по умолчанию ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Пути LoRA",
|
||||
"checkpoint": "Пути Checkpoint",
|
||||
"unet": "Пути моделей диффузии",
|
||||
"embedding": "Пути Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/путь/к/дополнительным/моделям",
|
||||
"saveSuccess": "Дополнительные пути к папкам обновлены.",
|
||||
"saveError": "Не удалось обновить дополнительные пути к папкам: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Этот путь уже настроен"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Приоритетные теги",
|
||||
"description": "Настройте порядок приоритетов тегов для каждого типа моделей (например, character, concept, style(toon|toon_style)).",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "Пароль (необязательно)",
|
||||
"proxyPasswordPlaceholder": "пароль",
|
||||
"proxyPasswordHelp": "Пароль для аутентификации на прокси (если требуется)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "Дополнительные пути к папкам",
|
||||
"help": "Добавьте дополнительные папки моделей за пределами стандартных путей ComfyUI. Эти пути хранятся отдельно и сканируются вместе с папками по умолчанию.",
|
||||
"description": "Настройте дополнительные папки для сканирования моделей. Эти пути специфичны для LoRA Manager и будут объединены с путями по умолчанию ComfyUI.",
|
||||
"modelTypes": {
|
||||
"lora": "Пути LoRA",
|
||||
"checkpoint": "Пути Checkpoint",
|
||||
"unet": "Пути моделей диффузии",
|
||||
"embedding": "Пути Embedding"
|
||||
},
|
||||
"pathPlaceholder": "/путь/к/дополнительным/моделям",
|
||||
"saveSuccess": "Дополнительные пути к папкам обновлены.",
|
||||
"saveError": "Не удалось обновить дополнительные пути к папкам: {message}",
|
||||
"validation": {
|
||||
"duplicatePath": "Этот путь уже настроен"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "Недоступно в виде списка",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "Не удалось определить путь назначения для перемещения.",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Перемещение этого элемента не поддерживается.",
|
||||
"createFolderHint": "Отпустите, чтобы создать новую папку",
|
||||
"newFolderName": "Имя новой папки",
|
||||
"folderNameHint": "Нажмите Enter для подтверждения, Escape для отмены",
|
||||
"emptyFolderName": "Пожалуйста, введите имя папки",
|
||||
"invalidFolderName": "Имя папки содержит недопустимые символы",
|
||||
"noDragState": "Ожидающая операция перетаскивания не найдена"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "Папки не найдены",
|
||||
"dragHint": "Перетащите элементы сюда, чтобы создать папки"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "Показать QR-код WeChat",
|
||||
"hideWechatQR": "Скрыть QR-код WeChat"
|
||||
},
|
||||
"footer": "Спасибо за использование LoRA Manager! ❤️"
|
||||
"footer": "Спасибо за использование LoRA Manager! ❤️",
|
||||
"supporters": {
|
||||
"title": "Спасибо всем сторонникам",
|
||||
"subtitle": "Спасибо {count} сторонникам, которые сделали этот проект возможным",
|
||||
"specialThanks": "Особая благодарность",
|
||||
"allSupporters": "Все сторонники",
|
||||
"totalCount": "Всего {count} сторонников"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "Повторить"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "取消",
|
||||
"confirm": "确认",
|
||||
"actions": {
|
||||
"save": "保存",
|
||||
"cancel": "取消",
|
||||
"confirm": "确认",
|
||||
"delete": "删除",
|
||||
"move": "移动",
|
||||
"refresh": "刷新",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "设置下载、导入和移动时的默认 Embedding 根目录",
|
||||
"noDefault": "无默认"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "额外文件夹路径",
|
||||
"help": "在 ComfyUI 的标准路径之外添加额外的模型文件夹。这些路径单独存储,并与默认文件夹一起扫描。",
|
||||
"description": "配置额外的文件夹以扫描模型。这些路径是 LoRA Manager 特有的,将与 ComfyUI 的默认路径合并。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 路径",
|
||||
"checkpoint": "Checkpoint 路径",
|
||||
"unet": "Diffusion 模型路径",
|
||||
"embedding": "Embedding 路径"
|
||||
},
|
||||
"pathPlaceholder": "/额外/模型/路径",
|
||||
"saveSuccess": "额外文件夹路径已更新。",
|
||||
"saveError": "更新额外文件夹路径失败:{message}",
|
||||
"validation": {
|
||||
"duplicatePath": "此路径已配置"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "优先标签",
|
||||
"description": "为每种模型类型自定义标签优先级顺序 (例如: character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "密码 (可选)",
|
||||
"proxyPasswordPlaceholder": "密码",
|
||||
"proxyPasswordHelp": "代理认证的密码 (如果需要)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "额外文件夹路径",
|
||||
"help": "在 ComfyUI 的标准路径之外添加额外的模型文件夹。这些路径单独存储,并与默认文件夹一起扫描。",
|
||||
"description": "配置额外的文件夹以扫描模型。这些路径是 LoRA Manager 特有的,将与 ComfyUI 的默认路径合并。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 路径",
|
||||
"checkpoint": "Checkpoint 路径",
|
||||
"unet": "Diffusion 模型路径",
|
||||
"embedding": "Embedding 路径"
|
||||
},
|
||||
"pathPlaceholder": "/额外/模型/路径",
|
||||
"saveSuccess": "额外文件夹路径已更新。",
|
||||
"saveError": "更新额外文件夹路径失败:{message}",
|
||||
"validation": {
|
||||
"duplicatePath": "此路径已配置"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "列表视图下不可用",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "无法确定移动的目标路径。",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Move is not supported for this item.",
|
||||
"createFolderHint": "释放以创建新文件夹",
|
||||
"newFolderName": "新文件夹名称",
|
||||
"folderNameHint": "按 Enter 确认,Escape 取消",
|
||||
"emptyFolderName": "请输入文件夹名称",
|
||||
"invalidFolderName": "文件夹名称包含无效字符",
|
||||
"noDragState": "未找到待处理的拖放操作"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "未找到文件夹",
|
||||
"dragHint": "拖拽项目到此处以创建文件夹"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "显示微信二维码",
|
||||
"hideWechatQR": "隐藏微信二维码"
|
||||
},
|
||||
"footer": "感谢使用 LoRA 管理器!❤️"
|
||||
"footer": "感谢使用 LoRA 管理器!❤️",
|
||||
"supporters": {
|
||||
"title": "感谢所有支持者",
|
||||
"subtitle": "感谢 {count} 位支持者让这个项目成为可能",
|
||||
"specialThanks": "特别感谢",
|
||||
"allSupporters": "所有支持者",
|
||||
"totalCount": "共 {count} 位支持者"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "重试"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"common": {
|
||||
"cancel": "取消",
|
||||
"confirm": "確認",
|
||||
"actions": {
|
||||
"save": "儲存",
|
||||
"cancel": "取消",
|
||||
"confirm": "確認",
|
||||
"delete": "刪除",
|
||||
"move": "移動",
|
||||
"refresh": "重新整理",
|
||||
@@ -361,6 +364,23 @@
|
||||
"defaultEmbeddingRootHelp": "設定下載、匯入和移動時的預設 Embedding 根目錄",
|
||||
"noDefault": "未設定預設"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "額外資料夾路徑",
|
||||
"help": "在 ComfyUI 的標準路徑之外新增額外的模型資料夾。這些路徑單獨儲存,並與預設資料夾一起掃描。",
|
||||
"description": "設定額外的資料夾以掃描模型。這些路徑是 LoRA Manager 特有的,將與 ComfyUI 的預設路徑合併。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 路徑",
|
||||
"checkpoint": "Checkpoint 路徑",
|
||||
"unet": "Diffusion 模型路徑",
|
||||
"embedding": "Embedding 路徑"
|
||||
},
|
||||
"pathPlaceholder": "/額外/模型/路徑",
|
||||
"saveSuccess": "額外資料夾路徑已更新。",
|
||||
"saveError": "更新額外資料夾路徑失敗:{message}",
|
||||
"validation": {
|
||||
"duplicatePath": "此路徑已設定"
|
||||
}
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先標籤",
|
||||
"description": "為每種模型類型自訂標籤的優先順序 (例如: character, concept, style(toon|toon_style))",
|
||||
@@ -485,23 +505,6 @@
|
||||
"proxyPassword": "密碼(選填)",
|
||||
"proxyPasswordPlaceholder": "password",
|
||||
"proxyPasswordHelp": "代理驗證所需的密碼(如有需要)"
|
||||
},
|
||||
"extraFolderPaths": {
|
||||
"title": "額外資料夾路徑",
|
||||
"help": "在 ComfyUI 的標準路徑之外新增額外的模型資料夾。這些路徑單獨儲存,並與預設資料夾一起掃描。",
|
||||
"description": "設定額外的資料夾以掃描模型。這些路徑是 LoRA Manager 特有的,將與 ComfyUI 的預設路徑合併。",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA 路徑",
|
||||
"checkpoint": "Checkpoint 路徑",
|
||||
"unet": "Diffusion 模型路徑",
|
||||
"embedding": "Embedding 路徑"
|
||||
},
|
||||
"pathPlaceholder": "/額外/模型/路徑",
|
||||
"saveSuccess": "額外資料夾路徑已更新。",
|
||||
"saveError": "更新額外資料夾路徑失敗:{message}",
|
||||
"validation": {
|
||||
"duplicatePath": "此路徑已設定"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -750,7 +753,17 @@
|
||||
"collapseAllDisabled": "列表檢視下不可用",
|
||||
"dragDrop": {
|
||||
"unableToResolveRoot": "無法確定移動的目標路徑。",
|
||||
"moveUnsupported": "Move is not supported for this item."
|
||||
"moveUnsupported": "Move is not supported for this item.",
|
||||
"createFolderHint": "放開以建立新資料夾",
|
||||
"newFolderName": "新資料夾名稱",
|
||||
"folderNameHint": "按 Enter 確認,Escape 取消",
|
||||
"emptyFolderName": "請輸入資料夾名稱",
|
||||
"invalidFolderName": "資料夾名稱包含無效字元",
|
||||
"noDragState": "未找到待處理的拖放操作"
|
||||
},
|
||||
"empty": {
|
||||
"noFolders": "未找到資料夾",
|
||||
"dragHint": "將項目拖到此處以建立資料夾"
|
||||
}
|
||||
},
|
||||
"statistics": {
|
||||
@@ -1342,7 +1355,14 @@
|
||||
"showWechatQR": "顯示微信二維碼",
|
||||
"hideWechatQR": "隱藏微信二維碼"
|
||||
},
|
||||
"footer": "感謝您使用 LoRA 管理器!❤️"
|
||||
"footer": "感謝您使用 LoRA 管理器!❤️",
|
||||
"supporters": {
|
||||
"title": "感謝所有支持者",
|
||||
"subtitle": "感謝 {count} 位支持者讓這個專案成為可能",
|
||||
"specialThanks": "特別感謝",
|
||||
"allSupporters": "所有支持者",
|
||||
"totalCount": "共 {count} 位支持者"
|
||||
}
|
||||
},
|
||||
"toast": {
|
||||
"general": {
|
||||
@@ -1651,4 +1671,4 @@
|
||||
"retry": "重試"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
308
py/config.py
308
py/config.py
@@ -2,7 +2,7 @@ import os
|
||||
import platform
|
||||
import threading
|
||||
from pathlib import Path
|
||||
import folder_paths # type: ignore
|
||||
import folder_paths # type: ignore
|
||||
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple
|
||||
import logging
|
||||
import json
|
||||
@@ -10,16 +10,23 @@ import urllib.parse
|
||||
import time
|
||||
|
||||
from .utils.cache_paths import CacheType, get_cache_file_path, get_legacy_cache_paths
|
||||
from .utils.settings_paths import ensure_settings_file, get_settings_dir, load_settings_template
|
||||
from .utils.settings_paths import (
|
||||
ensure_settings_file,
|
||||
get_settings_dir,
|
||||
load_settings_template,
|
||||
)
|
||||
|
||||
# Use an environment variable to control standalone mode
|
||||
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
standalone_mode = (
|
||||
os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
|
||||
or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _normalize_folder_paths_for_comparison(
|
||||
folder_paths: Mapping[str, Iterable[str]]
|
||||
folder_paths: Mapping[str, Iterable[str]],
|
||||
) -> Dict[str, Set[str]]:
|
||||
"""Normalize folder paths for comparison across libraries."""
|
||||
|
||||
@@ -49,7 +56,7 @@ def _normalize_folder_paths_for_comparison(
|
||||
|
||||
|
||||
def _normalize_library_folder_paths(
|
||||
library_payload: Mapping[str, Any]
|
||||
library_payload: Mapping[str, Any],
|
||||
) -> Dict[str, Set[str]]:
|
||||
"""Return normalized folder paths extracted from a library payload."""
|
||||
|
||||
@@ -74,11 +81,17 @@ def _get_template_folder_paths() -> Dict[str, Set[str]]:
|
||||
|
||||
class Config:
|
||||
"""Global configuration for LoRA Manager"""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.templates_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates')
|
||||
self.static_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'static')
|
||||
self.i18n_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'locales')
|
||||
self.templates_path = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)), "templates"
|
||||
)
|
||||
self.static_path = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)), "static"
|
||||
)
|
||||
self.i18n_path = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)), "locales"
|
||||
)
|
||||
# Path mapping dictionary, target to link mapping
|
||||
self._path_mappings: Dict[str, str] = {}
|
||||
# Normalized preview root directories used to validate preview access
|
||||
@@ -98,7 +111,7 @@ class Config:
|
||||
self.extra_embeddings_roots: List[str] = []
|
||||
# Scan symbolic links during initialization
|
||||
self._initialize_symlink_mappings()
|
||||
|
||||
|
||||
if not standalone_mode:
|
||||
# Save the paths to settings.json when running in ComfyUI mode
|
||||
self.save_folder_paths_to_settings()
|
||||
@@ -152,17 +165,21 @@ class Config:
|
||||
default_library = libraries.get("default", {})
|
||||
|
||||
target_folder_paths = {
|
||||
'loras': list(self.loras_roots),
|
||||
'checkpoints': list(self.checkpoints_roots or []),
|
||||
'unet': list(self.unet_roots or []),
|
||||
'embeddings': list(self.embeddings_roots or []),
|
||||
"loras": list(self.loras_roots),
|
||||
"checkpoints": list(self.checkpoints_roots or []),
|
||||
"unet": list(self.unet_roots or []),
|
||||
"embeddings": list(self.embeddings_roots or []),
|
||||
}
|
||||
|
||||
normalized_target_paths = _normalize_folder_paths_for_comparison(target_folder_paths)
|
||||
normalized_target_paths = _normalize_folder_paths_for_comparison(
|
||||
target_folder_paths
|
||||
)
|
||||
|
||||
normalized_default_paths: Optional[Dict[str, Set[str]]] = None
|
||||
if isinstance(default_library, Mapping):
|
||||
normalized_default_paths = _normalize_library_folder_paths(default_library)
|
||||
normalized_default_paths = _normalize_library_folder_paths(
|
||||
default_library
|
||||
)
|
||||
|
||||
if (
|
||||
not comfy_library
|
||||
@@ -185,13 +202,19 @@ class Config:
|
||||
default_lora_root = self.loras_roots[0]
|
||||
|
||||
default_checkpoint_root = comfy_library.get("default_checkpoint_root", "")
|
||||
if (not default_checkpoint_root and self.checkpoints_roots and
|
||||
len(self.checkpoints_roots) == 1):
|
||||
if (
|
||||
not default_checkpoint_root
|
||||
and self.checkpoints_roots
|
||||
and len(self.checkpoints_roots) == 1
|
||||
):
|
||||
default_checkpoint_root = self.checkpoints_roots[0]
|
||||
|
||||
default_embedding_root = comfy_library.get("default_embedding_root", "")
|
||||
if (not default_embedding_root and self.embeddings_roots and
|
||||
len(self.embeddings_roots) == 1):
|
||||
if (
|
||||
not default_embedding_root
|
||||
and self.embeddings_roots
|
||||
and len(self.embeddings_roots) == 1
|
||||
):
|
||||
default_embedding_root = self.embeddings_roots[0]
|
||||
|
||||
metadata = dict(comfy_library.get("metadata", {}))
|
||||
@@ -216,11 +239,12 @@ class Config:
         try:
             if os.path.islink(path):
                 return True
-            if platform.system() == 'Windows':
+            if platform.system() == "Windows":
                 try:
                     import ctypes
+
                     FILE_ATTRIBUTE_REPARSE_POINT = 0x400
-                    attrs = ctypes.windll.kernel32.GetFileAttributesW(str(path))
+                    attrs = ctypes.windll.kernel32.GetFileAttributesW(str(path))  # type: ignore[attr-defined]
                     return attrs != -1 and (attrs & FILE_ATTRIBUTE_REPARSE_POINT)
                 except Exception as e:
                     logger.error(f"Error checking Windows reparse point: {e}")
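As an aside, the same reparse-point test can be written without ctypes; this is only an illustrative alternative, not what the commit uses, and it relies on the Windows-only st_file_attributes field available since Python 3.5:

import os
import stat

def is_reparse_point(path: str) -> bool:
    # st_file_attributes only exists on Windows stat results; elsewhere we fall through.
    try:
        attrs = os.lstat(path).st_file_attributes
    except (OSError, AttributeError):
        return False
    return bool(attrs & stat.FILE_ATTRIBUTE_REPARSE_POINT)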
@@ -233,18 +257,19 @@ class Config:
|
||||
"""Check if a directory entry is a symlink, including Windows junctions."""
|
||||
if entry.is_symlink():
|
||||
return True
|
||||
if platform.system() == 'Windows':
|
||||
if platform.system() == "Windows":
|
||||
try:
|
||||
import ctypes
|
||||
|
||||
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
|
||||
attrs = ctypes.windll.kernel32.GetFileAttributesW(entry.path)
|
||||
attrs = ctypes.windll.kernel32.GetFileAttributesW(entry.path) # type: ignore[attr-defined]
|
||||
return attrs != -1 and (attrs & FILE_ATTRIBUTE_REPARSE_POINT)
|
||||
except Exception:
|
||||
pass
|
||||
return False
|
||||
|
||||
def _normalize_path(self, path: str) -> str:
|
||||
return os.path.normpath(path).replace(os.sep, '/')
|
||||
return os.path.normpath(path).replace(os.sep, "/")
|
||||
|
||||
def _get_symlink_cache_path(self) -> Path:
|
||||
canonical_path = get_cache_file_path(CacheType.SYMLINK, create_dir=True)
|
||||
@@ -278,19 +303,18 @@ class Config:
|
||||
if self._entry_is_symlink(entry):
|
||||
try:
|
||||
target = os.path.realpath(entry.path)
|
||||
direct_symlinks.append([
|
||||
self._normalize_path(entry.path),
|
||||
self._normalize_path(target)
|
||||
])
|
||||
direct_symlinks.append(
|
||||
[
|
||||
self._normalize_path(entry.path),
|
||||
self._normalize_path(target),
|
||||
]
|
||||
)
|
||||
except OSError:
|
||||
pass
|
||||
except (OSError, PermissionError):
|
||||
pass
|
||||
|
||||
return {
|
||||
"roots": unique_roots,
|
||||
"direct_symlinks": sorted(direct_symlinks)
|
||||
}
|
||||
return {"roots": unique_roots, "direct_symlinks": sorted(direct_symlinks)}
|
||||
|
||||
def _initialize_symlink_mappings(self) -> None:
|
||||
start = time.perf_counter()
|
||||
@@ -307,10 +331,14 @@ class Config:
|
||||
cached_fingerprint = self._cached_fingerprint
|
||||
|
||||
# Check 1: First-level symlinks unchanged (catches new symlinks at root)
|
||||
fingerprint_valid = cached_fingerprint and current_fingerprint == cached_fingerprint
|
||||
fingerprint_valid = (
|
||||
cached_fingerprint and current_fingerprint == cached_fingerprint
|
||||
)
|
||||
|
||||
# Check 2: All cached mappings still valid (catches changes at any depth)
|
||||
mappings_valid = self._validate_cached_mappings() if fingerprint_valid else False
|
||||
mappings_valid = (
|
||||
self._validate_cached_mappings() if fingerprint_valid else False
|
||||
)
|
||||
|
||||
if fingerprint_valid and mappings_valid:
|
||||
return
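To spell out the caching logic here: the cheap fingerprint comparison gates the more expensive per-mapping validation, and the cache is reused only when both pass. A condensed sketch, with names assumed from the surrounding diff:

def cache_is_fresh(current_fingerprint, cached_fingerprint, validate_mappings) -> bool:
    # Check 1: cheap comparison of the first-level symlink fingerprints.
    fingerprint_valid = bool(cached_fingerprint) and current_fingerprint == cached_fingerprint
    # Check 2: deep validation is only paid for when the fingerprint still matches.
    mappings_valid = validate_mappings() if fingerprint_valid else False
    return fingerprint_valid and mappings_valid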
@@ -370,7 +398,9 @@ class Config:
|
||||
for target, link in cached_mappings.items():
|
||||
if not isinstance(target, str) or not isinstance(link, str):
|
||||
continue
|
||||
normalized_mappings[self._normalize_path(target)] = self._normalize_path(link)
|
||||
normalized_mappings[self._normalize_path(target)] = self._normalize_path(
|
||||
link
|
||||
)
|
||||
|
||||
self._path_mappings = normalized_mappings
|
||||
|
||||
@@ -391,7 +421,9 @@ class Config:
|
||||
parent_dir = loaded_path.parent
|
||||
if parent_dir.name == "cache" and not any(parent_dir.iterdir()):
|
||||
parent_dir.rmdir()
|
||||
logger.info("Removed empty legacy cache directory: %s", parent_dir)
|
||||
logger.info(
|
||||
"Removed empty legacy cache directory: %s", parent_dir
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -402,7 +434,9 @@ class Config:
|
||||
exc,
|
||||
)
|
||||
else:
|
||||
logger.info("Symlink cache loaded with %d mappings", len(self._path_mappings))
|
||||
logger.info(
|
||||
"Symlink cache loaded with %d mappings", len(self._path_mappings)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -414,7 +448,7 @@ class Config:
|
||||
"""
|
||||
for target, link in self._path_mappings.items():
|
||||
# Convert normalized paths back to OS paths
|
||||
link_path = link.replace('/', os.sep)
|
||||
link_path = link.replace("/", os.sep)
|
||||
|
||||
# Check if symlink still exists
|
||||
if not self._is_link(link_path):
|
||||
@@ -427,7 +461,9 @@ class Config:
|
||||
if actual_target != target:
|
||||
logger.debug(
|
||||
"Symlink target changed: %s -> %s (cached: %s)",
|
||||
link_path, actual_target, target
|
||||
link_path,
|
||||
actual_target,
|
||||
target,
|
||||
)
|
||||
return False
|
||||
except OSError:
|
||||
@@ -446,7 +482,11 @@ class Config:
|
||||
try:
|
||||
with cache_path.open("w", encoding="utf-8") as handle:
|
||||
json.dump(payload, handle, ensure_ascii=False, indent=2)
|
||||
logger.debug("Symlink cache saved to %s with %d mappings", cache_path, len(self._path_mappings))
|
||||
logger.debug(
|
||||
"Symlink cache saved to %s with %d mappings",
|
||||
cache_path,
|
||||
len(self._path_mappings),
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.info("Failed to write symlink cache %s: %s", cache_path, exc)
|
||||
|
||||
@@ -458,7 +498,7 @@ class Config:
|
||||
at the root level only (not nested symlinks in subdirectories).
|
||||
"""
|
||||
start = time.perf_counter()
|
||||
|
||||
|
||||
# Reset mappings before rescanning to avoid stale entries
|
||||
self._path_mappings.clear()
|
||||
self._seed_root_symlink_mappings()
|
||||
@@ -472,7 +512,7 @@ class Config:
|
||||
|
||||
def _scan_first_level_symlinks(self, root: str):
|
||||
"""Scan only the first level of a directory for symlinks.
|
||||
|
||||
|
||||
This avoids traversing the entire directory tree which can be extremely
|
||||
slow for large model collections. Only symlinks directly under the root
|
||||
are detected.
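A minimal sketch of this first-level-only scan, assuming a caller-supplied record callback (the helper name is hypothetical, not part of the commit):

import os

def scan_first_level_symlinks(root: str, record) -> None:
    # Only direct children of the root are inspected; subdirectories are not traversed.
    with os.scandir(root) as entries:
        for entry in entries:
            if entry.is_dir(follow_symlinks=True) and entry.is_symlink():
                record(entry.path, os.path.realpath(entry.path))

Note that plain is_symlink() misses Windows junctions, which is why the diff routes the check through _entry_is_symlink above.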
@@ -494,13 +534,13 @@ class Config:
|
||||
self.add_path_mapping(entry.path, target_path)
|
||||
except Exception as inner_exc:
|
||||
logger.debug(
|
||||
"Error processing directory entry %s: %s", entry.path, inner_exc
|
||||
"Error processing directory entry %s: %s",
|
||||
entry.path,
|
||||
inner_exc,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error scanning links in {root}: {e}")
|
||||
|
||||
|
||||
|
||||
def add_path_mapping(self, link_path: str, target_path: str):
|
||||
"""Add a symbolic link path mapping
|
||||
target_path: actual target path
|
||||
@@ -594,41 +634,46 @@ class Config:
|
||||
preview_roots.update(self._expand_preview_root(target))
|
||||
preview_roots.update(self._expand_preview_root(link))
|
||||
|
||||
self._preview_root_paths = {path for path in preview_roots if path.is_absolute()}
|
||||
self._preview_root_paths = {
|
||||
path for path in preview_roots if path.is_absolute()
|
||||
}
|
||||
logger.debug(
|
||||
"Preview roots rebuilt: %d paths from %d lora roots (%d extra), %d checkpoint roots (%d extra), %d embedding roots (%d extra), %d symlink mappings",
|
||||
len(self._preview_root_paths),
|
||||
len(self.loras_roots or []), len(self.extra_loras_roots or []),
|
||||
len(self.base_models_roots or []), len(self.extra_checkpoints_roots or []),
|
||||
len(self.embeddings_roots or []), len(self.extra_embeddings_roots or []),
|
||||
len(self.loras_roots or []),
|
||||
len(self.extra_loras_roots or []),
|
||||
len(self.base_models_roots or []),
|
||||
len(self.extra_checkpoints_roots or []),
|
||||
len(self.embeddings_roots or []),
|
||||
len(self.extra_embeddings_roots or []),
|
||||
len(self._path_mappings),
|
||||
)
     def map_path_to_link(self, path: str) -> str:
         """Map a target path back to its symbolic link path"""
-        normalized_path = os.path.normpath(path).replace(os.sep, '/')
+        normalized_path = os.path.normpath(path).replace(os.sep, "/")
         # Check if the path is contained in any mapped target path
         for target_path, link_path in self._path_mappings.items():
             # Match whole path components to avoid prefix collisions (e.g., /a/b vs /a/bc)
             if normalized_path == target_path:
                 return link_path

-            if normalized_path.startswith(target_path + '/'):
+            if normalized_path.startswith(target_path + "/"):
                 # If the path starts with the target path, replace with link path
                 mapped_path = normalized_path.replace(target_path, link_path, 1)
                 return mapped_path
         return normalized_path

     def map_link_to_path(self, link_path: str) -> str:
         """Map a symbolic link path back to the actual path"""
-        normalized_link = os.path.normpath(link_path).replace(os.sep, '/')
+        normalized_link = os.path.normpath(link_path).replace(os.sep, "/")
         # Check if the path is contained in any mapped target path
         for target_path, link_path_mapped in self._path_mappings.items():
             # Match whole path components
             if normalized_link == link_path_mapped:
                 return target_path

-            if normalized_link.startswith(link_path_mapped + '/'):
+            if normalized_link.startswith(link_path_mapped + "/"):
                 # If the path starts with the link path, replace with actual path
                 mapped_path = normalized_link.replace(link_path_mapped, target_path, 1)
                 return mapped_path
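A small usage sketch of the mapping logic above, with made-up paths, showing why whole-component matching matters (a sibling folder that merely shares the prefix is left untouched):

mappings = {"/mnt/storage/loras": "C:/ComfyUI/models/loras"}

def map_path_to_link(path: str) -> str:
    for target, link in mappings.items():
        if path == target or path.startswith(target + "/"):
            return path.replace(target, link, 1)
    return path

print(map_path_to_link("/mnt/storage/loras/sdxl/foo.safetensors"))
# -> C:/ComfyUI/models/loras/sdxl/foo.safetensors
print(map_path_to_link("/mnt/storage/loras_backup/foo.safetensors"))
# -> /mnt/storage/loras_backup/foo.safetensors (unchanged: not a whole-component match)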
@@ -641,8 +686,8 @@ class Config:
|
||||
continue
|
||||
if not os.path.exists(path):
|
||||
continue
|
||||
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
|
||||
normalized = os.path.normpath(path).replace(os.sep, '/')
|
||||
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, "/")
|
||||
normalized = os.path.normpath(path).replace(os.sep, "/")
|
||||
if real_path not in dedup:
|
||||
dedup[real_path] = normalized
|
||||
return dedup
|
||||
@@ -652,7 +697,9 @@ class Config:
|
||||
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
|
||||
|
||||
for original_path in unique_paths:
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
if real_path != original_path:
|
||||
self.add_path_mapping(original_path, real_path)
|
||||
|
||||
@@ -674,7 +721,7 @@ class Config:
|
||||
"Please fix your ComfyUI path configuration to separate these folders. "
|
||||
"Falling back to 'checkpoints' for backward compatibility. "
|
||||
"Overlapping real paths: %s",
|
||||
[checkpoint_map.get(rp, rp) for rp in overlapping_real_paths]
|
||||
[checkpoint_map.get(rp, rp) for rp in overlapping_real_paths],
|
||||
)
|
||||
# Remove overlapping paths from unet_map to prioritize checkpoints
|
||||
for rp in overlapping_real_paths:
|
||||
@@ -694,7 +741,9 @@ class Config:
|
||||
self.unet_roots = [p for p in unique_paths if p in unet_values]
|
||||
|
||||
for original_path in unique_paths:
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
if real_path != original_path:
|
||||
self.add_path_mapping(original_path, real_path)
|
||||
|
||||
@@ -705,7 +754,9 @@ class Config:
|
||||
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
|
||||
|
||||
for original_path in unique_paths:
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
|
||||
real_path = os.path.normpath(os.path.realpath(original_path)).replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
if real_path != original_path:
|
||||
self.add_path_mapping(original_path, real_path)
|
||||
|
||||
@@ -719,28 +770,66 @@ class Config:
|
||||
self._path_mappings.clear()
|
||||
self._preview_root_paths = set()
|
||||
|
||||
lora_paths = folder_paths.get('loras', []) or []
|
||||
checkpoint_paths = folder_paths.get('checkpoints', []) or []
|
||||
unet_paths = folder_paths.get('unet', []) or []
|
||||
embedding_paths = folder_paths.get('embeddings', []) or []
|
||||
lora_paths = folder_paths.get("loras", []) or []
|
||||
checkpoint_paths = folder_paths.get("checkpoints", []) or []
|
||||
unet_paths = folder_paths.get("unet", []) or []
|
||||
embedding_paths = folder_paths.get("embeddings", []) or []
|
||||
|
||||
self.loras_roots = self._prepare_lora_paths(lora_paths)
|
||||
self.base_models_roots = self._prepare_checkpoint_paths(checkpoint_paths, unet_paths)
|
||||
self.base_models_roots = self._prepare_checkpoint_paths(
|
||||
checkpoint_paths, unet_paths
|
||||
)
|
||||
self.embeddings_roots = self._prepare_embedding_paths(embedding_paths)
|
||||
|
||||
# Process extra paths (only for LoRA Manager, not shared with ComfyUI)
|
||||
extra_paths = extra_folder_paths or {}
|
||||
extra_lora_paths = extra_paths.get('loras', []) or []
|
||||
extra_checkpoint_paths = extra_paths.get('checkpoints', []) or []
|
||||
extra_unet_paths = extra_paths.get('unet', []) or []
|
||||
extra_embedding_paths = extra_paths.get('embeddings', []) or []
|
||||
extra_lora_paths = extra_paths.get("loras", []) or []
|
||||
extra_checkpoint_paths = extra_paths.get("checkpoints", []) or []
|
||||
extra_unet_paths = extra_paths.get("unet", []) or []
|
||||
extra_embedding_paths = extra_paths.get("embeddings", []) or []
|
||||
|
||||
self.extra_loras_roots = self._prepare_lora_paths(extra_lora_paths)
|
||||
self.extra_checkpoints_roots = self._prepare_checkpoint_paths(extra_checkpoint_paths, extra_unet_paths)
|
||||
self.extra_embeddings_roots = self._prepare_embedding_paths(extra_embedding_paths)
|
||||
# extra_unet_roots is set by _prepare_checkpoint_paths (access unet_roots before it's reset)
|
||||
unet_roots_value: List[str] = getattr(self, 'unet_roots', None) or []
|
||||
self.extra_unet_roots = unet_roots_value
|
||||
# Save main paths before processing extra paths (_prepare_checkpoint_paths overwrites them)
|
||||
saved_checkpoints_roots = self.checkpoints_roots
|
||||
saved_unet_roots = self.unet_roots
|
||||
self.extra_checkpoints_roots = self._prepare_checkpoint_paths(
|
||||
extra_checkpoint_paths, extra_unet_paths
|
||||
)
|
||||
self.extra_unet_roots = (
|
||||
self.unet_roots if self.unet_roots is not None else []
|
||||
) # unet_roots was set by _prepare_checkpoint_paths
|
||||
# Restore main paths
|
||||
self.checkpoints_roots = saved_checkpoints_roots
|
||||
self.unet_roots = saved_unet_roots
|
||||
self.extra_embeddings_roots = self._prepare_embedding_paths(
|
||||
extra_embedding_paths
|
||||
)
|
||||
|
||||
# Log extra folder paths
|
||||
if self.extra_loras_roots:
|
||||
logger.info(
|
||||
"Found extra LoRA roots:"
|
||||
+ "\n - "
|
||||
+ "\n - ".join(self.extra_loras_roots)
|
||||
)
|
||||
if self.extra_checkpoints_roots:
|
||||
logger.info(
|
||||
"Found extra checkpoint roots:"
|
||||
+ "\n - "
|
||||
+ "\n - ".join(self.extra_checkpoints_roots)
|
||||
)
|
||||
if self.extra_unet_roots:
|
||||
logger.info(
|
||||
"Found extra diffusion model roots:"
|
||||
+ "\n - "
|
||||
+ "\n - ".join(self.extra_unet_roots)
|
||||
)
|
||||
if self.extra_embeddings_roots:
|
||||
logger.info(
|
||||
"Found extra embedding roots:"
|
||||
+ "\n - "
|
||||
+ "\n - ".join(self.extra_embeddings_roots)
|
||||
)
|
||||
|
||||
self._initialize_symlink_mappings()
|
||||
|
||||
@@ -749,7 +838,10 @@ class Config:
|
||||
try:
|
||||
raw_paths = folder_paths.get_folder_paths("loras")
|
||||
unique_paths = self._prepare_lora_paths(raw_paths)
|
||||
logger.info("Found LoRA roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
|
||||
logger.info(
|
||||
"Found LoRA roots:"
|
||||
+ ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]")
|
||||
)
|
||||
|
||||
if not unique_paths:
|
||||
logger.warning("No valid loras folders found in ComfyUI configuration")
|
||||
@@ -765,12 +857,19 @@ class Config:
|
||||
try:
|
||||
raw_checkpoint_paths = folder_paths.get_folder_paths("checkpoints")
|
||||
raw_unet_paths = folder_paths.get_folder_paths("unet")
|
||||
unique_paths = self._prepare_checkpoint_paths(raw_checkpoint_paths, raw_unet_paths)
|
||||
unique_paths = self._prepare_checkpoint_paths(
|
||||
raw_checkpoint_paths, raw_unet_paths
|
||||
)
|
||||
|
||||
logger.info("Found checkpoint roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
|
||||
logger.info(
|
||||
"Found checkpoint roots:"
|
||||
+ ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]")
|
||||
)
|
||||
|
||||
if not unique_paths:
|
||||
logger.warning("No valid checkpoint folders found in ComfyUI configuration")
|
||||
logger.warning(
|
||||
"No valid checkpoint folders found in ComfyUI configuration"
|
||||
)
|
||||
return []
|
||||
|
||||
return unique_paths
|
||||
@@ -783,10 +882,15 @@ class Config:
|
||||
try:
|
||||
raw_paths = folder_paths.get_folder_paths("embeddings")
|
||||
unique_paths = self._prepare_embedding_paths(raw_paths)
|
||||
logger.info("Found embedding roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
|
||||
logger.info(
|
||||
"Found embedding roots:"
|
||||
+ ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]")
|
||||
)
|
||||
|
||||
if not unique_paths:
|
||||
logger.warning("No valid embeddings folders found in ComfyUI configuration")
|
||||
logger.warning(
|
||||
"No valid embeddings folders found in ComfyUI configuration"
|
||||
)
|
||||
return []
|
||||
|
||||
return unique_paths
|
||||
@@ -798,13 +902,13 @@ class Config:
|
||||
if not preview_path:
|
||||
return ""
|
||||
|
||||
normalized = os.path.normpath(preview_path).replace(os.sep, '/')
|
||||
encoded_path = urllib.parse.quote(normalized, safe='')
|
||||
return f'/api/lm/previews?path={encoded_path}'
|
||||
normalized = os.path.normpath(preview_path).replace(os.sep, "/")
|
||||
encoded_path = urllib.parse.quote(normalized, safe="")
|
||||
return f"/api/lm/previews?path={encoded_path}"
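For instance (hypothetical file), a preview stored at /data/models/loras/previews/foo.png would be normalized and percent-encoded into the route URL roughly like this:

import os
import urllib.parse

preview_path = "/data/models/loras/previews/foo.png"  # hypothetical preview file
normalized = os.path.normpath(preview_path).replace(os.sep, "/")
encoded_path = urllib.parse.quote(normalized, safe="")
print(f"/api/lm/previews?path={encoded_path}")
# -> /api/lm/previews?path=%2Fdata%2Fmodels%2Floras%2Fpreviews%2Ffoo.png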
def is_preview_path_allowed(self, preview_path: str) -> bool:
|
||||
"""Return ``True`` if ``preview_path`` is within an allowed directory.
|
||||
|
||||
|
||||
If the path is initially rejected, attempts to discover deep symlinks
|
||||
that were not scanned during initialization. If a symlink is found,
|
||||
updates the in-memory path mappings and retries the check.
|
||||
@@ -875,14 +979,18 @@ class Config:
|
||||
normalized_link = self._normalize_path(str(current))
|
||||
|
||||
self._path_mappings[normalized_target] = normalized_link
|
||||
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
|
||||
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
|
||||
self._preview_root_paths.update(
|
||||
self._expand_preview_root(normalized_target)
|
||||
)
|
||||
self._preview_root_paths.update(
|
||||
self._expand_preview_root(normalized_link)
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
"Discovered deep symlink: %s -> %s (preview path: %s)",
|
||||
normalized_link,
|
||||
normalized_target,
|
||||
preview_path
|
||||
preview_path,
|
||||
)
|
||||
|
||||
return True
|
||||
@@ -900,8 +1008,16 @@ class Config:
|
||||
|
||||
def apply_library_settings(self, library_config: Mapping[str, object]) -> None:
|
||||
"""Update runtime paths to match the provided library configuration."""
|
||||
folder_paths = library_config.get('folder_paths') if isinstance(library_config, Mapping) else {}
|
||||
extra_folder_paths = library_config.get('extra_folder_paths') if isinstance(library_config, Mapping) else None
|
||||
folder_paths = (
|
||||
library_config.get("folder_paths")
|
||||
if isinstance(library_config, Mapping)
|
||||
else {}
|
||||
)
|
||||
extra_folder_paths = (
|
||||
library_config.get("extra_folder_paths")
|
||||
if isinstance(library_config, Mapping)
|
||||
else None
|
||||
)
|
||||
if not isinstance(folder_paths, Mapping):
|
||||
folder_paths = {}
|
||||
if not isinstance(extra_folder_paths, Mapping):
|
||||
@@ -911,9 +1027,12 @@ class Config:
|
||||
|
||||
logger.info(
|
||||
"Applied library settings with %d lora roots (%d extra), %d checkpoint roots (%d extra), and %d embedding roots (%d extra)",
|
||||
len(self.loras_roots or []), len(self.extra_loras_roots or []),
|
||||
len(self.base_models_roots or []), len(self.extra_checkpoints_roots or []),
|
||||
len(self.embeddings_roots or []), len(self.extra_embeddings_roots or []),
|
||||
len(self.loras_roots or []),
|
||||
len(self.extra_loras_roots or []),
|
||||
len(self.base_models_roots or []),
|
||||
len(self.extra_checkpoints_roots or []),
|
||||
len(self.embeddings_roots or []),
|
||||
len(self.extra_embeddings_roots or []),
|
||||
)
|
||||
|
||||
def get_library_registry_snapshot(self) -> Dict[str, object]:
|
||||
@@ -933,5 +1052,6 @@ class Config:
|
||||
logger.debug("Failed to collect library registry snapshot: %s", exc)
|
||||
return {"active_library": "", "libraries": {}}
|
||||
|
||||
|
||||
# Global config instance
|
||||
config = Config()
|
||||
|
||||
@@ -5,16 +5,22 @@ import logging
|
||||
 from .utils.logging_config import setup_logging

 # Check if we're in standalone mode
-standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
+standalone_mode = (
+    os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
+    or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
+)

 # Only setup logging prefix if not in standalone mode
 if not standalone_mode:
     setup_logging()

-from server import PromptServer # type: ignore
+from server import PromptServer  # type: ignore

 from .config import config
-from .services.model_service_factory import ModelServiceFactory, register_default_model_types
+from .services.model_service_factory import (
+    ModelServiceFactory,
+    register_default_model_types,
+)
 from .routes.recipe_routes import RecipeRoutes
 from .routes.stats_routes import StatsRoutes
 from .routes.update_routes import UpdateRoutes
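As a usage note, standalone mode here is driven entirely by environment variables, so it can be forced before launch; a simplified sketch that ignores the telemetry fallback shown above:

import os

os.environ["LORA_MANAGER_STANDALONE"] = "1"  # variable name taken from the diff above

standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
print(standalone_mode)  # True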
@@ -61,9 +67,10 @@ class _SettingsProxy:
|
||||
|
||||
settings = _SettingsProxy()
|
||||
|
||||
|
||||
class LoraManager:
|
||||
"""Main entry point for LoRA Manager plugin"""
|
||||
|
||||
|
||||
@classmethod
|
||||
def add_routes(cls):
|
||||
"""Initialize and register all routes using the new refactored architecture"""
|
||||
@@ -76,7 +83,8 @@ class LoraManager:
|
||||
(
|
||||
idx
|
||||
for idx, middleware in enumerate(app.middlewares)
|
||||
if getattr(middleware, "__name__", "") == "block_external_middleware"
|
||||
if getattr(middleware, "__name__", "")
|
||||
== "block_external_middleware"
|
||||
),
|
||||
None,
|
||||
)
|
||||
@@ -84,7 +92,9 @@ class LoraManager:
|
||||
if block_middleware_index is None:
|
||||
app.middlewares.append(relax_csp_for_remote_media)
|
||||
else:
|
||||
app.middlewares.insert(block_middleware_index, relax_csp_for_remote_media)
|
||||
app.middlewares.insert(
|
||||
block_middleware_index, relax_csp_for_remote_media
|
||||
)
|
||||
|
||||
# Increase allowed header sizes so browsers with large localhost cookie
|
||||
# jars (multiple UIs on 127.0.0.1) don't trip aiohttp's 8KB default
|
||||
@@ -105,7 +115,7 @@ class LoraManager:
|
||||
app._handler_args = updated_handler_args
|
||||
|
||||
# Configure aiohttp access logger to be less verbose
|
||||
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
|
||||
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
|
||||
|
||||
# Add specific suppression for connection reset errors
|
||||
class ConnectionResetFilter(logging.Filter):
|
||||
@@ -124,50 +134,89 @@ class LoraManager:
|
||||
asyncio_logger.addFilter(ConnectionResetFilter())
|
||||
|
||||
# Add static route for example images if the path exists in settings
|
||||
example_images_path = settings.get('example_images_path')
|
||||
example_images_path = settings.get("example_images_path")
|
||||
logger.info(f"Example images path: {example_images_path}")
|
||||
if example_images_path and os.path.exists(example_images_path):
|
||||
app.router.add_static('/example_images_static', example_images_path)
|
||||
logger.info(f"Added static route for example images: /example_images_static -> {example_images_path}")
|
||||
app.router.add_static("/example_images_static", example_images_path)
|
||||
logger.info(
|
||||
f"Added static route for example images: /example_images_static -> {example_images_path}"
|
||||
)
|
||||
|
||||
# Add static route for locales JSON files
|
||||
if os.path.exists(config.i18n_path):
|
||||
app.router.add_static('/locales', config.i18n_path)
|
||||
logger.info(f"Added static route for locales: /locales -> {config.i18n_path}")
|
||||
app.router.add_static("/locales", config.i18n_path)
|
||||
logger.info(
|
||||
f"Added static route for locales: /locales -> {config.i18n_path}"
|
||||
)
|
||||
|
||||
# Add static route for plugin assets
|
||||
app.router.add_static('/loras_static', config.static_path)
|
||||
|
||||
app.router.add_static("/loras_static", config.static_path)
|
||||
|
||||
# Register default model types with the factory
|
||||
register_default_model_types()
|
||||
|
||||
|
||||
# Setup all model routes using the factory
|
||||
ModelServiceFactory.setup_all_routes(app)
|
||||
|
||||
|
||||
# Setup non-model-specific routes
|
||||
stats_routes = StatsRoutes()
|
||||
stats_routes.setup_routes(app)
|
||||
RecipeRoutes.setup_routes(app)
|
||||
UpdateRoutes.setup_routes(app)
|
||||
UpdateRoutes.setup_routes(app)
|
||||
MiscRoutes.setup_routes(app)
|
||||
ExampleImagesRoutes.setup_routes(app, ws_manager=ws_manager)
|
||||
PreviewRoutes.setup_routes(app)
|
||||
|
||||
|
||||
# Setup WebSocket routes that are shared across all model types
|
||||
app.router.add_get('/ws/fetch-progress', ws_manager.handle_connection)
|
||||
app.router.add_get('/ws/download-progress', ws_manager.handle_download_connection)
|
||||
app.router.add_get('/ws/init-progress', ws_manager.handle_init_connection)
|
||||
|
||||
# Schedule service initialization
|
||||
app.router.add_get("/ws/fetch-progress", ws_manager.handle_connection)
|
||||
app.router.add_get(
|
||||
"/ws/download-progress", ws_manager.handle_download_connection
|
||||
)
|
||||
app.router.add_get("/ws/init-progress", ws_manager.handle_init_connection)
|
||||
|
||||
# Schedule service initialization
|
||||
app.on_startup.append(lambda app: cls._initialize_services())
|
||||
|
||||
|
||||
# Add cleanup
|
||||
app.on_shutdown.append(cls._cleanup)
|
||||
|
||||
|
||||
@classmethod
|
||||
async def _initialize_services(cls):
|
||||
"""Initialize all services using the ServiceRegistry"""
|
||||
try:
|
||||
# Apply library settings to load extra folder paths before scanning
|
||||
# Only apply if extra paths haven't been loaded yet (preserves test mocks)
|
||||
try:
|
||||
from .services.settings_manager import get_settings_manager
|
||||
|
||||
settings_manager = get_settings_manager()
|
||||
library_name = settings_manager.get_active_library_name()
|
||||
libraries = settings_manager.get_libraries()
|
||||
if library_name and library_name in libraries:
|
||||
library_config = libraries[library_name]
|
||||
# Only apply settings if extra paths are not already configured
|
||||
# This preserves values set by tests via monkeypatch
|
||||
extra_paths = library_config.get("extra_folder_paths", {})
|
||||
has_extra_paths = (
|
||||
config.extra_loras_roots
|
||||
or config.extra_checkpoints_roots
|
||||
or config.extra_unet_roots
|
||||
or config.extra_embeddings_roots
|
||||
)
|
||||
if not has_extra_paths and any(extra_paths.values()):
|
||||
config.apply_library_settings(library_config)
|
||||
logger.info(
|
||||
"Applied library settings for '%s' with extra paths: loras=%s, checkpoints=%s, embeddings=%s",
|
||||
library_name,
|
||||
extra_paths.get("loras", []),
|
||||
extra_paths.get("checkpoints", []),
|
||||
extra_paths.get("embeddings", []),
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Failed to apply library settings during initialization: %s", exc
|
||||
)
|
||||
|
||||
# Initialize CivitaiClient first to ensure it's ready for other services
|
||||
await ServiceRegistry.get_civitai_client()
|
||||
|
||||
@@ -175,163 +224,200 @@ class LoraManager:
|
||||
await ServiceRegistry.get_download_manager()
|
||||
|
||||
from .services.metadata_service import initialize_metadata_providers
|
||||
|
||||
await initialize_metadata_providers()
|
||||
|
||||
|
||||
# Initialize WebSocket manager
|
||||
await ServiceRegistry.get_websocket_manager()
|
||||
|
||||
|
||||
# Initialize scanners in background
|
||||
lora_scanner = await ServiceRegistry.get_lora_scanner()
|
||||
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
||||
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
|
||||
|
||||
|
||||
# Initialize recipe scanner if needed
|
||||
recipe_scanner = await ServiceRegistry.get_recipe_scanner()
|
||||
|
||||
|
||||
# Create low-priority initialization tasks
|
||||
init_tasks = [
|
||||
asyncio.create_task(lora_scanner.initialize_in_background(), name='lora_cache_init'),
|
||||
asyncio.create_task(checkpoint_scanner.initialize_in_background(), name='checkpoint_cache_init'),
|
||||
asyncio.create_task(embedding_scanner.initialize_in_background(), name='embedding_cache_init'),
|
||||
asyncio.create_task(recipe_scanner.initialize_in_background(), name='recipe_cache_init')
|
||||
asyncio.create_task(
|
||||
lora_scanner.initialize_in_background(), name="lora_cache_init"
|
||||
),
|
||||
asyncio.create_task(
|
||||
checkpoint_scanner.initialize_in_background(),
|
||||
name="checkpoint_cache_init",
|
||||
),
|
||||
asyncio.create_task(
|
||||
embedding_scanner.initialize_in_background(),
|
||||
name="embedding_cache_init",
|
||||
),
|
||||
asyncio.create_task(
|
||||
recipe_scanner.initialize_in_background(), name="recipe_cache_init"
|
||||
),
|
||||
]
|
||||
|
||||
await ExampleImagesMigration.check_and_run_migrations()
|
||||
|
||||
|
||||
# Schedule post-initialization tasks to run after scanners complete
|
||||
asyncio.create_task(
|
||||
cls._run_post_initialization_tasks(init_tasks),
|
||||
name='post_init_tasks'
|
||||
cls._run_post_initialization_tasks(init_tasks), name="post_init_tasks"
|
||||
)
|
||||
|
||||
logger.debug("LoRA Manager: All services initialized and background tasks scheduled")
|
||||
|
||||
|
||||
logger.debug(
|
||||
"LoRA Manager: All services initialized and background tasks scheduled"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"LoRA Manager: Error initializing services: {e}", exc_info=True)
|
||||
|
||||
logger.error(
|
||||
f"LoRA Manager: Error initializing services: {e}", exc_info=True
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def _run_post_initialization_tasks(cls, init_tasks):
|
||||
"""Run post-initialization tasks after all scanners complete"""
|
||||
try:
|
||||
logger.debug("LoRA Manager: Waiting for scanner initialization to complete...")
|
||||
|
||||
logger.debug(
|
||||
"LoRA Manager: Waiting for scanner initialization to complete..."
|
||||
)
|
||||
|
||||
# Wait for all scanner initialization tasks to complete
|
||||
await asyncio.gather(*init_tasks, return_exceptions=True)
|
||||
|
||||
logger.debug("LoRA Manager: Scanner initialization completed, starting post-initialization tasks...")
|
||||
|
||||
logger.debug(
|
||||
"LoRA Manager: Scanner initialization completed, starting post-initialization tasks..."
|
||||
)
|
||||
|
||||
# Run post-initialization tasks
|
||||
post_tasks = [
|
||||
asyncio.create_task(cls._cleanup_backup_files(), name='cleanup_bak_files'),
|
||||
asyncio.create_task(
|
||||
cls._cleanup_backup_files(), name="cleanup_bak_files"
|
||||
),
|
||||
# Add more post-initialization tasks here as needed
|
||||
# asyncio.create_task(cls._another_post_task(), name='another_task'),
|
||||
]
|
||||
|
||||
|
||||
# Run all post-initialization tasks
|
||||
results = await asyncio.gather(*post_tasks, return_exceptions=True)
|
||||
|
||||
|
||||
# Log results
|
||||
for i, result in enumerate(results):
|
||||
task_name = post_tasks[i].get_name()
|
||||
if isinstance(result, Exception):
|
||||
logger.error(f"Post-initialization task '{task_name}' failed: {result}")
|
||||
logger.error(
|
||||
f"Post-initialization task '{task_name}' failed: {result}"
|
||||
)
|
||||
else:
|
||||
logger.debug(f"Post-initialization task '{task_name}' completed successfully")
|
||||
|
||||
logger.debug(
|
||||
f"Post-initialization task '{task_name}' completed successfully"
|
||||
)
|
||||
|
||||
logger.debug("LoRA Manager: All post-initialization tasks completed")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"LoRA Manager: Error in post-initialization tasks: {e}", exc_info=True)
|
||||
|
||||
logger.error(
|
||||
f"LoRA Manager: Error in post-initialization tasks: {e}", exc_info=True
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def _cleanup_backup_files(cls):
|
||||
"""Clean up .bak files in all model roots"""
|
||||
try:
|
||||
logger.debug("Starting cleanup of .bak files in model directories...")
|
||||
|
||||
|
||||
# Collect all model roots
|
||||
all_roots = set()
|
||||
all_roots.update(config.loras_roots)
|
||||
all_roots.update(config.base_models_roots)
|
||||
all_roots.update(config.embeddings_roots)
|
||||
|
||||
all_roots.update(config.base_models_roots or [])
|
||||
all_roots.update(config.embeddings_roots or [])
|
||||
|
||||
total_deleted = 0
|
||||
total_size_freed = 0
|
||||
|
||||
|
||||
for root_path in all_roots:
|
||||
if not os.path.exists(root_path):
|
||||
continue
|
||||
|
||||
|
||||
try:
|
||||
deleted_count, size_freed = await cls._cleanup_backup_files_in_directory(root_path)
|
||||
(
|
||||
deleted_count,
|
||||
size_freed,
|
||||
) = await cls._cleanup_backup_files_in_directory(root_path)
|
||||
total_deleted += deleted_count
|
||||
total_size_freed += size_freed
|
||||
|
||||
|
||||
if deleted_count > 0:
|
||||
logger.debug(f"Cleaned up {deleted_count} .bak files in {root_path} (freed {size_freed / (1024*1024):.2f} MB)")
|
||||
|
||||
logger.debug(
|
||||
f"Cleaned up {deleted_count} .bak files in {root_path} (freed {size_freed / (1024 * 1024):.2f} MB)"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error cleaning up .bak files in {root_path}: {e}")
|
||||
|
||||
|
||||
# Yield control periodically
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
|
||||
if total_deleted > 0:
|
||||
logger.debug(f"Backup cleanup completed: removed {total_deleted} .bak files, freed {total_size_freed / (1024*1024):.2f} MB total")
|
||||
logger.debug(
|
||||
f"Backup cleanup completed: removed {total_deleted} .bak files, freed {total_size_freed / (1024 * 1024):.2f} MB total"
|
||||
)
|
||||
else:
|
||||
logger.debug("Backup cleanup completed: no .bak files found")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during backup file cleanup: {e}", exc_info=True)
|
||||
|
||||
|
||||
@classmethod
|
||||
async def _cleanup_backup_files_in_directory(cls, directory_path: str):
|
||||
"""Clean up .bak files in a specific directory recursively
|
||||
|
||||
|
||||
Args:
|
||||
directory_path: Path to the directory to clean
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple[int, int]: (number of files deleted, total size freed in bytes)
|
||||
"""
|
||||
deleted_count = 0
|
||||
size_freed = 0
|
||||
visited_paths = set()
|
||||
|
||||
|
||||
def cleanup_recursive(path):
|
||||
nonlocal deleted_count, size_freed
|
||||
|
||||
|
||||
try:
|
||||
real_path = os.path.realpath(path)
|
||||
if real_path in visited_paths:
|
||||
return
|
||||
visited_paths.add(real_path)
|
||||
|
||||
|
||||
with os.scandir(path) as it:
|
||||
for entry in it:
|
||||
try:
|
||||
if entry.is_file(follow_symlinks=True) and entry.name.endswith('.bak'):
|
||||
if entry.is_file(
|
||||
follow_symlinks=True
|
||||
) and entry.name.endswith(".bak"):
|
||||
file_size = entry.stat().st_size
|
||||
os.remove(entry.path)
|
||||
deleted_count += 1
|
||||
size_freed += file_size
|
||||
logger.debug(f"Deleted .bak file: {entry.path}")
|
||||
|
||||
|
||||
elif entry.is_dir(follow_symlinks=True):
|
||||
cleanup_recursive(entry.path)
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not delete .bak file {entry.path}: {e}")
|
||||
|
||||
logger.warning(
|
||||
f"Could not delete .bak file {entry.path}: {e}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error scanning directory {path} for .bak files: {e}")
|
||||
|
||||
|
||||
# Run the recursive cleanup in a thread pool to avoid blocking
|
||||
loop = asyncio.get_event_loop()
|
||||
await loop.run_in_executor(None, cleanup_recursive, directory_path)
|
||||
|
||||
|
||||
return deleted_count, size_freed
|
||||
|
||||
|
||||
@classmethod
|
||||
async def _cleanup_example_images_folders(cls):
|
||||
"""Invoke the example images cleanup service for manual execution."""
|
||||
@@ -339,21 +425,21 @@ class LoraManager:
|
||||
service = ExampleImagesCleanupService()
|
||||
result = await service.cleanup_example_image_folders()
|
||||
|
||||
if result.get('success'):
|
||||
if result.get("success"):
|
||||
logger.debug(
|
||||
"Manual example images cleanup completed: moved=%s",
|
||||
result.get('moved_total'),
|
||||
result.get("moved_total"),
|
||||
)
|
||||
elif result.get('partial_success'):
|
||||
elif result.get("partial_success"):
|
||||
logger.warning(
|
||||
"Manual example images cleanup partially succeeded: moved=%s failures=%s",
|
||||
result.get('moved_total'),
|
||||
result.get('move_failures'),
|
||||
result.get("moved_total"),
|
||||
result.get("move_failures"),
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
"Manual example images cleanup skipped or failed: %s",
|
||||
result.get('error', 'no changes'),
|
||||
result.get("error", "no changes"),
|
||||
)
|
||||
|
||||
return result
|
||||
@@ -361,9 +447,9 @@ class LoraManager:
|
||||
except Exception as e: # pragma: no cover - defensive guard
|
||||
logger.error(f"Error during example images cleanup: {e}", exc_info=True)
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e),
|
||||
'error_code': 'unexpected_error',
|
||||
"success": False,
|
||||
"error": str(e),
|
||||
"error_code": "unexpected_error",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -371,6 +457,6 @@ class LoraManager:
|
||||
"""Cleanup resources using ServiceRegistry"""
|
||||
try:
|
||||
logger.info("LoRA Manager: Cleaning up services")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during cleanup: {e}", exc_info=True)
|
||||
|
||||
@@ -4,7 +4,10 @@ import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Check if running in standalone mode
|
||||
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
standalone_mode = (
|
||||
os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
|
||||
or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
)
|
||||
|
||||
if not standalone_mode:
|
||||
from .metadata_hook import MetadataHook
|
||||
@@ -13,13 +16,13 @@ if not standalone_mode:
|
||||
def init():
|
||||
# Install hooks to collect metadata during execution
|
||||
MetadataHook.install()
|
||||
|
||||
|
||||
# Initialize registry
|
||||
registry = MetadataRegistry()
|
||||
|
||||
|
||||
logger.info("ComfyUI Metadata Collector initialized")
|
||||
|
||||
def get_metadata(prompt_id=None):
|
||||
|
||||
def get_metadata(prompt_id=None): # type: ignore[no-redef]
|
||||
"""Helper function to get metadata from the registry"""
|
||||
registry = MetadataRegistry()
|
||||
return registry.get_metadata(prompt_id)
|
||||
@@ -27,7 +30,7 @@ else:
|
||||
# Standalone mode - provide dummy implementations
|
||||
def init():
|
||||
logger.info("ComfyUI Metadata Collector disabled in standalone mode")
|
||||
|
||||
def get_metadata(prompt_id=None):
|
||||
|
||||
def get_metadata(prompt_id=None): # type: ignore[no-redef]
|
||||
"""Dummy implementation for standalone mode"""
|
||||
return {}
|
||||
|
||||
@@ -1,50 +1,54 @@
|
||||
import time
|
||||
from nodes import NODE_CLASS_MAPPINGS
|
||||
from nodes import NODE_CLASS_MAPPINGS # type: ignore
|
||||
from .node_extractors import NODE_EXTRACTORS, GenericNodeExtractor
|
||||
from .constants import METADATA_CATEGORIES, IMAGES
|
||||
|
||||
|
||||
class MetadataRegistry:
|
||||
"""A singleton registry to store and retrieve workflow metadata"""
|
||||
|
||||
_instance = None
|
||||
|
||||
|
||||
def __new__(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance._reset()
|
||||
return cls._instance
|
||||
|
||||
|
||||
def _reset(self):
|
||||
self.current_prompt_id = None
|
||||
self.current_prompt = None
|
||||
self.metadata = {}
|
||||
self.prompt_metadata = {}
|
||||
self.executed_nodes = set()
|
||||
|
||||
|
||||
# Node-level cache for metadata
|
||||
self.node_cache = {}
|
||||
|
||||
|
||||
# Limit the number of stored prompts
|
||||
self.max_prompt_history = 3
|
||||
|
||||
|
||||
# Categories we want to track and retrieve from cache
|
||||
self.metadata_categories = METADATA_CATEGORIES
|
||||
|
||||
|
||||
def _clean_old_prompts(self):
|
||||
"""Clean up old prompt metadata, keeping only recent ones"""
|
||||
if len(self.prompt_metadata) <= self.max_prompt_history:
|
||||
return
|
||||
|
||||
|
||||
# Sort all prompt_ids by timestamp
|
||||
sorted_prompts = sorted(
|
||||
self.prompt_metadata.keys(),
|
||||
key=lambda pid: self.prompt_metadata[pid].get("timestamp", 0)
|
||||
key=lambda pid: self.prompt_metadata[pid].get("timestamp", 0),
|
||||
)
|
||||
|
||||
|
||||
# Remove oldest records
|
||||
prompts_to_remove = sorted_prompts[:len(sorted_prompts) - self.max_prompt_history]
|
||||
prompts_to_remove = sorted_prompts[
|
||||
: len(sorted_prompts) - self.max_prompt_history
|
||||
]
|
||||
for pid in prompts_to_remove:
|
||||
del self.prompt_metadata[pid]
|
||||
|
||||
|
||||
def start_collection(self, prompt_id):
|
||||
"""Begin metadata collection for a new prompt"""
|
||||
self.current_prompt_id = prompt_id
|
||||
@@ -53,90 +57,96 @@ class MetadataRegistry:
|
||||
category: {} for category in METADATA_CATEGORIES
|
||||
}
|
||||
# Add additional metadata fields
|
||||
self.prompt_metadata[prompt_id].update({
|
||||
"execution_order": [],
|
||||
"current_prompt": None, # Will store the prompt object
|
||||
"timestamp": time.time()
|
||||
})
|
||||
|
||||
self.prompt_metadata[prompt_id].update(
|
||||
{
|
||||
"execution_order": [],
|
||||
"current_prompt": None, # Will store the prompt object
|
||||
"timestamp": time.time(),
|
||||
}
|
||||
)
|
||||
|
||||
# Clean up old prompt data
|
||||
self._clean_old_prompts()
|
||||
|
||||
|
||||
def set_current_prompt(self, prompt):
|
||||
"""Set the current prompt object reference"""
|
||||
self.current_prompt = prompt
|
||||
if self.current_prompt_id and self.current_prompt_id in self.prompt_metadata:
|
||||
# Store the prompt in the metadata for later relationship tracing
|
||||
self.prompt_metadata[self.current_prompt_id]["current_prompt"] = prompt
|
||||
|
||||
|
||||
def get_metadata(self, prompt_id=None):
|
||||
"""Get collected metadata for a prompt"""
|
||||
key = prompt_id if prompt_id is not None else self.current_prompt_id
|
||||
if key not in self.prompt_metadata:
|
||||
return {}
|
||||
|
||||
|
||||
metadata = self.prompt_metadata[key]
|
||||
|
||||
|
||||
# If we have a current prompt object, check for non-executed nodes
|
||||
prompt_obj = metadata.get("current_prompt")
|
||||
if prompt_obj and hasattr(prompt_obj, "original_prompt"):
|
||||
original_prompt = prompt_obj.original_prompt
|
||||
|
||||
|
||||
# Fill in missing metadata from cache for nodes that weren't executed
|
||||
self._fill_missing_metadata(key, original_prompt)
|
||||
|
||||
|
||||
return self.prompt_metadata.get(key, {})
|
||||
|
||||
|
||||
def _fill_missing_metadata(self, prompt_id, original_prompt):
|
||||
"""Fill missing metadata from cache for non-executed nodes"""
|
||||
if not original_prompt:
|
||||
return
|
||||
|
||||
|
||||
executed_nodes = self.executed_nodes
|
||||
metadata = self.prompt_metadata[prompt_id]
|
||||
|
||||
|
||||
# Iterate through nodes in the original prompt
|
||||
for node_id, node_data in original_prompt.items():
|
||||
# Skip if already executed in this run
|
||||
if node_id in executed_nodes:
|
||||
continue
|
||||
|
||||
|
||||
# Get the node type from the prompt (this is the key in NODE_CLASS_MAPPINGS)
|
||||
prompt_class_type = node_data.get("class_type")
|
||||
if not prompt_class_type:
|
||||
continue
|
||||
|
||||
|
||||
# Convert to actual class name (which is what we use in our cache)
|
||||
class_type = prompt_class_type
|
||||
if prompt_class_type in NODE_CLASS_MAPPINGS:
|
||||
class_obj = NODE_CLASS_MAPPINGS[prompt_class_type]
|
||||
class_type = class_obj.__name__
|
||||
|
||||
|
||||
# Create cache key using the actual class name
|
||||
cache_key = f"{node_id}:{class_type}"
|
||||
|
||||
|
||||
# Check if this node type is relevant for metadata collection
|
||||
if class_type in NODE_EXTRACTORS:
|
||||
# Check if we have cached metadata for this node
|
||||
if cache_key in self.node_cache:
|
||||
cached_data = self.node_cache[cache_key]
|
||||
|
||||
|
||||
# Apply cached metadata to the current metadata
|
||||
for category in self.metadata_categories:
|
||||
if category in cached_data and node_id in cached_data[category]:
|
||||
if node_id not in metadata[category]:
|
||||
metadata[category][node_id] = cached_data[category][node_id]
|
||||
|
||||
metadata[category][node_id] = cached_data[category][
|
||||
node_id
|
||||
]
|
||||
|
||||
def record_node_execution(self, node_id, class_type, inputs, outputs):
|
||||
"""Record information about a node's execution"""
|
||||
if not self.current_prompt_id:
|
||||
return
|
||||
|
||||
|
||||
# Add to execution order and mark as executed
|
||||
if node_id not in self.executed_nodes:
|
||||
self.executed_nodes.add(node_id)
|
||||
self.prompt_metadata[self.current_prompt_id]["execution_order"].append(node_id)
|
||||
|
||||
self.prompt_metadata[self.current_prompt_id]["execution_order"].append(
|
||||
node_id
|
||||
)
|
||||
|
||||
# Process inputs to simplify working with them
|
||||
processed_inputs = {}
|
||||
for input_name, input_values in inputs.items():
|
||||
@@ -145,63 +155,61 @@ class MetadataRegistry:
|
||||
processed_inputs[input_name] = input_values[0]
|
||||
else:
|
||||
processed_inputs[input_name] = input_values
|
||||
|
||||
|
||||
# Extract node-specific metadata
|
||||
extractor = NODE_EXTRACTORS.get(class_type, GenericNodeExtractor)
|
||||
extractor.extract(
|
||||
node_id,
|
||||
processed_inputs,
|
||||
outputs,
|
||||
self.prompt_metadata[self.current_prompt_id]
|
||||
node_id,
|
||||
processed_inputs,
|
||||
outputs,
|
||||
self.prompt_metadata[self.current_prompt_id],
|
||||
)
|
||||
|
||||
|
||||
# Cache this node's metadata
|
||||
self._cache_node_metadata(node_id, class_type)
|
||||
|
||||
|
||||
def update_node_execution(self, node_id, class_type, outputs):
|
||||
"""Update node metadata with output information"""
|
||||
if not self.current_prompt_id:
|
||||
return
|
||||
|
||||
|
||||
# Process outputs to make them more usable
|
||||
processed_outputs = outputs
|
||||
|
||||
|
||||
# Use the same extractor to update with outputs
|
||||
extractor = NODE_EXTRACTORS.get(class_type, GenericNodeExtractor)
|
||||
if hasattr(extractor, 'update'):
|
||||
if hasattr(extractor, "update"):
|
||||
extractor.update(
|
||||
node_id,
|
||||
processed_outputs,
|
||||
self.prompt_metadata[self.current_prompt_id]
|
||||
node_id, processed_outputs, self.prompt_metadata[self.current_prompt_id]
|
||||
)
|
||||
|
||||
|
||||
# Update the cached metadata for this node
|
||||
self._cache_node_metadata(node_id, class_type)
|
||||
|
||||
|
||||
def _cache_node_metadata(self, node_id, class_type):
|
||||
"""Cache the metadata for a specific node"""
|
||||
if not self.current_prompt_id or not node_id or not class_type:
|
||||
return
|
||||
|
||||
|
||||
# Create a cache key combining node_id and class_type
|
||||
cache_key = f"{node_id}:{class_type}"
|
||||
|
||||
|
||||
# Create a shallow copy of the node's metadata
|
||||
node_metadata = {}
|
||||
current_metadata = self.prompt_metadata[self.current_prompt_id]
|
||||
|
||||
|
||||
for category in self.metadata_categories:
|
||||
if category in current_metadata and node_id in current_metadata[category]:
|
||||
if category not in node_metadata:
|
||||
node_metadata[category] = {}
|
||||
node_metadata[category][node_id] = current_metadata[category][node_id]
|
||||
|
||||
|
||||
# Save new metadata or clear stale cache entries when metadata is empty
|
||||
if any(node_metadata.values()):
|
||||
self.node_cache[cache_key] = node_metadata
|
||||
else:
|
||||
self.node_cache.pop(cache_key, None)
|
||||
|
||||
|
||||
def clear_unused_cache(self):
|
||||
"""Clean up node_cache entries that are no longer in use"""
|
||||
# Collect all node_ids currently in prompt_metadata
|
||||
@@ -210,18 +218,18 @@ class MetadataRegistry:
|
||||
for category in self.metadata_categories:
|
||||
if category in prompt_data:
|
||||
active_node_ids.update(prompt_data[category].keys())
|
||||
|
||||
|
||||
# Find cache keys that are no longer needed
|
||||
keys_to_remove = []
|
||||
for cache_key in self.node_cache:
|
||||
node_id = cache_key.split(':')[0]
|
||||
node_id = cache_key.split(":")[0]
|
||||
if node_id not in active_node_ids:
|
||||
keys_to_remove.append(cache_key)
|
||||
|
||||
|
||||
# Remove cache entries that are no longer needed
|
||||
for key in keys_to_remove:
|
||||
del self.node_cache[key]
|
||||
|
||||
|
||||
def clear_metadata(self, prompt_id=None):
|
||||
"""Clear metadata for a specific prompt or reset all data"""
|
||||
if prompt_id is not None:
|
||||
@@ -232,25 +240,25 @@ class MetadataRegistry:
|
||||
else:
|
||||
# Reset all data
|
||||
self._reset()
|
||||
|
||||
|
||||
def get_first_decoded_image(self, prompt_id=None):
|
||||
"""Get the first decoded image result"""
|
||||
key = prompt_id if prompt_id is not None else self.current_prompt_id
|
||||
if key not in self.prompt_metadata:
|
||||
return None
|
||||
|
||||
|
||||
metadata = self.prompt_metadata[key]
|
||||
if IMAGES in metadata and "first_decode" in metadata[IMAGES]:
|
||||
image_data = metadata[IMAGES]["first_decode"]["image"]
|
||||
|
||||
|
||||
# If it's an image batch or tuple, handle various formats
|
||||
if isinstance(image_data, (list, tuple)) and len(image_data) > 0:
|
||||
# Return first element of list/tuple
|
||||
return image_data[0]
|
||||
|
||||
|
||||
# If it's a tensor, return as is for processing in the route handler
|
||||
return image_data
|
||||
|
||||
|
||||
# If no image is found in the current metadata, try to find it in the cache
|
||||
# This handles the case where VAEDecode was cached by ComfyUI and not executed
|
||||
prompt_obj = metadata.get("current_prompt")
|
||||
@@ -270,8 +278,11 @@ class MetadataRegistry:
|
||||
if IMAGES in cached_data and node_id in cached_data[IMAGES]:
|
||||
image_data = cached_data[IMAGES][node_id]["image"]
|
||||
# Handle different image formats
|
||||
if isinstance(image_data, (list, tuple)) and len(image_data) > 0:
|
||||
if (
|
||||
isinstance(image_data, (list, tuple))
|
||||
and len(image_data) > 0
|
||||
):
|
||||
return image_data[0]
|
||||
return image_data
|
||||
|
||||
|
||||
return None
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from typing import Any, Dict, Optional
|
||||
import numpy as np
|
||||
import folder_paths # type: ignore
|
||||
import folder_paths # type: ignore
|
||||
from ..services.service_registry import ServiceRegistry
|
||||
from ..metadata_collector.metadata_processor import MetadataProcessor
|
||||
from ..metadata_collector import get_metadata
|
||||
@@ -12,6 +13,7 @@ import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SaveImageLM:
|
||||
NAME = "Save Image (LoraManager)"
|
||||
CATEGORY = "Lora Manager/utils"
|
||||
@@ -23,42 +25,60 @@ class SaveImageLM:
|
||||
self.prefix_append = ""
|
||||
self.compress_level = 4
|
||||
self.counter = 0
|
||||
|
||||
|
||||
# Add pattern format regex for filename substitution
|
||||
pattern_format = re.compile(r"(%[^%]+%)")
|
||||
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(cls):
|
||||
return {
|
||||
"required": {
|
||||
"images": ("IMAGE",),
|
||||
"filename_prefix": ("STRING", {
|
||||
"default": "ComfyUI",
|
||||
"tooltip": "Base filename for saved images. Supports format patterns like %seed%, %width%, %height%, %model%, etc."
|
||||
}),
|
||||
"file_format": (["png", "jpeg", "webp"], {
|
||||
"tooltip": "Image format to save as. PNG preserves quality, JPEG is smaller, WebP balances size and quality."
|
||||
}),
|
||||
"filename_prefix": (
|
||||
"STRING",
|
||||
{
|
||||
"default": "ComfyUI",
|
||||
"tooltip": "Base filename for saved images. Supports format patterns like %seed%, %width%, %height%, %model%, etc.",
|
||||
},
|
||||
),
|
||||
"file_format": (
|
||||
["png", "jpeg", "webp"],
|
||||
{
|
||||
"tooltip": "Image format to save as. PNG preserves quality, JPEG is smaller, WebP balances size and quality."
|
||||
},
|
||||
),
|
||||
},
|
||||
"optional": {
|
||||
"lossless_webp": ("BOOLEAN", {
|
||||
"default": False,
|
||||
"tooltip": "When enabled, saves WebP images with lossless compression. Results in larger files but no quality loss."
|
||||
}),
|
||||
"quality": ("INT", {
|
||||
"default": 100,
|
||||
"min": 1,
|
||||
"max": 100,
|
||||
"tooltip": "Compression quality for JPEG and lossy WebP formats (1-100). Higher values mean better quality but larger files."
|
||||
}),
|
||||
"embed_workflow": ("BOOLEAN", {
|
||||
"default": False,
|
||||
"tooltip": "Embeds the complete workflow data into the image metadata. Only works with PNG and WebP formats."
|
||||
}),
|
||||
"add_counter_to_filename": ("BOOLEAN", {
|
||||
"default": True,
|
||||
"tooltip": "Adds an incremental counter to filenames to prevent overwriting previous images."
|
||||
}),
|
||||
"lossless_webp": (
|
||||
"BOOLEAN",
|
||||
{
|
||||
"default": False,
|
||||
"tooltip": "When enabled, saves WebP images with lossless compression. Results in larger files but no quality loss.",
|
||||
},
|
||||
),
|
||||
"quality": (
|
||||
"INT",
|
||||
{
|
||||
"default": 100,
|
||||
"min": 1,
|
||||
"max": 100,
|
||||
"tooltip": "Compression quality for JPEG and lossy WebP formats (1-100). Higher values mean better quality but larger files.",
|
||||
},
|
||||
),
|
||||
"embed_workflow": (
|
||||
"BOOLEAN",
|
||||
{
|
||||
"default": False,
|
||||
"tooltip": "Embeds the complete workflow data into the image metadata. Only works with PNG and WebP formats.",
|
||||
},
|
||||
),
|
||||
"add_counter_to_filename": (
|
||||
"BOOLEAN",
|
||||
{
|
||||
"default": True,
|
||||
"tooltip": "Adds an incremental counter to filenames to prevent overwriting previous images.",
|
||||
},
|
||||
),
|
||||
},
|
||||
"hidden": {
|
||||
"id": "UNIQUE_ID",
|
||||
@@ -75,57 +95,59 @@ class SaveImageLM:

def get_lora_hash(self, lora_name):
"""Get the lora hash from cache"""
scanner = ServiceRegistry.get_service_sync("lora_scanner")

# Use the new direct filename lookup method
hash_value = scanner.get_hash_by_filename(lora_name)
if hash_value:
return hash_value

if scanner is not None:
hash_value = scanner.get_hash_by_filename(lora_name)
if hash_value:
return hash_value

return None

def get_checkpoint_hash(self, checkpoint_path):
"""Get the checkpoint hash from cache"""
scanner = ServiceRegistry.get_service_sync("checkpoint_scanner")

if not checkpoint_path:
return None

# Extract basename without extension
checkpoint_name = os.path.basename(checkpoint_path)
checkpoint_name = os.path.splitext(checkpoint_name)[0]

# Try direct filename lookup first
hash_value = scanner.get_hash_by_filename(checkpoint_name)
if hash_value:
return hash_value

if scanner is not None:
hash_value = scanner.get_hash_by_filename(checkpoint_name)
if hash_value:
return hash_value

return None

def format_metadata(self, metadata_dict):
"""Format metadata in the requested format similar to userComment example"""
if not metadata_dict:
return ""

# Helper function to only add parameter if value is not None
def add_param_if_not_none(param_list, label, value):
if value is not None:
param_list.append(f"{label}: {value}")

# Extract the prompt and negative prompt
prompt = metadata_dict.get('prompt', '')
negative_prompt = metadata_dict.get('negative_prompt', '')

prompt = metadata_dict.get("prompt", "")
negative_prompt = metadata_dict.get("negative_prompt", "")

# Extract loras from the prompt if present
loras_text = metadata_dict.get('loras', '')
loras_text = metadata_dict.get("loras", "")
lora_hashes = {}

# If loras are found, add them on a new line after the prompt
if loras_text:
prompt_with_loras = f"{prompt}\n{loras_text}"

# Extract lora names from the format <lora:name:strength>
lora_matches = re.findall(r'<lora:([^:]+):([^>]+)>', loras_text)

lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", loras_text)

# Get hash for each lora
for lora_name, strength in lora_matches:
hash_value = self.get_lora_hash(lora_name)
@@ -133,112 +155,114 @@ class SaveImageLM:

lora_hashes[lora_name] = hash_value
else:
prompt_with_loras = prompt

# Format the first part (prompt and loras)
metadata_parts = [prompt_with_loras]

# Add negative prompt
if negative_prompt:
metadata_parts.append(f"Negative prompt: {negative_prompt}")

# Format the second part (generation parameters)
params = []

# Add standard parameters in the correct order
if 'steps' in metadata_dict:
add_param_if_not_none(params, "Steps", metadata_dict.get('steps'))

if "steps" in metadata_dict:
add_param_if_not_none(params, "Steps", metadata_dict.get("steps"))

# Combine sampler and scheduler information
sampler_name = None
scheduler_name = None

if 'sampler' in metadata_dict:
sampler = metadata_dict.get('sampler')

if "sampler" in metadata_dict:
sampler = metadata_dict.get("sampler")
# Convert ComfyUI sampler names to user-friendly names
sampler_mapping = {
'euler': 'Euler',
'euler_ancestral': 'Euler a',
'dpm_2': 'DPM2',
'dpm_2_ancestral': 'DPM2 a',
'heun': 'Heun',
'dpm_fast': 'DPM fast',
'dpm_adaptive': 'DPM adaptive',
'lms': 'LMS',
'dpmpp_2s_ancestral': 'DPM++ 2S a',
'dpmpp_sde': 'DPM++ SDE',
'dpmpp_sde_gpu': 'DPM++ SDE',
'dpmpp_2m': 'DPM++ 2M',
'dpmpp_2m_sde': 'DPM++ 2M SDE',
'dpmpp_2m_sde_gpu': 'DPM++ 2M SDE',
'ddim': 'DDIM'
"euler": "Euler",
"euler_ancestral": "Euler a",
"dpm_2": "DPM2",
"dpm_2_ancestral": "DPM2 a",
"heun": "Heun",
"dpm_fast": "DPM fast",
"dpm_adaptive": "DPM adaptive",
"lms": "LMS",
"dpmpp_2s_ancestral": "DPM++ 2S a",
"dpmpp_sde": "DPM++ SDE",
"dpmpp_sde_gpu": "DPM++ SDE",
"dpmpp_2m": "DPM++ 2M",
"dpmpp_2m_sde": "DPM++ 2M SDE",
"dpmpp_2m_sde_gpu": "DPM++ 2M SDE",
"ddim": "DDIM",
}
sampler_name = sampler_mapping.get(sampler, sampler)

if 'scheduler' in metadata_dict:
scheduler = metadata_dict.get('scheduler')

if "scheduler" in metadata_dict:
scheduler = metadata_dict.get("scheduler")
scheduler_mapping = {
'normal': 'Simple',
'karras': 'Karras',
'exponential': 'Exponential',
'sgm_uniform': 'SGM Uniform',
'sgm_quadratic': 'SGM Quadratic'
"normal": "Simple",
"karras": "Karras",
"exponential": "Exponential",
"sgm_uniform": "SGM Uniform",
"sgm_quadratic": "SGM Quadratic",
}
scheduler_name = scheduler_mapping.get(scheduler, scheduler)

# Add combined sampler and scheduler information
if sampler_name:
if scheduler_name:
params.append(f"Sampler: {sampler_name} {scheduler_name}")
else:
params.append(f"Sampler: {sampler_name}")

# CFG scale (Use guidance if available, otherwise fall back to cfg_scale or cfg)
if 'guidance' in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get('guidance'))
elif 'cfg_scale' in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get('cfg_scale'))
elif 'cfg' in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get('cfg'))

if "guidance" in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get("guidance"))
elif "cfg_scale" in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get("cfg_scale"))
elif "cfg" in metadata_dict:
add_param_if_not_none(params, "CFG scale", metadata_dict.get("cfg"))

# Seed
if 'seed' in metadata_dict:
add_param_if_not_none(params, "Seed", metadata_dict.get('seed'))

if "seed" in metadata_dict:
add_param_if_not_none(params, "Seed", metadata_dict.get("seed"))

# Size
if 'size' in metadata_dict:
add_param_if_not_none(params, "Size", metadata_dict.get('size'))

if "size" in metadata_dict:
add_param_if_not_none(params, "Size", metadata_dict.get("size"))

# Model info
if 'checkpoint' in metadata_dict:
if "checkpoint" in metadata_dict:
# Ensure checkpoint is a string before processing
checkpoint = metadata_dict.get('checkpoint')
checkpoint = metadata_dict.get("checkpoint")
if checkpoint is not None:
# Get model hash
model_hash = self.get_checkpoint_hash(checkpoint)

# Extract basename without path
checkpoint_name = os.path.basename(checkpoint)
# Remove extension if present
checkpoint_name = os.path.splitext(checkpoint_name)[0]

# Add model hash if available
if model_hash:
params.append(f"Model hash: {model_hash[:10]}, Model: {checkpoint_name}")
params.append(
f"Model hash: {model_hash[:10]}, Model: {checkpoint_name}"
)
else:
params.append(f"Model: {checkpoint_name}")

# Add LoRA hashes if available
if lora_hashes:
lora_hash_parts = []
for lora_name, hash_value in lora_hashes.items():
lora_hash_parts.append(f"{lora_name}: {hash_value[:10]}")

if lora_hash_parts:
params.append(f"Lora hashes: \"{', '.join(lora_hash_parts)}\"")

params.append(f'Lora hashes: "{", ".join(lora_hash_parts)}"')

# Combine all parameters with commas
metadata_parts.append(", ".join(params))

# Join all parts with a new line
return "\n".join(metadata_parts)
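# Illustrative example of the string format_metadata() aims to produce
# (hypothetical prompt, hashes and values, shown only for documentation):
#
#   a cinematic photo of a lighthouse at dusk
#   <lora:myStyleLora:0.8>
#   Negative prompt: blurry, low quality
#   Steps: 30, Sampler: Euler a Karras, CFG scale: 7.0, Seed: 123456789, Size: 1024x1024, Model hash: a1b2c3d4e5, Model: myCheckpoint, Lora hashes: "myStyleLora: f6e5d4c3b2"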

@@ -248,36 +272,36 @@ class SaveImageLM:

"""Format filename with metadata values"""
if not metadata_dict:
return filename

result = re.findall(self.pattern_format, filename)
for segment in result:
parts = segment.replace("%", "").split(":")
key = parts[0]

if key == "seed" and 'seed' in metadata_dict:
filename = filename.replace(segment, str(metadata_dict.get('seed', '')))
elif key == "width" and 'size' in metadata_dict:
size = metadata_dict.get('size', 'x')
w = size.split('x')[0] if isinstance(size, str) else size[0]

if key == "seed" and "seed" in metadata_dict:
filename = filename.replace(segment, str(metadata_dict.get("seed", "")))
elif key == "width" and "size" in metadata_dict:
size = metadata_dict.get("size", "x")
w = size.split("x")[0] if isinstance(size, str) else size[0]
filename = filename.replace(segment, str(w))
elif key == "height" and 'size' in metadata_dict:
size = metadata_dict.get('size', 'x')
h = size.split('x')[1] if isinstance(size, str) else size[1]
elif key == "height" and "size" in metadata_dict:
size = metadata_dict.get("size", "x")
h = size.split("x")[1] if isinstance(size, str) else size[1]
filename = filename.replace(segment, str(h))
elif key == "pprompt" and 'prompt' in metadata_dict:
prompt = metadata_dict.get('prompt', '').replace("\n", " ")
elif key == "pprompt" and "prompt" in metadata_dict:
prompt = metadata_dict.get("prompt", "").replace("\n", " ")
if len(parts) >= 2:
length = int(parts[1])
prompt = prompt[:length]
filename = filename.replace(segment, prompt.strip())
elif key == "nprompt" and 'negative_prompt' in metadata_dict:
prompt = metadata_dict.get('negative_prompt', '').replace("\n", " ")
elif key == "nprompt" and "negative_prompt" in metadata_dict:
prompt = metadata_dict.get("negative_prompt", "").replace("\n", " ")
if len(parts) >= 2:
length = int(parts[1])
prompt = prompt[:length]
filename = filename.replace(segment, prompt.strip())
elif key == "model":
model_value = metadata_dict.get('checkpoint')
model_value = metadata_dict.get("checkpoint")
if isinstance(model_value, (bytes, os.PathLike)):
model_value = str(model_value)

@@ -291,6 +315,7 @@ class SaveImageLM:

filename = filename.replace(segment, model)
elif key == "date":
from datetime import datetime

now = datetime.now()
date_table = {
"yyyy": f"{now.year:04d}",
@@ -311,46 +336,62 @@ class SaveImageLM:

for k, v in date_table.items():
date_format = date_format.replace(k, v)
filename = filename.replace(segment, date_format)

return filename
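# Illustrative example (hypothetical pattern and values): with a prefix such as
#   "ComfyUI_%seed%_%width%x%height%_%pprompt:20%_%date:yyyy-MM-dd%"
# format_filename() would substitute metadata values to yield something like
#   "ComfyUI_123456789_1024x1024_a cinematic photo of_2025-01-01"
# before the counter and file extension are appended by save_images().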

def save_images(self, images, filename_prefix, file_format, id, prompt=None, extra_pnginfo=None,
lossless_webp=True, quality=100, embed_workflow=False, add_counter_to_filename=True):
def save_images(
self,
images,
filename_prefix,
file_format,
id,
prompt=None,
extra_pnginfo=None,
lossless_webp=True,
quality=100,
embed_workflow=False,
add_counter_to_filename=True,
):
"""Save images with metadata"""
results = []

# Get metadata using the metadata collector
raw_metadata = get_metadata()
metadata_dict = MetadataProcessor.to_dict(raw_metadata, id)

metadata = self.format_metadata(metadata_dict)

# Process filename_prefix with pattern substitution
filename_prefix = self.format_filename(filename_prefix, metadata_dict)

# Get initial save path info once for the batch
full_output_folder, filename, counter, subfolder, processed_prefix = folder_paths.get_save_image_path(
filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]
full_output_folder, filename, counter, subfolder, processed_prefix = (
folder_paths.get_save_image_path(
filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]
)
)

# Create directory if it doesn't exist
if not os.path.exists(full_output_folder):
os.makedirs(full_output_folder, exist_ok=True)

# Process each image with incrementing counter
for i, image in enumerate(images):
# Convert the tensor image to numpy array
img = 255. * image.cpu().numpy()
img = 255.0 * image.cpu().numpy()
img = Image.fromarray(np.clip(img, 0, 255).astype(np.uint8))

# Generate filename with counter if needed
base_filename = filename
if add_counter_to_filename:
# Use counter + i to ensure unique filenames for all images in batch
current_counter = counter + i
base_filename += f"_{current_counter:05}_"

# Set file extension and prepare saving parameters
file: str
save_kwargs: Dict[str, Any]
pnginfo: Optional[PngImagePlugin.PngInfo] = None
if file_format == "png":
file = base_filename + ".png"
file_extension = ".png"
@@ -362,17 +403,24 @@ class SaveImageLM:

file_extension = ".jpg"
save_kwargs = {"quality": quality, "optimize": True}
elif file_format == "webp":
file = base_filename + ".webp"
file = base_filename + ".webp"
file_extension = ".webp"
# Add optimization param to control performance
save_kwargs = {"quality": quality, "lossless": lossless_webp, "method": 0}

save_kwargs = {
"quality": quality,
"lossless": lossless_webp,
"method": 0,
}
else:
raise ValueError(f"Unsupported file format: {file_format}")

# Full save path
file_path = os.path.join(full_output_folder, file)

# Save the image with metadata
try:
if file_format == "png":
assert pnginfo is not None
if metadata:
pnginfo.add_text("parameters", metadata)
if embed_workflow and extra_pnginfo is not None:
@@ -384,7 +432,12 @@ class SaveImageLM:

# For JPEG, use piexif
if metadata:
try:
exif_dict = {'Exif': {piexif.ExifIFD.UserComment: b'UNICODE\0' + metadata.encode('utf-16be')}}
exif_dict = {
"Exif": {
piexif.ExifIFD.UserComment: b"UNICODE\0"
+ metadata.encode("utf-16be")
}
}
exif_bytes = piexif.dump(exif_dict)
save_kwargs["exif"] = exif_bytes
except Exception as e:
@@ -396,37 +449,52 @@ class SaveImageLM:

exif_dict = {}

if metadata:
exif_dict['Exif'] = {piexif.ExifIFD.UserComment: b'UNICODE\0' + metadata.encode('utf-16be')}

exif_dict["Exif"] = {
piexif.ExifIFD.UserComment: b"UNICODE\0"
+ metadata.encode("utf-16be")
}

# Add workflow if needed
if embed_workflow and extra_pnginfo is not None:
workflow_json = json.dumps(extra_pnginfo["workflow"])
exif_dict['0th'] = {piexif.ImageIFD.ImageDescription: "Workflow:" + workflow_json}

workflow_json = json.dumps(extra_pnginfo["workflow"])
exif_dict["0th"] = {
piexif.ImageIFD.ImageDescription: "Workflow:"
+ workflow_json
}

exif_bytes = piexif.dump(exif_dict)
save_kwargs["exif"] = exif_bytes
except Exception as e:
logger.error(f"Error adding EXIF data: {e}")

img.save(file_path, format="WEBP", **save_kwargs)

results.append({
"filename": file,
"subfolder": subfolder,
"type": self.type
})

results.append(
{"filename": file, "subfolder": subfolder, "type": self.type}
)

except Exception as e:
logger.error(f"Error saving image: {e}")

return results
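# Illustrative shape of the returned results list (hypothetical filename and
# type value): roughly what ComfyUI uses to show saved images in the UI.
#   [{"filename": "ComfyUI_00001_.png", "subfolder": "", "type": "output"}]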

def process_image(self, images, id, filename_prefix="ComfyUI", file_format="png", prompt=None, extra_pnginfo=None,
lossless_webp=True, quality=100, embed_workflow=False, add_counter_to_filename=True):
def process_image(
self,
images,
id,
filename_prefix="ComfyUI",
file_format="png",
prompt=None,
extra_pnginfo=None,
lossless_webp=True,
quality=100,
embed_workflow=False,
add_counter_to_filename=True,
):
"""Process and save image with metadata"""
# Make sure the output directory exists
os.makedirs(self.output_dir, exist_ok=True)

# If images is already a list or array of images, do nothing; otherwise, convert to list
if isinstance(images, (list, np.ndarray)):
pass
@@ -436,19 +504,19 @@ class SaveImageLM:

images = [images]
else:  # Multiple images (batch, height, width, channels)
images = [img for img in images]

# Save all images
results = self.save_images(
images,
filename_prefix,
file_format,
images,
filename_prefix,
file_format,
id,
prompt,
prompt,
extra_pnginfo,
lossless_webp,
quality,
embed_workflow,
add_counter_to_filename
add_counter_to_filename,
)

return (images,)
@@ -1,33 +1,35 @@

class AnyType(str):
"""A special class that is always equal in not equal comparisons. Credit to pythongosssss"""
"""A special class that is always equal in not equal comparisons. Credit to pythongosssss"""

def __ne__(self, __value: object) -> bool:
return False

def __ne__(self, __value: object) -> bool:
return False

# Credit to Regis Gaughan, III (rgthree)
class FlexibleOptionalInputType(dict):
"""A special class to make flexible nodes that pass data to our python handlers.
"""A special class to make flexible nodes that pass data to our python handlers.

Enables both flexible/dynamic input types (like for Any Switch) or a dynamic number of inputs
(like for Any Switch, Context Switch, Context Merge, Power Lora Loader, etc).
Enables both flexible/dynamic input types (like for Any Switch) or a dynamic number of inputs
(like for Any Switch, Context Switch, Context Merge, Power Lora Loader, etc).

Note, for ComfyUI, all that's needed is the `__contains__` override below, which tells ComfyUI
that our node will handle the input, regardless of what it is.
Note, for ComfyUI, all that's needed is the `__contains__` override below, which tells ComfyUI
that our node will handle the input, regardless of what it is.

However, with https://github.com/comfyanonymous/ComfyUI/pull/2666 a large change would occur
requiring more details on the input itself. There, we need to return a list/tuple where the first
item is the type. This can be a real type, or use the AnyType for additional flexibility.
However, with https://github.com/comfyanonymous/ComfyUI/pull/2666 a large change would occur
requiring more details on the input itself. There, we need to return a list/tuple where the first
item is the type. This can be a real type, or use the AnyType for additional flexibility.

This should be forwards compatible unless more changes occur in the PR.
"""
def __init__(self, type):
self.type = type
This should be forwards compatible unless more changes occur in the PR.
"""

def __getitem__(self, key):
return (self.type, )
def __init__(self, type):
self.type = type

def __contains__(self, key):
return True
def __getitem__(self, key):
return (self.type,)

def __contains__(self, key):
return True

any_type = AnyType("*")
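# Hypothetical usage sketch (not part of this change): returning a
# FlexibleOptionalInputType from INPUT_TYPES lets a node accept an arbitrary,
# dynamically named set of optional inputs, e.g.:
#
#     class ExampleDynamicNode:
#         @classmethod
#         def INPUT_TYPES(cls):
#             return {
#                 "required": {},
#                 "optional": FlexibleOptionalInputType(any_type),
#             }
#
#         def run(self, **kwargs):
#             # every dynamically wired input simply shows up in kwargs
#             return (list(kwargs.values()),)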

@@ -37,25 +39,27 @@ import os

import logging
import copy
import sys
import folder_paths
import folder_paths  # type: ignore

logger = logging.getLogger(__name__)

def extract_lora_name(lora_path):
"""Extract the lora name from a lora path (e.g., 'IL\\aorunIllstrious.safetensors' -> 'aorunIllstrious')"""
# Get the basename without extension
basename = os.path.basename(lora_path)
return os.path.splitext(basename)[0]

def get_loras_list(kwargs):
"""Helper to extract loras list from either old or new kwargs format"""
if 'loras' not in kwargs:
if "loras" not in kwargs:
return []

loras_data = kwargs['loras']

loras_data = kwargs["loras"]
# Handle new format: {'loras': {'__value__': [...]}}
if isinstance(loras_data, dict) and '__value__' in loras_data:
return loras_data['__value__']
if isinstance(loras_data, dict) and "__value__" in loras_data:
return loras_data["__value__"]
# Handle old format: {'loras': [...]}
elif isinstance(loras_data, list):
return loras_data
@@ -64,24 +68,26 @@ def get_loras_list(kwargs):

logger.warning(f"Unexpected loras format: {type(loras_data)}")
return []

def load_state_dict_in_safetensors(path, device="cpu", filter_prefix=""):
"""Simplified version of load_state_dict_in_safetensors that just loads from a local path"""
"""Simplified version of load_state_dict_in_safetensors that just loads from a local path"""
import safetensors.torch

state_dict = {}
with safetensors.torch.safe_open(path, framework="pt", device=device) as f:
with safetensors.torch.safe_open(path, framework="pt", device=device) as f:  # type: ignore[attr-defined]
for k in f.keys():
if filter_prefix and not k.startswith(filter_prefix):
continue
state_dict[k.removeprefix(filter_prefix)] = f.get_tensor(k)
return state_dict
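# Illustrative call (hypothetical path and prefix): load only keys that start
# with a given prefix and strip that prefix from the returned dict:
#   te_sd = load_state_dict_in_safetensors("/path/to/lora.safetensors", filter_prefix="text_encoder.")
# Keys that do not start with the prefix are skipped; the prefix is removed from the rest.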

def to_diffusers(input_lora):
"""Simplified version of to_diffusers for Flux LoRA conversion"""
import torch
from diffusers.utils.state_dict_utils import convert_unet_state_dict_to_peft
from diffusers.loaders import FluxLoraLoaderMixin

from diffusers.loaders import FluxLoraLoaderMixin  # type: ignore[attr-defined]

if isinstance(input_lora, str):
tensors = load_state_dict_in_safetensors(input_lora, device="cpu")
else:
@@ -91,22 +97,27 @@ def to_diffusers(input_lora):

for k, v in tensors.items():
if v.dtype not in [torch.float64, torch.float32, torch.bfloat16, torch.float16]:
tensors[k] = v.to(torch.bfloat16)

new_tensors = FluxLoraLoaderMixin.lora_state_dict(tensors)
new_tensors = convert_unet_state_dict_to_peft(new_tensors)

return new_tensors

def nunchaku_load_lora(model, lora_name, lora_strength):
"""Load a Flux LoRA for Nunchaku model"""
"""Load a Flux LoRA for Nunchaku model"""
# Get full path to the LoRA file. Allow both direct paths and registered LoRA names.
lora_path = lora_name if os.path.isfile(lora_name) else folder_paths.get_full_path("loras", lora_name)
lora_path = (
lora_name
if os.path.isfile(lora_name)
else folder_paths.get_full_path("loras", lora_name)
)
if not lora_path or not os.path.isfile(lora_path):
logger.warning("Skipping LoRA '%s' because it could not be found", lora_name)
return model

model_wrapper = model.model.diffusion_model

# Try to find copy_with_ctx in the same module as ComfyFluxWrapper
module_name = model_wrapper.__class__.__module__
module = sys.modules.get(module_name)
@@ -118,14 +129,16 @@ def nunchaku_load_lora(model, lora_name, lora_strength):

ret_model_wrapper.loras = [*model_wrapper.loras, (lora_path, lora_strength)]
else:
# Fallback to legacy logic
logger.warning("Please upgrade ComfyUI-nunchaku to 1.1.0 or above for better LoRA support. Falling back to legacy loading logic.")
logger.warning(
"Please upgrade ComfyUI-nunchaku to 1.1.0 or above for better LoRA support. Falling back to legacy loading logic."
)
transformer = model_wrapper.model

# Save the transformer temporarily
model_wrapper.model = None
ret_model = copy.deepcopy(model)  # copy everything except the model
ret_model_wrapper = ret_model.model.diffusion_model

# Restore the model and set it for the copy
model_wrapper.model = transformer
ret_model_wrapper.model = transformer

@@ -133,15 +146,15 @@ def nunchaku_load_lora(model, lora_name, lora_strength):

# Convert the LoRA to diffusers format
sd = to_diffusers(lora_path)

# Handle embedding adjustment if needed
if "transformer.x_embedder.lora_A.weight" in sd:
new_in_channels = sd["transformer.x_embedder.lora_A.weight"].shape[1]
assert new_in_channels % 4 == 0
new_in_channels = new_in_channels // 4

old_in_channels = ret_model.model.model_config.unet_config["in_channels"]
if old_in_channels < new_in_channels:
ret_model.model.model_config.unet_config["in_channels"] = new_in_channels

return ret_model

return ret_model
@@ -6,23 +6,24 @@ from .parsers import (

ComfyMetadataParser,
MetaFormatParser,
AutomaticMetadataParser,
CivitaiApiMetadataParser
CivitaiApiMetadataParser,
)
from .base import RecipeMetadataParser

logger = logging.getLogger(__name__)

class RecipeParserFactory:
"""Factory for creating recipe metadata parsers"""

@staticmethod
def create_parser(metadata) -> RecipeMetadataParser:
def create_parser(metadata) -> RecipeMetadataParser | None:
"""
Create appropriate parser based on the metadata content

Args:
metadata: The metadata from the image (dict or str)

Returns:
Appropriate RecipeMetadataParser implementation
"""

@@ -34,17 +35,18 @@ class RecipeParserFactory:

except Exception as e:
logger.debug(f"CivitaiApiMetadataParser check failed: {e}")
pass

# Convert dict to string for other parsers that expect string input
try:
import json

metadata_str = json.dumps(metadata)
except Exception as e:
logger.debug(f"Failed to convert dict to JSON string: {e}")
return None
else:
metadata_str = metadata

# Try ComfyMetadataParser which requires valid JSON
try:
if ComfyMetadataParser().is_metadata_matching(metadata_str):

@@ -52,7 +54,7 @@ class RecipeParserFactory:

except Exception:
# If JSON parsing fails, move on to other parsers
pass

# Check other parsers that expect string input
if RecipeFormatParser().is_metadata_matching(metadata_str):
return RecipeFormatParser()
@@ -9,15 +9,16 @@ from ...services.metadata_service import get_default_metadata_provider

logger = logging.getLogger(__name__)

class CivitaiApiMetadataParser(RecipeMetadataParser):
"""Parser for Civitai image metadata format"""

def is_metadata_matching(self, metadata) -> bool:
"""Check if the metadata matches the Civitai image metadata format

Args:
metadata: The metadata from the image (dict)

Returns:
bool: True if this parser can handle the metadata
"""
@@ -28,7 +29,7 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):

# Check for common CivitAI image metadata fields
civitai_image_fields = (
"resources",
"civitaiResources",
"civitaiResources",
"additionalResources",
"hashes",
"prompt",

@@ -40,7 +41,7 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):

"width",
"height",
"Model",
"Model hash"
"Model hash",
)
return any(key in payload for key in civitai_image_fields)

@@ -50,7 +51,9 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):

# Check for LoRA hash patterns
hashes = metadata.get("hashes")
if isinstance(hashes, dict) and any(str(key).lower().startswith("lora:") for key in hashes):
if isinstance(hashes, dict) and any(
str(key).lower().startswith("lora:") for key in hashes
):
return True

# Check nested meta object (common in CivitAI image responses)
@@ -61,22 +64,28 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
|
||||
# Also check for LoRA hash patterns in nested meta
|
||||
hashes = nested_meta.get("hashes")
|
||||
if isinstance(hashes, dict) and any(str(key).lower().startswith("lora:") for key in hashes):
|
||||
if isinstance(hashes, dict) and any(
|
||||
str(key).lower().startswith("lora:") for key in hashes
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def parse_metadata(self, metadata, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
|
||||
|
||||
async def parse_metadata( # type: ignore[override]
|
||||
self, user_comment, recipe_scanner=None, civitai_client=None
|
||||
) -> Dict[str, Any]:
|
||||
"""Parse metadata from Civitai image format
|
||||
|
||||
|
||||
Args:
|
||||
metadata: The metadata from the image (dict)
|
||||
user_comment: The metadata from the image (dict)
|
||||
recipe_scanner: Optional recipe scanner service
|
||||
civitai_client: Optional Civitai API client (deprecated, use metadata_provider instead)
|
||||
|
||||
|
||||
Returns:
|
||||
Dict containing parsed recipe data
|
||||
"""
|
||||
metadata: Dict[str, Any] = user_comment # type: ignore[assignment]
|
||||
metadata = user_comment
|
||||
try:
|
||||
# Get metadata provider instead of using civitai_client directly
|
||||
metadata_provider = await get_default_metadata_provider()
|
||||
@@ -100,19 +109,19 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
)
|
||||
):
|
||||
metadata = inner_meta
|
||||
|
||||
|
||||
# Initialize result structure
|
||||
result = {
|
||||
'base_model': None,
|
||||
'loras': [],
|
||||
'model': None,
|
||||
'gen_params': {},
|
||||
'from_civitai_image': True
|
||||
"base_model": None,
|
||||
"loras": [],
|
||||
"model": None,
|
||||
"gen_params": {},
|
||||
"from_civitai_image": True,
|
||||
}
|
||||
|
||||
|
||||
# Track already added LoRAs to prevent duplicates
|
||||
added_loras = {} # key: model_version_id or hash, value: index in result["loras"]
|
||||
|
||||
|
||||
# Extract hash information from hashes field for LoRA matching
|
||||
lora_hashes = {}
|
||||
if "hashes" in metadata and isinstance(metadata["hashes"], dict):
|
||||
@@ -121,14 +130,14 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
if key_str.lower().startswith("lora:"):
|
||||
lora_name = key_str.split(":", 1)[1]
|
||||
lora_hashes[lora_name] = hash_value
|
||||
|
||||
|
||||
# Extract prompt and negative prompt
|
||||
if "prompt" in metadata:
|
||||
result["gen_params"]["prompt"] = metadata["prompt"]
|
||||
|
||||
|
||||
if "negativePrompt" in metadata:
|
||||
result["gen_params"]["negative_prompt"] = metadata["negativePrompt"]
|
||||
|
||||
|
||||
# Extract other generation parameters
|
||||
param_mapping = {
|
||||
"steps": "steps",
|
||||
@@ -138,98 +147,117 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
"Size": "size",
|
||||
"clipSkip": "clip_skip",
|
||||
}
|
||||
|
||||
|
||||
for civitai_key, our_key in param_mapping.items():
|
||||
if civitai_key in metadata and our_key in GEN_PARAM_KEYS:
|
||||
result["gen_params"][our_key] = metadata[civitai_key]
|
||||
|
||||
|
||||
# Extract base model information - directly if available
|
||||
if "baseModel" in metadata:
|
||||
result["base_model"] = metadata["baseModel"]
|
||||
elif "Model hash" in metadata and metadata_provider:
|
||||
model_hash = metadata["Model hash"]
|
||||
model_info, error = await metadata_provider.get_model_by_hash(model_hash)
|
||||
model_info, error = await metadata_provider.get_model_by_hash(
|
||||
model_hash
|
||||
)
|
||||
if model_info:
|
||||
result["base_model"] = model_info.get("baseModel", "")
|
||||
elif "Model" in metadata and isinstance(metadata.get("resources"), list):
|
||||
# Try to find base model in resources
|
||||
for resource in metadata.get("resources", []):
|
||||
if resource.get("type") == "model" and resource.get("name") == metadata.get("Model"):
|
||||
if resource.get("type") == "model" and resource.get(
|
||||
"name"
|
||||
) == metadata.get("Model"):
|
||||
# This is likely the checkpoint model
|
||||
if metadata_provider and resource.get("hash"):
|
||||
model_info, error = await metadata_provider.get_model_by_hash(resource.get("hash"))
|
||||
(
|
||||
model_info,
|
||||
error,
|
||||
) = await metadata_provider.get_model_by_hash(
|
||||
resource.get("hash")
|
||||
)
|
||||
if model_info:
|
||||
result["base_model"] = model_info.get("baseModel", "")
|
||||
|
||||
|
||||
base_model_counts = {}
|
||||
|
||||
|
||||
# Process standard resources array
|
||||
if "resources" in metadata and isinstance(metadata["resources"], list):
|
||||
for resource in metadata["resources"]:
|
||||
# Modified to process resources without a type field as potential LoRAs
|
||||
if resource.get("type", "lora") == "lora":
|
||||
lora_hash = resource.get("hash", "")
|
||||
|
||||
|
||||
# Try to get hash from the hashes field if not present in resource
|
||||
if not lora_hash and resource.get("name"):
|
||||
lora_hash = lora_hashes.get(resource["name"], "")
|
||||
|
||||
|
||||
# Skip LoRAs without proper identification (hash or modelVersionId)
|
||||
if not lora_hash and not resource.get("modelVersionId"):
|
||||
logger.debug(f"Skipping LoRA resource '{resource.get('name', 'Unknown')}' - no hash or modelVersionId")
|
||||
logger.debug(
|
||||
f"Skipping LoRA resource '{resource.get('name', 'Unknown')}' - no hash or modelVersionId"
|
||||
)
|
||||
continue
|
||||
|
||||
|
||||
# Skip if we've already added this LoRA by hash
|
||||
if lora_hash and lora_hash in added_loras:
|
||||
continue
|
||||
|
||||
|
||||
lora_entry = {
|
||||
'name': resource.get("name", "Unknown LoRA"),
|
||||
'type': "lora",
|
||||
'weight': float(resource.get("weight", 1.0)),
|
||||
'hash': lora_hash,
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': resource.get("name", "Unknown"),
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"name": resource.get("name", "Unknown LoRA"),
|
||||
"type": "lora",
|
||||
"weight": float(resource.get("weight", 1.0)),
|
||||
"hash": lora_hash,
|
||||
"existsLocally": False,
|
||||
"localPath": None,
|
||||
"file_name": resource.get("name", "Unknown"),
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
|
||||
# Try to get info from Civitai if hash is available
|
||||
if lora_entry['hash'] and metadata_provider:
|
||||
if lora_entry["hash"] and metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
|
||||
|
||||
civitai_info = (
|
||||
await metadata_provider.get_model_by_hash(lora_hash)
|
||||
)
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts,
|
||||
lora_hash
|
||||
lora_hash,
|
||||
)
|
||||
|
||||
|
||||
if populated_entry is None:
|
||||
continue # Skip invalid LoRA types
|
||||
|
||||
|
||||
lora_entry = populated_entry
|
||||
|
||||
|
||||
# If we have a version ID from Civitai, track it for deduplication
|
||||
if 'id' in lora_entry and lora_entry['id']:
|
||||
added_loras[str(lora_entry['id'])] = len(result["loras"])
|
||||
if "id" in lora_entry and lora_entry["id"]:
|
||||
added_loras[str(lora_entry["id"])] = len(
|
||||
result["loras"]
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for LoRA hash {lora_entry['hash']}: {e}")
|
||||
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for LoRA hash {lora_entry['hash']}: {e}"
|
||||
)
|
||||
|
||||
# Track by hash if we have it
|
||||
if lora_hash:
|
||||
added_loras[lora_hash] = len(result["loras"])
|
||||
|
||||
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
|
||||
# Process civitaiResources array
|
||||
if "civitaiResources" in metadata and isinstance(metadata["civitaiResources"], list):
|
||||
if "civitaiResources" in metadata and isinstance(
|
||||
metadata["civitaiResources"], list
|
||||
):
|
||||
for resource in metadata["civitaiResources"]:
|
||||
# Get resource type and identifier
|
||||
resource_type = str(resource.get("type") or "").lower()
|
||||
@@ -237,32 +265,39 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
|
||||
if resource_type == "checkpoint":
|
||||
checkpoint_entry = {
|
||||
'id': resource.get("modelVersionId", 0),
|
||||
'modelId': resource.get("modelId", 0),
|
||||
'name': resource.get("modelName", "Unknown Checkpoint"),
|
||||
'version': resource.get("modelVersionName", ""),
|
||||
'type': resource.get("type", "checkpoint"),
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': resource.get("modelName", ""),
|
||||
'hash': resource.get("hash", "") or "",
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"id": resource.get("modelVersionId", 0),
|
||||
"modelId": resource.get("modelId", 0),
|
||||
"name": resource.get("modelName", "Unknown Checkpoint"),
|
||||
"version": resource.get("modelVersionName", ""),
|
||||
"type": resource.get("type", "checkpoint"),
|
||||
"existsLocally": False,
|
||||
"localPath": None,
|
||||
"file_name": resource.get("modelName", ""),
|
||||
"hash": resource.get("hash", "") or "",
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
if version_id and metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id)
|
||||
civitai_info = (
|
||||
await metadata_provider.get_model_version_info(
|
||||
version_id
|
||||
)
|
||||
)
|
||||
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry,
|
||||
civitai_info
|
||||
checkpoint_entry = (
|
||||
await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry, civitai_info
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for checkpoint version {version_id}: {e}")
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for checkpoint version {version_id}: {e}"
|
||||
)
|
||||
|
||||
if result["model"] is None:
|
||||
result["model"] = checkpoint_entry
|
||||
@@ -275,31 +310,35 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
|
||||
# Initialize lora entry
|
||||
lora_entry = {
|
||||
'id': resource.get("modelVersionId", 0),
|
||||
'modelId': resource.get("modelId", 0),
|
||||
'name': resource.get("modelName", "Unknown LoRA"),
|
||||
'version': resource.get("modelVersionName", ""),
|
||||
'type': resource.get("type", "lora"),
|
||||
'weight': round(float(resource.get("weight", 1.0)), 2),
|
||||
'existsLocally': False,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"id": resource.get("modelVersionId", 0),
|
||||
"modelId": resource.get("modelId", 0),
|
||||
"name": resource.get("modelName", "Unknown LoRA"),
|
||||
"version": resource.get("modelVersionName", ""),
|
||||
"type": resource.get("type", "lora"),
|
||||
"weight": round(float(resource.get("weight", 1.0)), 2),
|
||||
"existsLocally": False,
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
# Try to get info from Civitai if modelVersionId is available
|
||||
if version_id and metadata_provider:
|
||||
try:
|
||||
# Use get_model_version_info instead of get_model_version
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id)
|
||||
civitai_info = (
|
||||
await metadata_provider.get_model_version_info(
|
||||
version_id
|
||||
)
|
||||
)
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts
|
||||
base_model_counts,
|
||||
)
|
||||
|
||||
if populated_entry is None:
|
||||
@@ -307,74 +346,87 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
|
||||
lora_entry = populated_entry
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for model version {version_id}: {e}")
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for model version {version_id}: {e}"
|
||||
)
|
||||
|
||||
# Track this LoRA in our deduplication dict
|
||||
if version_id:
|
||||
added_loras[version_id] = len(result["loras"])
|
||||
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
|
||||
# Process additionalResources array
|
||||
if "additionalResources" in metadata and isinstance(metadata["additionalResources"], list):
|
||||
if "additionalResources" in metadata and isinstance(
|
||||
metadata["additionalResources"], list
|
||||
):
|
||||
for resource in metadata["additionalResources"]:
|
||||
# Skip resources that aren't LoRAs or LyCORIS
|
||||
if resource.get("type") not in ["lora", "lycoris"] and "type" not in resource:
|
||||
if (
|
||||
resource.get("type") not in ["lora", "lycoris"]
|
||||
and "type" not in resource
|
||||
):
|
||||
continue
|
||||
|
||||
|
||||
lora_type = resource.get("type", "lora")
|
||||
name = resource.get("name", "")
|
||||
|
||||
|
||||
# Extract ID from URN format if available
|
||||
version_id = None
|
||||
if name and "civitai:" in name:
|
||||
parts = name.split("@")
|
||||
if len(parts) > 1:
|
||||
version_id = parts[1]
|
||||
|
||||
|
||||
# Skip if we've already added this LoRA
|
||||
if version_id in added_loras:
|
||||
continue
|
||||
|
||||
|
||||
lora_entry = {
|
||||
'name': name,
|
||||
'type': lora_type,
|
||||
'weight': float(resource.get("strength", 1.0)),
|
||||
'hash': "",
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': name,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"name": name,
|
||||
"type": lora_type,
|
||||
"weight": float(resource.get("strength", 1.0)),
|
||||
"hash": "",
|
||||
"existsLocally": False,
|
||||
"localPath": None,
|
||||
"file_name": name,
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
|
||||
# If we have a version ID and metadata provider, try to get more info
|
||||
if version_id and metadata_provider:
|
||||
try:
|
||||
# Use get_model_version_info with the version ID
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id)
|
||||
|
||||
civitai_info = (
|
||||
await metadata_provider.get_model_version_info(
|
||||
version_id
|
||||
)
|
||||
)
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts
|
||||
base_model_counts,
|
||||
)
|
||||
|
||||
|
||||
if populated_entry is None:
|
||||
continue # Skip invalid LoRA types
|
||||
|
||||
|
||||
lora_entry = populated_entry
|
||||
|
||||
|
||||
# Track this LoRA for deduplication
|
||||
if version_id:
|
||||
added_loras[version_id] = len(result["loras"])
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for model ID {version_id}: {e}")
|
||||
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for model ID {version_id}: {e}"
|
||||
)
|
||||
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
# If we found LoRA hashes in the metadata but haven't already
|
||||
@@ -390,30 +442,32 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
continue
|
||||
|
||||
lora_entry = {
|
||||
'name': lora_name,
|
||||
'type': "lora",
|
||||
'weight': 1.0,
|
||||
'hash': lora_hash,
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': lora_name,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"name": lora_name,
|
||||
"type": "lora",
|
||||
"weight": 1.0,
|
||||
"hash": lora_hash,
|
||||
"existsLocally": False,
|
||||
"localPath": None,
|
||||
"file_name": lora_name,
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
if metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
|
||||
civitai_info = await metadata_provider.get_model_by_hash(
|
||||
lora_hash
|
||||
)
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts,
|
||||
lora_hash
|
||||
lora_hash,
|
||||
)
|
||||
|
||||
if populated_entry is None:
|
||||
@@ -421,80 +475,93 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
|
||||
lora_entry = populated_entry
|
||||
|
||||
if 'id' in lora_entry and lora_entry['id']:
|
||||
added_loras[str(lora_entry['id'])] = len(result["loras"])
|
||||
if "id" in lora_entry and lora_entry["id"]:
|
||||
added_loras[str(lora_entry["id"])] = len(result["loras"])
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for LoRA hash {lora_hash}: {e}")
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for LoRA hash {lora_hash}: {e}"
|
||||
)
|
||||
|
||||
added_loras[lora_hash] = len(result["loras"])
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
# Check for LoRA info in the format "Lora_0 Model hash", "Lora_0 Model name", etc.
|
||||
lora_index = 0
|
||||
while f"Lora_{lora_index} Model hash" in metadata and f"Lora_{lora_index} Model name" in metadata:
|
||||
while (
|
||||
f"Lora_{lora_index} Model hash" in metadata
|
||||
and f"Lora_{lora_index} Model name" in metadata
|
||||
):
|
||||
lora_hash = metadata[f"Lora_{lora_index} Model hash"]
|
||||
lora_name = metadata[f"Lora_{lora_index} Model name"]
|
||||
lora_strength_model = float(metadata.get(f"Lora_{lora_index} Strength model", 1.0))
|
||||
|
||||
lora_strength_model = float(
|
||||
metadata.get(f"Lora_{lora_index} Strength model", 1.0)
|
||||
)
|
||||
|
||||
# Skip if we've already added this LoRA by hash
|
||||
if lora_hash and lora_hash in added_loras:
|
||||
lora_index += 1
|
||||
continue
|
||||
|
||||
|
||||
lora_entry = {
|
||||
'name': lora_name,
|
||||
'type': "lora",
|
||||
'weight': lora_strength_model,
|
||||
'hash': lora_hash,
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': lora_name,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
"name": lora_name,
|
||||
"type": "lora",
|
||||
"weight": lora_strength_model,
|
||||
"hash": lora_hash,
|
||||
"existsLocally": False,
|
||||
"localPath": None,
|
||||
"file_name": lora_name,
|
||||
"thumbnailUrl": "/loras_static/images/no-preview.png",
|
||||
"baseModel": "",
|
||||
"size": 0,
|
||||
"downloadUrl": "",
|
||||
"isDeleted": False,
|
||||
}
|
||||
|
||||
|
||||
# Try to get info from Civitai if hash is available
|
||||
if lora_entry['hash'] and metadata_provider:
|
||||
if lora_entry["hash"] and metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
|
||||
|
||||
civitai_info = await metadata_provider.get_model_by_hash(
|
||||
lora_hash
|
||||
)
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts,
|
||||
lora_hash
|
||||
lora_hash,
|
||||
)
|
||||
|
||||
|
||||
if populated_entry is None:
|
||||
lora_index += 1
|
||||
continue # Skip invalid LoRA types
|
||||
|
||||
|
||||
lora_entry = populated_entry
|
||||
|
||||
|
||||
# If we have a version ID from Civitai, track it for deduplication
|
||||
if 'id' in lora_entry and lora_entry['id']:
|
||||
added_loras[str(lora_entry['id'])] = len(result["loras"])
|
||||
if "id" in lora_entry and lora_entry["id"]:
|
||||
added_loras[str(lora_entry["id"])] = len(result["loras"])
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for LoRA hash {lora_entry['hash']}: {e}")
|
||||
|
||||
logger.error(
|
||||
f"Error fetching Civitai info for LoRA hash {lora_entry['hash']}: {e}"
|
||||
)
|
||||
|
||||
# Track by hash if we have it
|
||||
if lora_hash:
|
||||
added_loras[lora_hash] = len(result["loras"])
|
||||
|
||||
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
|
||||
lora_index += 1
|
||||
|
||||
|
||||
# If base model wasn't found earlier, use the most common one from LoRAs
|
||||
if not result["base_model"] and base_model_counts:
|
||||
result["base_model"] = max(base_model_counts.items(), key=lambda x: x[1])[0]
|
||||
|
||||
result["base_model"] = max(
|
||||
base_model_counts.items(), key=lambda x: x[1]
|
||||
)[0]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing Civitai image metadata: {e}", exc_info=True)
|
||||
return {"error": str(e), "loras": []}
|
||||
|
||||
@@ -1,5 +1,5 @@

import logging
from typing import Dict
from typing import Dict, List, Set
from aiohttp import web

from .base_model_routes import BaseModelRoutes

@@ -82,12 +82,22 @@ class CheckpointRoutes:

return web.json_response({"error": str(e)}, status=500)

async def get_checkpoints_roots(self, request: web.Request) -> web.Response:
"""Return the list of checkpoint roots from config"""
"""Return the list of checkpoint roots from config (including extra paths)"""
try:
roots = config.checkpoints_roots
# Merge checkpoints_roots with extra_checkpoints_roots, preserving order and removing duplicates
roots: List[str] = []
roots.extend(config.checkpoints_roots or [])
roots.extend(config.extra_checkpoints_roots or [])
# Remove duplicates while preserving order
seen: set = set()
unique_roots: List[str] = []
for root in roots:
if root and root not in seen:
seen.add(root)
unique_roots.append(root)
return web.json_response({
"success": True,
"roots": roots
"roots": unique_roots
})
except Exception as e:
logger.error(f"Error getting checkpoint roots: {e}", exc_info=True)

@@ -97,12 +107,22 @@ class CheckpointRoutes:

}, status=500)

async def get_unet_roots(self, request: web.Request) -> web.Response:
"""Return the list of unet roots from config"""
"""Return the list of unet roots from config (including extra paths)"""
try:
roots = config.unet_roots
# Merge unet_roots with extra_unet_roots, preserving order and removing duplicates
roots: List[str] = []
roots.extend(config.unet_roots or [])
roots.extend(config.extra_unet_roots or [])
# Remove duplicates while preserving order
seen: set = set()
unique_roots: List[str] = []
for root in roots:
if root and root not in seen:
seen.add(root)
unique_roots.append(root)
return web.json_response({
"success": True,
"roots": roots
"roots": unique_roots
})
except Exception as e:
logger.error(f"Error getting unet roots: {e}", exc_info=True)
||||
@@ -9,6 +9,7 @@ objects that can be composed by the route controller.
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
@@ -218,6 +219,45 @@ class HealthCheckHandler:
|
||||
return web.json_response({"status": "ok"})
|
||||
|
||||
|
||||
class SupportersHandler:
|
||||
"""Handler for supporters data."""
|
||||
|
||||
def __init__(self, logger: logging.Logger | None = None) -> None:
|
||||
self._logger = logger or logging.getLogger(__name__)
|
||||
|
||||
def _load_supporters(self) -> dict:
|
||||
"""Load supporters data from JSON file."""
|
||||
try:
|
||||
current_file = os.path.abspath(__file__)
|
||||
root_dir = os.path.dirname(
|
||||
os.path.dirname(os.path.dirname(os.path.dirname(current_file)))
|
||||
)
|
||||
supporters_path = os.path.join(root_dir, "data", "supporters.json")
|
||||
|
||||
if os.path.exists(supporters_path):
|
||||
with open(supporters_path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
except Exception as e:
|
||||
self._logger.debug(f"Failed to load supporters data: {e}")
|
||||
|
||||
return {
|
||||
"specialThanks": [],
|
||||
"allSupporters": [],
|
||||
"totalCount": 0
|
||||
}
|
||||
|
||||
async def get_supporters(self, request: web.Request) -> web.Response:
|
||||
"""Return supporters data as JSON."""
|
||||
try:
|
||||
supporters = self._load_supporters()
|
||||
return web.json_response({"success": True, "supporters": supporters})
|
||||
except Exception as exc:
|
||||
self._logger.error("Error loading supporters: %s", exc, exc_info=True)
|
||||
return web.json_response(
|
||||
{"success": False, "error": str(exc)}, status=500
|
||||
)
|
||||
|
||||
|
||||
class SettingsHandler:
|
||||
"""Sync settings between backend and frontend."""
|
||||
|
||||
@@ -1482,6 +1522,7 @@ class MiscHandlerSet:
|
||||
metadata_archive: MetadataArchiveHandler,
|
||||
filesystem: FileSystemHandler,
|
||||
custom_words: CustomWordsHandler,
|
||||
supporters: SupportersHandler,
|
||||
) -> None:
|
||||
self.health = health
|
||||
self.settings = settings
|
||||
@@ -1494,6 +1535,7 @@ class MiscHandlerSet:
|
||||
self.metadata_archive = metadata_archive
|
||||
self.filesystem = filesystem
|
||||
self.custom_words = custom_words
|
||||
self.supporters = supporters
|
||||
|
||||
def to_route_mapping(
|
||||
self,
|
||||
@@ -1522,6 +1564,7 @@ class MiscHandlerSet:
|
||||
"open_file_location": self.filesystem.open_file_location,
|
||||
"open_settings_location": self.filesystem.open_settings_location,
|
||||
"search_custom_words": self.custom_words.search_custom_words,
|
||||
"get_supporters": self.supporters.get_supporters,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -66,6 +66,23 @@ class ModelPageView:
|
||||
self._logger = logger
|
||||
self._app_version = self._get_app_version()
|
||||
|
||||
def _load_supporters(self) -> dict:
|
||||
"""Load supporters data from JSON file."""
|
||||
try:
|
||||
current_file = os.path.abspath(__file__)
|
||||
root_dir = os.path.dirname(
|
||||
os.path.dirname(os.path.dirname(os.path.dirname(current_file)))
|
||||
)
|
||||
supporters_path = os.path.join(root_dir, "data", "supporters.json")
|
||||
|
||||
if os.path.exists(supporters_path):
|
||||
with open(supporters_path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
except Exception as e:
|
||||
self._logger.debug(f"Failed to load supporters data: {e}")
|
||||
|
||||
return {"specialThanks": [], "allSupporters": [], "totalCount": 0}
|
||||
|
||||
def _get_app_version(self) -> str:
|
||||
version = "1.0.0"
|
||||
short_hash = "stable"
|
||||
@@ -383,20 +400,26 @@ class ModelManagementHandler:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Model not found in cache"}, status=404
|
||||
)
|
||||
|
||||
|
||||
# Check if hash needs to be calculated (lazy hash for checkpoints)
|
||||
sha256 = model_data.get("sha256")
|
||||
hash_status = model_data.get("hash_status", "completed")
|
||||
|
||||
|
||||
if not sha256 or hash_status != "completed":
|
||||
# For checkpoints, calculate hash on-demand
|
||||
scanner = self._service.scanner
|
||||
if hasattr(scanner, 'calculate_hash_for_model'):
|
||||
self._logger.info(f"Lazy hash calculation triggered for {file_path}")
|
||||
if hasattr(scanner, "calculate_hash_for_model"):
|
||||
self._logger.info(
|
||||
f"Lazy hash calculation triggered for {file_path}"
|
||||
)
|
||||
sha256 = await scanner.calculate_hash_for_model(file_path)
|
||||
if not sha256:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Failed to calculate SHA256 hash"}, status=500
|
||||
{
|
||||
"success": False,
|
||||
"error": "Failed to calculate SHA256 hash",
|
||||
},
|
||||
status=500,
|
||||
)
|
||||
# Update model_data with new hash
|
||||
model_data["sha256"] = sha256
|
||||
@@ -524,6 +547,153 @@ class ModelManagementHandler:
|
||||
self._logger.error("Error replacing preview: %s", exc, exc_info=True)
|
||||
return web.Response(text=str(exc), status=500)
|
||||
|
||||
async def set_preview_from_url(self, request: web.Request) -> web.Response:
|
||||
"""Set a preview image from a remote URL (e.g., CivitAI)."""
|
||||
try:
|
||||
from ...utils.civitai_utils import rewrite_preview_url
|
||||
from ...services.downloader import get_downloader
|
||||
|
||||
data = await request.json()
|
||||
model_path = data.get("model_path")
|
||||
image_url = data.get("image_url")
|
||||
nsfw_level = data.get("nsfw_level", 0)
|
||||
|
||||
if not model_path:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Model path is required"}, status=400
|
||||
)
|
||||
|
||||
if not image_url:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Image URL is required"}, status=400
|
||||
)
|
||||
|
||||
# Rewrite URL to use optimized rendition if it's a Civitai URL
|
||||
optimized_url, was_rewritten = rewrite_preview_url(
|
||||
image_url, media_type="image"
|
||||
)
|
||||
if was_rewritten and optimized_url:
|
||||
self._logger.info(
|
||||
f"Rewritten preview URL to optimized version: {optimized_url}"
|
||||
)
|
||||
else:
|
||||
optimized_url = image_url
|
||||
|
||||
# Download the image using the Downloader service
|
||||
self._logger.info(
|
||||
f"Downloading preview from {optimized_url} for {model_path}"
|
||||
)
|
||||
downloader = await get_downloader()
|
||||
success, preview_data, headers = await downloader.download_to_memory(
|
||||
optimized_url, use_auth=False, return_headers=True
|
||||
)
|
||||
|
||||
if not success:
|
||||
return web.json_response(
|
||||
{
|
||||
"success": False,
|
||||
"error": f"Failed to download image: {preview_data}",
|
||||
},
|
||||
status=502,
|
||||
)
|
||||
|
||||
# preview_data is bytes when success is True
|
||||
preview_bytes = (
|
||||
preview_data
|
||||
if isinstance(preview_data, bytes)
|
||||
else preview_data.encode("utf-8")
|
||||
)
|
||||
|
||||
# Determine content type from response headers
|
||||
content_type = (
|
||||
headers.get("Content-Type", "image/jpeg") if headers else "image/jpeg"
|
||||
)
|
||||
|
||||
# Extract original filename from URL
|
||||
original_filename = None
|
||||
if "?" in image_url:
|
||||
url_path = image_url.split("?")[0]
|
||||
else:
|
||||
url_path = image_url
|
||||
original_filename = url_path.split("/")[-1] if "/" in url_path else None
|
||||
|
||||
result = await self._preview_service.replace_preview(
|
||||
model_path=model_path,
|
||||
preview_data=preview_data,
|
||||
content_type=content_type,
|
||||
original_filename=original_filename,
|
||||
nsfw_level=nsfw_level,
|
||||
update_preview_in_cache=self._service.scanner.update_preview_in_cache,
|
||||
metadata_loader=self._metadata_sync.load_local_metadata,
|
||||
)
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
"success": True,
|
||||
"preview_url": config.get_preview_static_url(
|
||||
result["preview_path"]
|
||||
),
|
||||
"preview_nsfw_level": result["preview_nsfw_level"],
|
||||
}
|
||||
)
|
||||
except Exception as exc:
|
||||
self._logger.error("Error setting preview from URL: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
if not image_url:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Image URL is required"}, status=400
|
||||
)
|
||||
|
||||
# Download the image from the remote URL
|
||||
self._logger.info(f"Downloading preview from {image_url} for {model_path}")
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(image_url) as response:
|
||||
if response.status != 200:
|
||||
return web.json_response(
|
||||
{
|
||||
"success": False,
|
||||
"error": f"Failed to download image: HTTP {response.status}",
|
||||
},
|
||||
status=502,
|
||||
)
|
||||
|
||||
content_type = response.headers.get("Content-Type", "image/jpeg")
|
||||
preview_data = await response.read()
|
||||
|
||||
# Extract original filename from URL
|
||||
original_filename = None
|
||||
if "?" in image_url:
|
||||
url_path = image_url.split("?")[0]
|
||||
else:
|
||||
url_path = image_url
|
||||
original_filename = (
|
||||
url_path.split("/")[-1] if "/" in url_path else None
|
||||
)
|
||||
|
||||
result = await self._preview_service.replace_preview(
|
||||
model_path=model_path,
|
||||
preview_data=preview_bytes,
|
||||
content_type=content_type,
|
||||
original_filename=original_filename,
|
||||
nsfw_level=nsfw_level,
|
||||
update_preview_in_cache=self._service.scanner.update_preview_in_cache,
|
||||
metadata_loader=self._metadata_sync.load_local_metadata,
|
||||
)
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
"success": True,
|
||||
"preview_url": config.get_preview_static_url(
|
||||
result["preview_path"]
|
||||
),
|
||||
"preview_nsfw_level": result["preview_nsfw_level"],
|
||||
}
|
||||
)
|
||||
except Exception as exc:
|
||||
self._logger.error("Error setting preview from URL: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
async def save_metadata(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
data = await request.json()
|
||||
@@ -814,9 +984,7 @@ class ModelQueryHandler:
|
||||
# Format response
|
||||
group = {"hash": sha256, "models": []}
|
||||
for model in sorted_models:
|
||||
group["models"].append(
|
||||
await self._service.format_response(model)
|
||||
)
|
||||
group["models"].append(await self._service.format_response(model))
|
||||
|
||||
# Only include groups with 2+ models after filtering
|
||||
if len(group["models"]) > 1:
|
||||
@@ -845,7 +1013,9 @@ class ModelQueryHandler:
|
||||
"favorites_only": request.query.get("favorites_only", "").lower() == "true",
|
||||
}
|
||||
|
||||
def _apply_duplicate_filters(self, models: List[Dict[str, Any]], filters: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||
def _apply_duplicate_filters(
|
||||
self, models: List[Dict[str, Any]], filters: Dict[str, Any]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Apply filters to a list of models within a duplicate group."""
|
||||
result = models
|
||||
|
||||
@@ -886,7 +1056,9 @@ class ModelQueryHandler:
|
||||
|
||||
return result
|
||||
|
||||
def _sort_duplicate_group(self, models: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||
def _sort_duplicate_group(
|
||||
self, models: List[Dict[str, Any]]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Sort models: originals first (left), copies (with -????. pattern) last (right)."""
|
||||
if len(models) <= 1:
|
||||
return models
|
||||
@@ -1171,10 +1343,13 @@ class ModelDownloadHandler:
|
||||
data["source"] = source
|
||||
if file_params_json:
|
||||
import json
|
||||
|
||||
try:
|
||||
data["file_params"] = json.loads(file_params_json)
|
||||
except json.JSONDecodeError:
|
||||
self._logger.warning("Invalid file_params JSON: %s", file_params_json)
|
||||
self._logger.warning(
|
||||
"Invalid file_params JSON: %s", file_params_json
|
||||
)
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
future = loop.create_future()
|
||||
@@ -1905,7 +2080,8 @@ class ModelUpdateHandler:
|
||||
from dataclasses import replace
|
||||
|
||||
new_record = replace(
|
||||
record, versions=list(version_map.values()),
|
||||
record,
|
||||
versions=list(version_map.values()),
|
||||
)
|
||||
|
||||
# Optionally persist to database for caching
|
||||
@@ -2120,6 +2296,7 @@ class ModelUpdateHandler:
|
||||
if version.early_access_ends_at:
|
||||
try:
|
||||
from datetime import datetime, timezone
|
||||
|
||||
ea_date = datetime.fromisoformat(
|
||||
version.early_access_ends_at.replace("Z", "+00:00")
|
||||
)
|
||||
@@ -2127,7 +2304,7 @@ class ModelUpdateHandler:
|
||||
except (ValueError, AttributeError):
|
||||
# If date parsing fails, treat as active EA (conservative)
|
||||
is_early_access = True
|
||||
elif getattr(version, 'is_early_access', False):
|
||||
elif getattr(version, "is_early_access", False):
|
||||
# Fallback to basic EA flag from bulk API
|
||||
is_early_access = True
|
||||
|
||||
@@ -2207,6 +2384,7 @@ class ModelHandlerSet:
|
||||
"fetch_all_civitai": self.civitai.fetch_all_civitai,
|
||||
"relink_civitai": self.management.relink_civitai,
|
||||
"replace_preview": self.management.replace_preview,
|
||||
"set_preview_from_url": self.management.set_preview_from_url,
|
||||
"save_metadata": self.management.save_metadata,
|
||||
"add_tags": self.management.add_tags,
|
||||
"rename_model": self.management.rename_model,
|
||||
|
||||
@@ -26,6 +26,7 @@ MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"),
|
||||
RouteDefinition("POST", "/api/lm/settings/libraries/activate", "activate_library"),
|
||||
RouteDefinition("GET", "/api/lm/health-check", "health_check"),
|
||||
RouteDefinition("GET", "/api/lm/supporters", "get_supporters"),
|
||||
RouteDefinition("POST", "/api/lm/open-file-location", "open_file_location"),
|
||||
RouteDefinition("POST", "/api/lm/update-usage-stats", "update_usage_stats"),
|
||||
RouteDefinition("GET", "/api/lm/get-usage-stats", "get_usage_stats"),
|
||||
|
||||
@@ -29,6 +29,7 @@ from .handlers.misc_handlers import (
|
||||
NodeRegistry,
|
||||
NodeRegistryHandler,
|
||||
SettingsHandler,
|
||||
SupportersHandler,
|
||||
TrainedWordsHandler,
|
||||
UsageStatsHandler,
|
||||
build_service_registry_adapter,
|
||||
@@ -119,6 +120,7 @@ class MiscRoutes:
|
||||
metadata_provider_factory=self._metadata_provider_factory,
|
||||
)
|
||||
custom_words = CustomWordsHandler()
|
||||
supporters = SupportersHandler()
|
||||
|
||||
return self._handler_set_factory(
|
||||
health=health,
|
||||
@@ -132,6 +134,7 @@ class MiscRoutes:
|
||||
metadata_archive=metadata_archive,
|
||||
filesystem=filesystem,
|
||||
custom_words=custom_words,
|
||||
supporters=supporters,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Route registrar for model endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
@@ -27,6 +28,9 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/fetch-all-civitai", "fetch_all_civitai"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/relink-civitai", "relink_civitai"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/replace-preview", "replace_preview"),
|
||||
RouteDefinition(
|
||||
"POST", "/api/lm/{prefix}/set-preview-from-url", "set_preview_from_url"
|
||||
),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/save-metadata", "save_metadata"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/add-tags", "add_tags"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/rename", "rename_model"),
|
||||
@@ -36,7 +40,9 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/move_models_bulk", "move_models_bulk"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize-progress", "get_auto_organize_progress"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/auto-organize-progress", "get_auto_organize_progress"
|
||||
),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/top-tags", "get_top_tags"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/base-models", "get_base_models"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/model-types", "get_model_types"),
|
||||
@@ -44,30 +50,60 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/roots", "get_model_roots"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/folders", "get_folders"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/folder-tree", "get_folder_tree"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/unified-folder-tree", "get_unified_folder_tree"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/unified-folder-tree", "get_unified_folder_tree"
|
||||
),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/find-duplicates", "find_duplicate_models"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/find-filename-conflicts", "find_filename_conflicts"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/find-filename-conflicts", "find_filename_conflicts"
|
||||
),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/get-notes", "get_model_notes"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/preview-url", "get_model_preview_url"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai-url", "get_model_civitai_url"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/metadata", "get_model_metadata"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/model-description", "get_model_description"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/model-description", "get_model_description"
|
||||
),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/relative-paths", "get_relative_paths"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai/versions/{model_id}", "get_civitai_versions"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/version/{modelVersionId}", "get_civitai_model_by_version"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/hash/{hash}", "get_civitai_model_by_hash"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/refresh", "refresh_model_updates"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/fetch-missing-license", "fetch_missing_civitai_license_data"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/ignore", "set_model_update_ignore"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/ignore-version", "set_version_update_ignore"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/updates/status/{model_id}", "get_model_update_status"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/updates/versions/{model_id}", "get_model_versions"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/civitai/versions/{model_id}", "get_civitai_versions"
|
||||
),
|
||||
RouteDefinition(
|
||||
"GET",
|
||||
"/api/lm/{prefix}/civitai/model/version/{modelVersionId}",
|
||||
"get_civitai_model_by_version",
|
||||
),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/civitai/model/hash/{hash}", "get_civitai_model_by_hash"
|
||||
),
|
||||
RouteDefinition(
|
||||
"POST", "/api/lm/{prefix}/updates/refresh", "refresh_model_updates"
|
||||
),
|
||||
RouteDefinition(
|
||||
"POST",
|
||||
"/api/lm/{prefix}/updates/fetch-missing-license",
|
||||
"fetch_missing_civitai_license_data",
|
||||
),
|
||||
RouteDefinition(
|
||||
"POST", "/api/lm/{prefix}/updates/ignore", "set_model_update_ignore"
|
||||
),
|
||||
RouteDefinition(
|
||||
"POST", "/api/lm/{prefix}/updates/ignore-version", "set_version_update_ignore"
|
||||
),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/updates/status/{model_id}", "get_model_update_status"
|
||||
),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/{prefix}/updates/versions/{model_id}", "get_model_versions"
|
||||
),
|
||||
RouteDefinition("POST", "/api/lm/download-model", "download_model"),
|
||||
RouteDefinition("GET", "/api/lm/download-model-get", "download_model_get"),
|
||||
RouteDefinition("GET", "/api/lm/cancel-download-get", "cancel_download_get"),
|
||||
RouteDefinition("GET", "/api/lm/pause-download", "pause_download_get"),
|
||||
RouteDefinition("GET", "/api/lm/resume-download", "resume_download_get"),
|
||||
RouteDefinition("GET", "/api/lm/download-progress/{download_id}", "get_download_progress"),
|
||||
RouteDefinition(
|
||||
"GET", "/api/lm/download-progress/{download_id}", "get_download_progress"
|
||||
),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/cancel-task", "cancel_task"),
|
||||
RouteDefinition("GET", "/{prefix}", "handle_models_page"),
|
||||
)
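The definitions above rely on `RouteDefinition` exposing `method`, `handler_name`, and a `build_path(prefix)` helper (see the registrar below). A minimal sketch consistent with that usage — any field name beyond those three is an assumption:

from dataclasses import dataclass


@dataclass(frozen=True)
class RouteDefinition:
    method: str
    path_template: str  # name assumed; holds paths like "/api/lm/{prefix}/metadata"
    handler_name: str

    def build_path(self, prefix: str) -> str:
        # Substitute the model-type prefix (loras, checkpoints, embeddings, ...).
        return self.path_template.replace("{prefix}", prefix)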
|
||||
@@ -94,12 +130,18 @@ class ModelRouteRegistrar:
|
||||
definitions: Iterable[RouteDefinition] = COMMON_ROUTE_DEFINITIONS,
|
||||
) -> None:
|
||||
for definition in definitions:
|
||||
self._bind_route(definition.method, definition.build_path(prefix), handler_lookup[definition.handler_name])
|
||||
self._bind_route(
|
||||
definition.method,
|
||||
definition.build_path(prefix),
|
||||
handler_lookup[definition.handler_name],
|
||||
)
|
||||
|
||||
def add_route(self, method: str, path: str, handler: Callable) -> None:
|
||||
self._bind_route(method, path, handler)
|
||||
|
||||
def add_prefixed_route(self, method: str, path_template: str, prefix: str, handler: Callable) -> None:
|
||||
def add_prefixed_route(
|
||||
self, method: str, path_template: str, prefix: str, handler: Callable
|
||||
) -> None:
|
||||
self._bind_route(method, path_template.replace("{prefix}", prefix), handler)
|
||||
|
||||
def _bind_route(self, method: str, path: str, handler: Callable) -> None:
|
||||
|
||||
@@ -209,6 +209,80 @@ class StatsRoutes:
|
||||
'error': str(e)
|
||||
}, status=500)
|
||||
|
||||
async def get_model_usage_list(self, request: web.Request) -> web.Response:
|
||||
"""Get paginated model usage list for infinite scrolling"""
|
||||
try:
|
||||
await self.init_services()
|
||||
|
||||
model_type = request.query.get('type', 'lora')
|
||||
sort_order = request.query.get('sort', 'desc')
|
||||
|
||||
try:
|
||||
limit = int(request.query.get('limit', '50'))
|
||||
offset = int(request.query.get('offset', '0'))
|
||||
except ValueError:
|
||||
limit = 50
|
||||
offset = 0
|
||||
|
||||
# Get usage statistics
|
||||
usage_data = await self.usage_stats.get_stats()
|
||||
|
||||
# Select proper cache and usage dict based on type
|
||||
if model_type == 'lora':
|
||||
cache = await self.lora_scanner.get_cached_data()
|
||||
type_usage_data = usage_data.get('loras', {})
|
||||
elif model_type == 'checkpoint':
|
||||
cache = await self.checkpoint_scanner.get_cached_data()
|
||||
type_usage_data = usage_data.get('checkpoints', {})
|
||||
elif model_type == 'embedding':
|
||||
cache = await self.embedding_scanner.get_cached_data()
|
||||
type_usage_data = usage_data.get('embeddings', {})
|
||||
else:
|
||||
return web.json_response({'success': False, 'error': f"Invalid model type: {model_type}"}, status=400)
|
||||
|
||||
# Create list of all models
|
||||
all_models = []
|
||||
for item in cache.raw_data:
|
||||
sha256 = item.get('sha256')
|
||||
usage_info = type_usage_data.get(sha256, {}) if sha256 else {}
|
||||
usage_count = usage_info.get('total', 0) if isinstance(usage_info, dict) else 0
|
||||
|
||||
all_models.append({
|
||||
'name': item.get('model_name', 'Unknown'),
|
||||
'usage_count': usage_count,
|
||||
'base_model': item.get('base_model', 'Unknown'),
|
||||
'preview_url': config.get_preview_static_url(item.get('preview_url', '')),
|
||||
'folder': item.get('folder', '')
|
||||
})
|
||||
|
||||
            # Sort the models: usage_count drives the order, names break ties alphabetically
            reverse = (sort_order == 'desc')
            if not reverse:
                # Ascending: lowest usage first, names ascending within equal counts
                all_models.sort(key=lambda x: (x['usage_count'], x['name'].lower()))
            else:
                # Descending: negate usage_count so names still sort ascending within equal counts
                all_models.sort(key=lambda x: (-x['usage_count'], x['name'].lower()))
|
||||
|
||||
# Slice for pagination
|
||||
paginated_models = all_models[offset:offset + limit]
|
||||
|
||||
return web.json_response({
|
||||
'success': True,
|
||||
'data': {
|
||||
'items': paginated_models,
|
||||
'total': len(all_models),
|
||||
'type': model_type
|
||||
}
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting model usage list: {e}", exc_info=True)
|
||||
return web.json_response({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}, status=500)
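The endpoint is registered below as `/api/lm/stats/model-usage-list`; a front end paginates it by advancing `offset` until fewer than `limit` items come back. A hedged fetch sketch (host and port are placeholders):

import asyncio

import aiohttp


async def fetch_usage_page(offset: int = 0, limit: int = 50) -> dict:
    params = {"type": "lora", "sort": "desc", "limit": str(limit), "offset": str(offset)}
    url = "http://127.0.0.1:8188/api/lm/stats/model-usage-list"  # host/port assumed
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params) as resp:
            return await resp.json()


# Infinite scrolling keeps requesting the next offset until the returned 'items' page is short.
print(asyncio.run(fetch_usage_page()))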
|
||||
|
||||
async def get_base_model_distribution(self, request: web.Request) -> web.Response:
|
||||
"""Get base model distribution statistics"""
|
||||
try:
|
||||
@@ -530,6 +604,7 @@ class StatsRoutes:
|
||||
# Register API routes
|
||||
app.router.add_get('/api/lm/stats/collection-overview', self.get_collection_overview)
|
||||
app.router.add_get('/api/lm/stats/usage-analytics', self.get_usage_analytics)
|
||||
app.router.add_get('/api/lm/stats/model-usage-list', self.get_model_usage_list)
|
||||
app.router.add_get('/api/lm/stats/base-model-distribution', self.get_base_model_distribution)
|
||||
app.router.add_get('/api/lm/stats/tag-analytics', self.get_tag_analytics)
|
||||
app.router.add_get('/api/lm/stats/storage-analytics', self.get_storage_analytics)
|
||||
|
||||
@@ -125,16 +125,20 @@ class CacheEntryValidator:
|
||||
)
|
||||
|
||||
# Special validation: sha256 must not be empty for required field
|
||||
# BUT allow empty sha256 when hash_status is pending (lazy hash calculation)
|
||||
sha256 = working_entry.get('sha256', '')
|
||||
hash_status = working_entry.get('hash_status', 'completed')
|
||||
if not sha256 or (isinstance(sha256, str) and not sha256.strip()):
|
||||
errors.append("Required field 'sha256' is empty")
|
||||
# Cannot repair empty sha256 - entry is invalid
|
||||
return ValidationResult(
|
||||
is_valid=False,
|
||||
repaired=repaired,
|
||||
errors=errors,
|
||||
entry=working_entry if auto_repair else None
|
||||
)
|
||||
# Allow empty sha256 for lazy hash calculation (checkpoints)
|
||||
if hash_status != 'pending':
|
||||
errors.append("Required field 'sha256' is empty")
|
||||
# Cannot repair empty sha256 - entry is invalid
|
||||
return ValidationResult(
|
||||
is_valid=False,
|
||||
repaired=repaired,
|
||||
errors=errors,
|
||||
entry=working_entry if auto_repair else None
|
||||
)
|
||||
|
||||
# Normalize sha256 to lowercase if needed
|
||||
if isinstance(sha256, str):
|
||||
|
||||
@@ -3,36 +3,42 @@ import copy
|
||||
import logging
|
||||
import os
|
||||
from typing import Any, Optional, Dict, Tuple, List, Sequence
|
||||
from .model_metadata_provider import CivitaiModelMetadataProvider, ModelMetadataProviderManager
|
||||
from .model_metadata_provider import (
|
||||
CivitaiModelMetadataProvider,
|
||||
ModelMetadataProviderManager,
|
||||
)
|
||||
from .downloader import get_downloader
|
||||
from .errors import RateLimitError, ResourceNotFoundError
|
||||
from ..utils.civitai_utils import resolve_license_payload
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CivitaiClient:
|
||||
_instance = None
|
||||
_lock = asyncio.Lock()
|
||||
|
||||
|
||||
@classmethod
|
||||
async def get_instance(cls):
|
||||
"""Get singleton instance of CivitaiClient"""
|
||||
async with cls._lock:
|
||||
if cls._instance is None:
|
||||
cls._instance = cls()
|
||||
|
||||
|
||||
# Register this client as a metadata provider
|
||||
provider_manager = await ModelMetadataProviderManager.get_instance()
|
||||
provider_manager.register_provider('civitai', CivitaiModelMetadataProvider(cls._instance), True)
|
||||
|
||||
provider_manager.register_provider(
|
||||
"civitai", CivitaiModelMetadataProvider(cls._instance), True
|
||||
)
|
||||
|
||||
return cls._instance
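Callers obtain the shared client through `get_instance()` and then use the tuple-returning lookup helpers. A short usage sketch, assuming module context (`CivitaiClient` and `logger` as defined here); the hash literal is a placeholder:

async def lookup_example() -> None:
    client = await CivitaiClient.get_instance()
    version, error = await client.get_model_by_hash("0123456789abcdef" * 4)  # placeholder hash
    if error:
        logger.warning("Civitai lookup failed: %s", error)
    else:
        logger.info("Resolved version %s", version.get("id"))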
|
||||
|
||||
def __init__(self):
|
||||
# Check if already initialized for singleton pattern
|
||||
if hasattr(self, '_initialized'):
|
||||
if hasattr(self, "_initialized"):
|
||||
return
|
||||
self._initialized = True
|
||||
|
||||
|
||||
self.base_url = "https://civitai.com/api/v1"
|
||||
|
||||
async def _make_request(
|
||||
@@ -75,8 +81,10 @@ class CivitaiClient:
|
||||
meta = image.get("meta")
|
||||
if isinstance(meta, dict) and "comfy" in meta:
|
||||
meta.pop("comfy", None)
|
||||
|
||||
async def download_file(self, url: str, save_dir: str, default_filename: str, progress_callback=None) -> Tuple[bool, str]:
|
||||
|
||||
async def download_file(
|
||||
self, url: str, save_dir: str, default_filename: str, progress_callback=None
|
||||
) -> Tuple[bool, str]:
|
||||
"""Download file with resumable downloads and retry mechanism
|
||||
|
||||
Args:
|
||||
@@ -90,41 +98,48 @@ class CivitaiClient:
|
||||
"""
|
||||
downloader = await get_downloader()
|
||||
save_path = os.path.join(save_dir, default_filename)
|
||||
|
||||
|
||||
# Use unified downloader with CivitAI authentication
|
||||
success, result = await downloader.download_file(
|
||||
url=url,
|
||||
save_path=save_path,
|
||||
progress_callback=progress_callback,
|
||||
use_auth=True, # Enable CivitAI authentication
|
||||
allow_resume=True
|
||||
allow_resume=True,
|
||||
)
|
||||
|
||||
|
||||
return success, result
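A hedged usage sketch for the resumable download above — the URL and directory are placeholders, and the second return value is assumed to carry the saved path on success (an error message otherwise):

async def download_example(client: "CivitaiClient") -> None:
    ok, result = await client.download_file(
        url="https://civitai.com/api/download/models/12345",  # placeholder id
        save_dir="/tmp/models",  # placeholder directory
        default_filename="example.safetensors",
        progress_callback=None,  # callback signature not shown in this hunk, so omitted
    )
    if ok:
        logger.info("Saved to %s", result)
    else:
        logger.error("Download failed: %s", result)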
|
||||
|
||||
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
async def get_model_by_hash(
|
||||
self, model_hash: str
|
||||
) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
try:
|
||||
success, version = await self._make_request(
|
||||
'GET',
|
||||
"GET",
|
||||
f"{self.base_url}/model-versions/by-hash/{model_hash}",
|
||||
use_auth=True
|
||||
use_auth=True,
|
||||
)
|
||||
if not success:
|
||||
message = str(version)
|
||||
if "not found" in message.lower():
|
||||
return None, "Model not found"
|
||||
|
||||
logger.error("Failed to fetch model info for %s: %s", model_hash[:10], message)
|
||||
logger.error(
|
||||
"Failed to fetch model info for %s: %s", model_hash[:10], message
|
||||
)
|
||||
return None, message
|
||||
|
||||
model_id = version.get('modelId')
|
||||
if model_id:
|
||||
model_data = await self._fetch_model_data(model_id)
|
||||
if model_data:
|
||||
self._enrich_version_with_model_data(version, model_data)
|
||||
if isinstance(version, dict):
|
||||
model_id = version.get("modelId")
|
||||
if model_id:
|
||||
model_data = await self._fetch_model_data(model_id)
|
||||
if model_data:
|
||||
self._enrich_version_with_model_data(version, model_data)
|
||||
|
||||
self._remove_comfy_metadata(version)
|
||||
return version, None
|
||||
self._remove_comfy_metadata(version)
|
||||
return version, None
|
||||
else:
|
||||
return None, "Invalid response format"
|
||||
except RateLimitError:
|
||||
raise
|
||||
except Exception as exc:
|
||||
@@ -136,19 +151,19 @@ class CivitaiClient:
|
||||
downloader = await get_downloader()
|
||||
success, content, headers = await downloader.download_to_memory(
|
||||
image_url,
|
||||
use_auth=False # Preview images don't need auth
|
||||
use_auth=False, # Preview images don't need auth
|
||||
)
|
||||
if success:
|
||||
# Ensure directory exists
|
||||
os.makedirs(os.path.dirname(save_path), exist_ok=True)
|
||||
with open(save_path, 'wb') as f:
|
||||
with open(save_path, "wb") as f:
|
||||
f.write(content)
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Download Error: {str(e)}")
|
||||
return False
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _extract_error_message(payload: Any) -> str:
|
||||
"""Return a human-readable error message from an API payload."""
|
||||
@@ -175,19 +190,17 @@ class CivitaiClient:
|
||||
"""Get all versions of a model with local availability info"""
|
||||
try:
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/models/{model_id}",
|
||||
use_auth=True
|
||||
"GET", f"{self.base_url}/models/{model_id}", use_auth=True
|
||||
)
|
||||
if success:
|
||||
# Also return model type along with versions
|
||||
return {
|
||||
'modelVersions': result.get('modelVersions', []),
|
||||
'type': result.get('type', ''),
|
||||
'name': result.get('name', '')
|
||||
"modelVersions": result.get("modelVersions", []),
|
||||
"type": result.get("type", ""),
|
||||
"name": result.get("name", ""),
|
||||
}
|
||||
message = self._extract_error_message(result)
|
||||
if message and 'not found' in message.lower():
|
||||
if message and "not found" in message.lower():
|
||||
raise ResourceNotFoundError(f"Resource not found for model {model_id}")
|
||||
if message:
|
||||
raise RuntimeError(message)
|
||||
@@ -221,15 +234,15 @@ class CivitaiClient:
|
||||
try:
|
||||
query = ",".join(normalized_ids)
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
"GET",
|
||||
f"{self.base_url}/models",
|
||||
use_auth=True,
|
||||
params={'ids': query},
|
||||
params={"ids": query},
|
||||
)
|
||||
if not success:
|
||||
return None
|
||||
|
||||
items = result.get('items') if isinstance(result, dict) else None
|
||||
items = result.get("items") if isinstance(result, dict) else None
|
||||
if not isinstance(items, list):
|
||||
return {}
|
||||
|
||||
@@ -237,19 +250,19 @@ class CivitaiClient:
|
||||
for item in items:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
model_id = item.get('id')
|
||||
model_id = item.get("id")
|
||||
try:
|
||||
normalized_id = int(model_id)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
payload[normalized_id] = {
|
||||
'modelVersions': item.get('modelVersions', []),
|
||||
'type': item.get('type', ''),
|
||||
'name': item.get('name', ''),
|
||||
'allowNoCredit': item.get('allowNoCredit'),
|
||||
'allowCommercialUse': item.get('allowCommercialUse'),
|
||||
'allowDerivatives': item.get('allowDerivatives'),
|
||||
'allowDifferentLicense': item.get('allowDifferentLicense'),
|
||||
"modelVersions": item.get("modelVersions", []),
|
||||
"type": item.get("type", ""),
|
||||
"name": item.get("name", ""),
|
||||
"allowNoCredit": item.get("allowNoCredit"),
|
||||
"allowCommercialUse": item.get("allowCommercialUse"),
|
||||
"allowDerivatives": item.get("allowDerivatives"),
|
||||
"allowDifferentLicense": item.get("allowDifferentLicense"),
|
||||
}
|
||||
return payload
|
||||
except RateLimitError:
|
||||
@@ -257,8 +270,10 @@ class CivitaiClient:
|
||||
except Exception as exc:
|
||||
logger.error(f"Error fetching model versions in bulk: {exc}")
|
||||
return None
|
||||
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||
|
||||
async def get_model_version(
|
||||
self, model_id: int = None, version_id: int = None
|
||||
) -> Optional[Dict]:
|
||||
"""Get specific model version with additional metadata."""
|
||||
try:
|
||||
if model_id is None and version_id is not None:
|
||||
@@ -281,7 +296,7 @@ class CivitaiClient:
|
||||
if version is None:
|
||||
return None
|
||||
|
||||
model_id = version.get('modelId')
|
||||
model_id = version.get("modelId")
|
||||
if not model_id:
|
||||
logger.error(f"No modelId found in version {version_id}")
|
||||
return None
|
||||
@@ -293,7 +308,9 @@ class CivitaiClient:
|
||||
self._remove_comfy_metadata(version)
|
||||
return version
|
||||
|
||||
async def _get_version_with_model_id(self, model_id: int, version_id: Optional[int]) -> Optional[Dict]:
|
||||
async def _get_version_with_model_id(
|
||||
self, model_id: int, version_id: Optional[int]
|
||||
) -> Optional[Dict]:
|
||||
model_data = await self._fetch_model_data(model_id)
|
||||
if not model_data:
|
||||
return None
|
||||
@@ -302,8 +319,12 @@ class CivitaiClient:
|
||||
if target_version is None:
|
||||
return None
|
||||
|
||||
target_version_id = target_version.get('id')
|
||||
version = await self._fetch_version_by_id(target_version_id) if target_version_id else None
|
||||
target_version_id = target_version.get("id")
|
||||
version = (
|
||||
await self._fetch_version_by_id(target_version_id)
|
||||
if target_version_id
|
||||
else None
|
||||
)
|
||||
|
||||
if version is None:
|
||||
model_hash = self._extract_primary_model_hash(target_version)
|
||||
@@ -315,7 +336,9 @@ class CivitaiClient:
|
||||
)
|
||||
|
||||
if version is None:
|
||||
version = self._build_version_from_model_data(target_version, model_id, model_data)
|
||||
version = self._build_version_from_model_data(
|
||||
target_version, model_id, model_data
|
||||
)
|
||||
|
||||
self._enrich_version_with_model_data(version, model_data)
|
||||
self._remove_comfy_metadata(version)
|
||||
@@ -323,9 +346,7 @@ class CivitaiClient:
|
||||
|
||||
async def _fetch_model_data(self, model_id: int) -> Optional[Dict]:
|
||||
success, data = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/models/{model_id}",
|
||||
use_auth=True
|
||||
"GET", f"{self.base_url}/models/{model_id}", use_auth=True
|
||||
)
|
||||
if success:
|
||||
return data
|
||||
@@ -337,9 +358,7 @@ class CivitaiClient:
|
||||
return None
|
||||
|
||||
success, version = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/model-versions/{version_id}",
|
||||
use_auth=True
|
||||
"GET", f"{self.base_url}/model-versions/{version_id}", use_auth=True
|
||||
)
|
||||
if success:
|
||||
return version
|
||||
@@ -352,9 +371,7 @@ class CivitaiClient:
|
||||
return None
|
||||
|
||||
success, version = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/model-versions/by-hash/{model_hash}",
|
||||
use_auth=True
|
||||
"GET", f"{self.base_url}/model-versions/by-hash/{model_hash}", use_auth=True
|
||||
)
|
||||
if success:
|
||||
return version
|
||||
@@ -362,16 +379,17 @@ class CivitaiClient:
|
||||
logger.warning(f"Failed to fetch version by hash {model_hash}")
|
||||
return None
|
||||
|
||||
def _select_target_version(self, model_data: Dict, model_id: int, version_id: Optional[int]) -> Optional[Dict]:
|
||||
model_versions = model_data.get('modelVersions', [])
|
||||
def _select_target_version(
|
||||
self, model_data: Dict, model_id: int, version_id: Optional[int]
|
||||
) -> Optional[Dict]:
|
||||
model_versions = model_data.get("modelVersions", [])
|
||||
if not model_versions:
|
||||
logger.warning(f"No model versions found for model {model_id}")
|
||||
return None
|
||||
|
||||
if version_id is not None:
|
||||
target_version = next(
|
||||
(item for item in model_versions if item.get('id') == version_id),
|
||||
None
|
||||
(item for item in model_versions if item.get("id") == version_id), None
|
||||
)
|
||||
if target_version is None:
|
||||
logger.warning(
|
||||
@@ -383,46 +401,50 @@ class CivitaiClient:
|
||||
return model_versions[0]
|
||||
|
||||
def _extract_primary_model_hash(self, version_entry: Dict) -> Optional[str]:
|
||||
for file_info in version_entry.get('files', []):
|
||||
if file_info.get('type') == 'Model' and file_info.get('primary'):
|
||||
hashes = file_info.get('hashes', {})
|
||||
model_hash = hashes.get('SHA256')
|
||||
for file_info in version_entry.get("files", []):
|
||||
if file_info.get("type") == "Model" and file_info.get("primary"):
|
||||
hashes = file_info.get("hashes", {})
|
||||
model_hash = hashes.get("SHA256")
|
||||
if model_hash:
|
||||
return model_hash
|
||||
return None
|
||||
|
||||
def _build_version_from_model_data(self, version_entry: Dict, model_id: int, model_data: Dict) -> Dict:
|
||||
def _build_version_from_model_data(
|
||||
self, version_entry: Dict, model_id: int, model_data: Dict
|
||||
) -> Dict:
|
||||
version = copy.deepcopy(version_entry)
|
||||
version.pop('index', None)
|
||||
version['modelId'] = model_id
|
||||
version['model'] = {
|
||||
'name': model_data.get('name'),
|
||||
'type': model_data.get('type'),
|
||||
'nsfw': model_data.get('nsfw'),
|
||||
'poi': model_data.get('poi')
|
||||
version.pop("index", None)
|
||||
version["modelId"] = model_id
|
||||
version["model"] = {
|
||||
"name": model_data.get("name"),
|
||||
"type": model_data.get("type"),
|
||||
"nsfw": model_data.get("nsfw"),
|
||||
"poi": model_data.get("poi"),
|
||||
}
|
||||
return version
|
||||
|
||||
def _enrich_version_with_model_data(self, version: Dict, model_data: Dict) -> None:
|
||||
model_info = version.get('model')
|
||||
model_info = version.get("model")
|
||||
if not isinstance(model_info, dict):
|
||||
model_info = {}
|
||||
version['model'] = model_info
|
||||
version["model"] = model_info
|
||||
|
||||
model_info['description'] = model_data.get("description")
|
||||
model_info['tags'] = model_data.get("tags", [])
|
||||
version['creator'] = model_data.get("creator")
|
||||
model_info["description"] = model_data.get("description")
|
||||
model_info["tags"] = model_data.get("tags", [])
|
||||
version["creator"] = model_data.get("creator")
|
||||
|
||||
license_payload = resolve_license_payload(model_data)
|
||||
for field, value in license_payload.items():
|
||||
model_info[field] = value
|
||||
|
||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
async def get_model_version_info(
|
||||
self, version_id: str
|
||||
) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
"""Fetch model version metadata from Civitai
|
||||
|
||||
|
||||
Args:
|
||||
version_id: The Civitai model version ID
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple[Optional[Dict], Optional[str]]: A tuple containing:
|
||||
- The model version data or None if not found
|
||||
@@ -430,25 +452,23 @@ class CivitaiClient:
|
||||
"""
|
||||
try:
|
||||
url = f"{self.base_url}/model-versions/{version_id}"
|
||||
|
||||
|
||||
logger.debug(f"Resolving DNS for model version info: {url}")
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
url,
|
||||
use_auth=True
|
||||
)
|
||||
|
||||
success, result = await self._make_request("GET", url, use_auth=True)
|
||||
|
||||
if success:
|
||||
logger.debug(f"Successfully fetched model version info for: {version_id}")
|
||||
logger.debug(
|
||||
f"Successfully fetched model version info for: {version_id}"
|
||||
)
|
||||
self._remove_comfy_metadata(result)
|
||||
return result, None
|
||||
|
||||
|
||||
# Handle specific error cases
|
||||
if "not found" in str(result):
|
||||
error_msg = f"Model not found"
|
||||
logger.warning(f"Model version not found: {version_id} - {error_msg}")
|
||||
return None, error_msg
|
||||
|
||||
|
||||
# Other error cases
|
||||
logger.error(f"Failed to fetch model info for {version_id}: {result}")
|
||||
return None, str(result)
|
||||
@@ -464,27 +484,23 @@ class CivitaiClient:
|
||||
|
||||
Args:
|
||||
image_id: The Civitai image ID
|
||||
|
||||
|
||||
Returns:
|
||||
Optional[Dict]: The image data or None if not found
|
||||
"""
|
||||
try:
|
||||
url = f"{self.base_url}/images?imageId={image_id}&nsfw=X"
|
||||
|
||||
|
||||
logger.debug(f"Fetching image info for ID: {image_id}")
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
url,
|
||||
use_auth=True
|
||||
)
|
||||
|
||||
success, result = await self._make_request("GET", url, use_auth=True)
|
||||
|
||||
if success:
|
||||
if result and "items" in result and len(result["items"]) > 0:
|
||||
logger.debug(f"Successfully fetched image info for ID: {image_id}")
|
||||
return result["items"][0]
|
||||
logger.warning(f"No image found with ID: {image_id}")
|
||||
return None
|
||||
|
||||
|
||||
logger.error(f"Failed to fetch image info for ID: {image_id}: {result}")
|
||||
return None
|
||||
except RateLimitError:
|
||||
@@ -501,11 +517,7 @@ class CivitaiClient:
|
||||
|
||||
try:
|
||||
url = f"{self.base_url}/models?username={username}"
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
url,
|
||||
use_auth=True
|
||||
)
|
||||
success, result = await self._make_request("GET", url, use_auth=True)
|
||||
|
||||
if not success:
|
||||
logger.error("Failed to fetch models for %s: %s", username, result)
|
||||
|
||||
@@ -47,8 +47,7 @@ class BulkMetadataRefreshUseCase:
|
||||
to_process: Sequence[Dict[str, Any]] = [
|
||||
model
|
||||
for model in cache.raw_data
|
||||
if model.get("sha256")
|
||||
and not model.get("skip_metadata_refresh", False)
|
||||
if not model.get("skip_metadata_refresh", False)
|
||||
and not self._is_in_skip_path(model.get("folder", ""), skip_paths)
|
||||
and (not model.get("civitai") or not model["civitai"].get("id"))
|
||||
and not (
|
||||
@@ -85,6 +84,36 @@ class BulkMetadataRefreshUseCase:
|
||||
return {"success": False, "message": "Operation cancelled", "processed": processed, "updated": success, "total": total_models}
|
||||
try:
|
||||
original_name = model.get("model_name")
|
||||
|
||||
# Handle lazy hash calculation for models with pending hash status
|
||||
sha256 = model.get("sha256", "")
|
||||
hash_status = model.get("hash_status", "completed")
|
||||
file_path = model.get("file_path")
|
||||
|
||||
if not sha256 and hash_status == "pending" and file_path:
|
||||
self._logger.info(f"Calculating pending hash for {file_path}")
|
||||
# Check if scanner has calculate_hash_for_model method (CheckpointScanner)
|
||||
calculate_hash_method = getattr(self._service.scanner, "calculate_hash_for_model", None)
|
||||
if calculate_hash_method:
|
||||
sha256 = await calculate_hash_method(file_path)
|
||||
if sha256:
|
||||
model["sha256"] = sha256
|
||||
model["hash_status"] = "completed"
|
||||
else:
|
||||
self._logger.error(f"Failed to calculate hash for {file_path}")
|
||||
processed += 1
|
||||
continue
|
||||
else:
|
||||
self._logger.warning(f"Scanner does not support lazy hash calculation for {file_path}")
|
||||
processed += 1
|
||||
continue
|
||||
|
||||
# Skip models without valid hash
|
||||
if not model.get("sha256"):
|
||||
self._logger.warning(f"Skipping model without hash: {file_path}")
|
||||
processed += 1
|
||||
continue
|
||||
|
||||
await MetadataManager.hydrate_model_data(model)
|
||||
result, _ = await self._metadata_sync.fetch_and_update_model(
|
||||
sha256=model["sha256"],
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[project]
|
||||
name = "comfyui-lora-manager"
|
||||
description = "Revolutionize your workflow with the ultimate LoRA companion for ComfyUI!"
|
||||
version = "0.9.16"
|
||||
version = "1.0.0"
|
||||
license = {file = "LICENSE"}
|
||||
dependencies = [
|
||||
"aiohttp",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
[pytest]
|
||||
addopts = -v --import-mode=importlib
|
||||
addopts = -v --import-mode=importlib -m "not performance"
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
python_classes = Test*
|
||||
@@ -12,5 +12,6 @@ markers =
|
||||
asyncio: execute test within asyncio event loop
|
||||
no_settings_dir_isolation: allow tests to use real settings paths
|
||||
integration: integration tests requiring external resources
|
||||
performance: performance benchmarks (slow, skip by default)
|
||||
# Skip problematic directories to avoid import conflicts
|
||||
norecursedirs = .git .tox dist build *.egg __pycache__ py .hypothesis
|
||||
63
scripts/update_supporters.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
def update_readme():
|
||||
# 1. Read JSON data
|
||||
json_path = 'data/supporters.json'
|
||||
if not os.path.exists(json_path):
|
||||
print(f"Error: {json_path} not found.")
|
||||
return
|
||||
|
||||
with open(json_path, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
|
||||
# 2. Generate Markdown content
|
||||
special_thanks = data.get('specialThanks', [])
|
||||
all_supporters = data.get('allSupporters', [])
|
||||
total_count = data.get('totalCount', len(all_supporters))
|
||||
|
||||
md_content = "\n### 🌟 Special Thanks\n\n"
|
||||
if special_thanks:
|
||||
md_content += ", ".join([f"**{name}**" for name in special_thanks]) + "\n\n"
|
||||
else:
|
||||
md_content += "*None yet*\n\n"
|
||||
|
||||
md_content += f"### 💖 Supporters ({total_count})\n\n"
|
||||
if all_supporters:
|
||||
# Using a details block for the long list of supporters
|
||||
md_content += "<details>\n<summary>Click to view all awesome supporters</summary>\n<br>\n\n"
|
||||
md_content += ", ".join(all_supporters)
|
||||
md_content += "\n\n</details>\n"
|
||||
else:
|
||||
md_content += "*No supporters listed yet*\n"
|
||||
|
||||
# 3. Read existing README.md
|
||||
readme_path = 'README.md'
|
||||
with open(readme_path, 'r', encoding='utf-8') as f:
|
||||
readme = f.read()
|
||||
|
||||
# 4. Replace content between placeholders
|
||||
start_tag = '<!-- SUPPORTERS-START -->'
|
||||
end_tag = '<!-- SUPPORTERS-END -->'
|
||||
|
||||
if start_tag not in readme or end_tag not in readme:
|
||||
print(f"Error: Placeholders {start_tag} and {end_tag} not found in {readme_path}")
|
||||
return
|
||||
|
||||
# Using non-regex replacement to avoid issues with special characters in names
|
||||
parts = readme.split(start_tag)
|
||||
before_start = parts[0]
|
||||
after_start = parts[1].split(end_tag)
|
||||
after_end = after_start[1]
|
||||
|
||||
new_readme = f"{before_start}{start_tag}\n{md_content}\n{end_tag}{after_end}"
|
||||
|
||||
# 5. Write back to README.md
|
||||
with open(readme_path, 'w', encoding='utf-8') as f:
|
||||
f.write(new_readme)
|
||||
|
||||
print(f"Successfully updated {readme_path} with {len(all_supporters)} supporters!")
|
||||
|
||||
if __name__ == '__main__':
|
||||
update_readme()
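The script only rewrites the region between the two placeholder comments, so README.md must already contain them; a minimal region it can update looks like this (the content between the markers is regenerated on every run):

<!-- SUPPORTERS-START -->
(generated supporters section)
<!-- SUPPORTERS-END -->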
|
||||
@@ -68,6 +68,7 @@ body {
|
||||
--space-1: calc(8px * 1);
|
||||
--space-2: calc(8px * 2);
|
||||
--space-3: calc(8px * 3);
|
||||
--space-4: calc(8px * 4);
|
||||
|
||||
/* Z-index Scale */
|
||||
--z-base: 10;
|
||||
@@ -77,6 +78,7 @@ body {
|
||||
|
||||
/* Border Radius */
|
||||
--border-radius-base: 12px;
|
||||
--border-radius-md: 12px;
|
||||
--border-radius-sm: 8px;
|
||||
--border-radius-xs: 4px;
|
||||
|
||||
|
||||
@@ -1,6 +1,26 @@
|
||||
/* Support Modal Styles */
|
||||
.support-modal {
|
||||
max-width: 570px;
|
||||
max-width: 1000px;
|
||||
width: 90vw;
|
||||
}
|
||||
|
||||
/* Two-column layout */
|
||||
.support-container {
|
||||
display: flex;
|
||||
gap: var(--space-3);
|
||||
min-height: 500px;
|
||||
}
|
||||
|
||||
.support-left {
|
||||
flex: 0 0 42%;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.support-right {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
border-left: 1px solid var(--lora-border);
|
||||
padding-left: var(--space-4);
|
||||
}
|
||||
|
||||
.support-header {
|
||||
@@ -214,6 +234,11 @@
|
||||
.support-links {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.support-modal {
|
||||
width: 95vw;
|
||||
max-width: 95vw;
|
||||
}
|
||||
}
|
||||
|
||||
/* Civitai link styles */
|
||||
@@ -239,4 +264,223 @@
|
||||
.folder-item:hover {
|
||||
border-color: var(--lora-accent);
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
/* Supporters Section Styles */
|
||||
.supporters-section {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.supporters-header {
|
||||
margin-bottom: var(--space-4);
|
||||
}
|
||||
|
||||
.supporters-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
margin: 0 0 var(--space-1) 0;
|
||||
font-size: 1.3em !important;
|
||||
color: var(--lora-accent) !important;
|
||||
}
|
||||
|
||||
.supporters-title i {
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.supporters-subtitle {
|
||||
margin: 0;
|
||||
font-size: 0.95em;
|
||||
color: var(--text-color);
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.supporters-group {
|
||||
margin-bottom: var(--space-3);
|
||||
}
|
||||
|
||||
.supporters-group-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin: 0 0 var(--space-2) 0;
|
||||
font-size: 1em;
|
||||
color: var(--text-color);
|
||||
opacity: 0.8;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.supporters-group-title i {
|
||||
color: var(--lora-accent);
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
/* Special Thanks - Clean Card Style */
|
||||
.special-thanks-group {
|
||||
margin-bottom: var(--space-4);
|
||||
}
|
||||
|
||||
.special-thanks-group .supporters-group-title {
|
||||
margin-bottom: var(--space-3);
|
||||
}
|
||||
|
||||
.special-thanks-group .supporters-group-title i {
|
||||
color: #fbbf24;
|
||||
}
|
||||
|
||||
.all-supporters-group .supporters-group-title i {
|
||||
color: var(--lora-error);
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.supporters-special-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.supporter-special-card {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: var(--space-2) var(--space-3);
|
||||
background: var(--card-bg);
|
||||
border: 1px solid var(--border-color);
|
||||
border-left: 3px solid var(--lora-accent);
|
||||
border-radius: var(--border-radius-sm);
|
||||
transition: all 0.2s ease;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.supporter-special-card:hover {
|
||||
border-color: var(--lora-accent);
|
||||
border-left-color: var(--lora-accent);
|
||||
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.08);
|
||||
transform: translateX(4px);
|
||||
}
|
||||
|
||||
.supporter-special-card .supporter-special-name {
|
||||
font-size: 1em;
|
||||
font-weight: 500;
|
||||
color: var(--text-color);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.supporter-special-card:hover .supporter-special-name {
|
||||
color: var(--lora-accent);
|
||||
}
|
||||
|
||||
/* All Supporters - Elegant Text Flow */
|
||||
.all-supporters-group {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
position: relative; /* Base for masks */
|
||||
}
|
||||
|
||||
/* Optional: Fading effect for credits feel at top and bottom */
|
||||
.all-supporters-group::before,
|
||||
.all-supporters-group::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 40px;
|
||||
pointer-events: none;
|
||||
z-index: 2;
|
||||
}
|
||||
|
||||
.all-supporters-group::before {
|
||||
top: 30px; /* Below the title */
|
||||
background: linear-gradient(to bottom, var(--lora-surface), transparent);
|
||||
}
|
||||
|
||||
.all-supporters-group::after {
|
||||
bottom: 0;
|
||||
background: linear-gradient(to top, var(--lora-surface), transparent);
|
||||
}
|
||||
|
||||
.all-supporters-group .supporters-group-title {
|
||||
margin-bottom: var(--space-2);
|
||||
}
|
||||
|
||||
.supporters-all-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: baseline;
|
||||
line-height: 2.2;
|
||||
max-height: 550px;
|
||||
overflow-y: auto;
|
||||
padding: var(--space-2) 0 40px 0; /* Extra padding at bottom for final visibility */
|
||||
color: var(--text-color);
|
||||
scroll-behavior: auto; /* Ensure manual scroll is immediate */
|
||||
}
|
||||
|
||||
/* Subtle scrollbar for credits look */
|
||||
.supporters-all-list::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
}
|
||||
|
||||
.supporters-all-list::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.supporters-all-list::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.05);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.supporters-all-list:hover::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.15);
|
||||
}
|
||||
|
||||
.supporter-name-item {
|
||||
font-size: 0.95em;
|
||||
color: var(--text-color);
|
||||
opacity: 0.85;
|
||||
transition: all 0.2s ease;
|
||||
white-space: nowrap;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.supporter-name-item:hover {
|
||||
opacity: 1;
|
||||
color: var(--lora-accent);
|
||||
}
|
||||
|
||||
.supporter-separator {
|
||||
margin: 0 10px;
|
||||
color: var(--text-color);
|
||||
opacity: 0.25;
|
||||
font-weight: 300;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
/* Responsive adjustments */
|
||||
@media (max-width: 768px) {
|
||||
.support-container {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.support-left {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.support-right {
|
||||
border-left: none;
|
||||
border-top: 1px solid var(--lora-border);
|
||||
padding-left: 0;
|
||||
padding-top: var(--space-3);
|
||||
}
|
||||
|
||||
.supporters-all-list {
|
||||
max-height: 200px;
|
||||
}
|
||||
|
||||
.supporters-special-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
@@ -250,12 +250,11 @@
|
||||
.changelog-content {
|
||||
max-height: 550px;
|
||||
overflow-y: auto;
|
||||
padding-left: var(--space-3);
|
||||
}
|
||||
|
||||
.changelog-item {
|
||||
margin-bottom: var(--space-2);
|
||||
padding-bottom: var(--space-2);
|
||||
padding: var(--space-2);
|
||||
border-bottom: 1px solid var(--lora-border);
|
||||
}
|
||||
|
||||
@@ -302,8 +301,7 @@
|
||||
|
||||
.changelog-item.latest {
|
||||
background-color: rgba(66, 153, 225, 0.05);
|
||||
border-radius: var(--border-radius-sm);
|
||||
padding: var(--space-2);
|
||||
border-radius: var(--border-radius-sm);
|
||||
border: 1px solid rgba(66, 153, 225, 0.2);
|
||||
}
|
||||
|
||||
|
||||
@@ -573,3 +573,171 @@
|
||||
.sidebar-tree-container::-webkit-scrollbar-thumb:hover {
|
||||
background: var(--text-muted);
|
||||
}
|
||||
|
||||
/* ===== Drag and Drop - Create Folder Zone ===== */
|
||||
|
||||
/* Empty state drag hint */
|
||||
.sidebar-empty-hint {
|
||||
margin-top: 12px;
|
||||
font-size: 0.8em;
|
||||
color: var(--text-muted);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 6px;
|
||||
padding: 8px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.05);
|
||||
border: 1px dashed oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.2);
|
||||
}
|
||||
|
||||
.sidebar-empty-hint i {
|
||||
font-size: 0.9em;
|
||||
opacity: 0.8;
|
||||
margin: 0;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
/* Create folder drop zone */
|
||||
.sidebar-create-folder-zone {
|
||||
position: absolute;
|
||||
bottom: 16px;
|
||||
left: 16px;
|
||||
right: 16px;
|
||||
padding: 16px;
|
||||
border: 2px dashed oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.4);
|
||||
border-radius: var(--border-radius-xs);
|
||||
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.08);
|
||||
opacity: 0;
|
||||
transform: translateY(10px);
|
||||
transition: all 0.2s ease;
|
||||
pointer-events: none;
|
    z-index: 10;
}

.sidebar-create-folder-zone.active {
    opacity: 1;
    transform: translateY(0);
}

.sidebar-create-folder-content {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: 8px;
    color: var(--lora-accent);
    font-size: 0.85em;
    text-align: center;
}

.sidebar-create-folder-content i {
    font-size: 1.5em;
    opacity: 0.8;
}

/* Create folder input container */
.sidebar-create-folder-input-container {
    position: absolute;
    bottom: 16px;
    left: 16px;
    right: 16px;
    padding: 12px;
    background: var(--bg-color);
    border: 1px solid var(--border-color);
    border-radius: var(--border-radius-xs);
    box-shadow: 0 3px 8px rgba(0, 0, 0, 0.15);
    z-index: 20;
    animation: slideUp 0.2s ease;
}

@keyframes slideUp {
    from {
        opacity: 0;
        transform: translateY(10px);
    }
    to {
        opacity: 1;
        transform: translateY(0);
    }
}

.sidebar-create-folder-input-wrapper {
    display: flex;
    align-items: center;
    gap: 8px;
}

.sidebar-create-folder-input-wrapper > i {
    color: var(--lora-accent);
    font-size: 1em;
}

.sidebar-create-folder-input {
    flex: 1;
    padding: 6px 10px;
    border: 1px solid var(--border-color);
    border-radius: var(--border-radius-xs);
    background: var(--bg-color);
    color: var(--text-color);
    font-size: 0.85em;
    outline: none;
    transition: all 0.2s ease;
}

.sidebar-create-folder-input:focus {
    border-color: var(--lora-accent);
    box-shadow: 0 0 0 2px oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.15);
}

.sidebar-create-folder-btn {
    width: 28px;
    height: 28px;
    display: flex;
    align-items: center;
    justify-content: center;
    border: none;
    border-radius: var(--border-radius-xs);
    cursor: pointer;
    transition: all 0.2s ease;
    background: transparent;
    color: var(--text-muted);
}

.sidebar-create-folder-btn:hover {
    background: var(--lora-surface);
    color: var(--text-color);
}

.sidebar-create-folder-confirm:hover {
    background: oklch(from var(--success-color) l c h / 0.15);
    color: var(--success-color);
}

.sidebar-create-folder-cancel:hover {
    background: oklch(from var(--error-color) l c h / 0.15);
    color: var(--error-color);
}

.sidebar-create-folder-hint {
    margin-top: 6px;
    font-size: 0.75em;
    color: var(--text-muted);
    text-align: center;
    opacity: 0.8;
}

/* Dragging state for sidebar */
.folder-sidebar.dragging-active {
    border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.5);
    box-shadow: 0 0 0 3px oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1),
        0 2px 8px rgba(0, 0, 0, 0.08);
}

.folder-sidebar.dragging-active .sidebar-tree-container {
    background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.02);
}

/* Tree container positioning for create folder elements */
.sidebar-tree-container {
    position: relative;
}

@@ -196,6 +196,9 @@
    display: flex;
    flex-direction: column;
    gap: 8px;
    max-height: 400px;
    overflow-y: auto;
    padding-right: 4px;
}

.model-item {

@@ -86,6 +86,7 @@ export function getApiEndpoints(modelType) {

        // Preview management
        replacePreview: `/api/lm/${modelType}/replace-preview`,
        setPreviewFromUrl: `/api/lm/${modelType}/set-preview-from-url`,

        // Query operations
        scan: `/api/lm/${modelType}/scan`,

@@ -307,6 +307,56 @@ export class BaseModelApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a preview from a remote URL (e.g., CivitAI)
|
||||
* @param {string} filePath - Path to the model file
|
||||
* @param {string} imageUrl - Remote image URL
|
||||
* @param {number} nsfwLevel - NSFW level for the preview
|
||||
*/
|
||||
async setPreviewFromUrl(filePath, imageUrl, nsfwLevel = 0) {
|
||||
try {
|
||||
state.loadingManager.showSimpleLoading('Setting preview from URL...');
|
||||
|
||||
const response = await fetch(this.apiConfig.endpoints.setPreviewFromUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
model_path: filePath,
|
||||
image_url: imageUrl,
|
||||
nsfw_level: nsfwLevel
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to set preview from URL');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const pageState = this.getPageState();
|
||||
|
||||
const timestamp = Date.now();
|
||||
if (pageState.previewVersions) {
|
||||
pageState.previewVersions.set(filePath, timestamp);
|
||||
|
||||
const storageKey = `${this.modelType}_preview_versions`;
|
||||
saveMapToStorage(storageKey, pageState.previewVersions);
|
||||
}
|
||||
|
||||
const updateData = {
|
||||
preview_url: data.preview_url,
|
||||
preview_nsfw_level: data.preview_nsfw_level
|
||||
};
|
||||
|
||||
state.virtualScroller.updateSingleItem(filePath, updateData);
|
||||
showToast('toast.api.previewUpdated', {}, 'success');
|
||||
} catch (error) {
|
||||
console.error('Error setting preview from URL:', error);
|
||||
showToast('toast.api.previewUploadFailed', {}, 'error');
|
||||
} finally {
|
||||
state.loadingManager.hide();
|
||||
}
|
||||
}
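
// Usage sketch for the method above; the client accessor, file path and image
// URL are illustrative placeholders:
//
//   const client = getModelApiClient();
//   await client.setPreviewFromUrl(
//       'loras/characters/example.safetensors',
//       'https://image.civitai.com/xxx/original=true/preview.jpeg',
//       0 // nsfw level
//   );
//
// The backend downloads the remote image itself, so the browser avoids the
// CORS restrictions a direct fetch of the CivitAI URL could hit.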
|
||||
|
||||
async saveModelMetadata(filePath, data) {
|
||||
try {
|
||||
state.loadingManager.showSimpleLoading('Saving metadata...');
|
||||
|
||||
@@ -5,6 +5,7 @@ import { FilterManager } from '../managers/FilterManager.js';
|
||||
import { initPageState } from '../state/index.js';
|
||||
import { getStorageItem } from '../utils/storageHelpers.js';
|
||||
import { updateElementAttribute } from '../utils/i18nHelpers.js';
|
||||
import { renderSupporters } from '../services/supportersService.js';
|
||||
|
||||
/**
|
||||
* Header.js - Manages the application header behavior across different pages
|
||||
@@ -85,9 +86,15 @@ export class HeaderManager {
|
||||
// Handle support toggle
|
||||
const supportToggle = document.getElementById('supportToggleBtn');
|
||||
if (supportToggle) {
|
||||
supportToggle.addEventListener('click', () => {
|
||||
supportToggle.addEventListener('click', async () => {
|
||||
if (window.modalManager) {
|
||||
window.modalManager.toggleModal('supportModal');
|
||||
// Load supporters data when modal opens
|
||||
try {
|
||||
await renderSupporters();
|
||||
} catch (error) {
|
||||
console.error('Error loading supporters:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ class RecipeCard {
|
||||
createCardElement() {
|
||||
const card = document.createElement('div');
|
||||
card.className = 'model-card';
|
||||
card.draggable = true;
|
||||
card.dataset.filepath = this.recipe.file_path;
|
||||
card.dataset.title = this.recipe.title;
|
||||
card.dataset.nsfwLevel = this.recipe.preview_nsfw_level || 0;
|
||||
|
||||
@@ -29,11 +29,14 @@ export class SidebarManager {
|
||||
this.draggedRootPath = null;
|
||||
this.draggedFromBulk = false;
|
||||
this.dragHandlersInitialized = false;
|
||||
this.sidebarDragHandlersInitialized = false;
|
||||
this.folderTreeElement = null;
|
||||
this.currentDropTarget = null;
|
||||
this.lastPageControls = null;
|
||||
this.isDisabledBySetting = false;
|
||||
this.initializationPromise = null;
|
||||
this.isCreatingFolder = false;
|
||||
this._pendingDragState = null; // Holds the drag state while a folder is being created via drag-and-drop
|
||||
|
||||
// Bind methods
|
||||
this.handleTreeClick = this.handleTreeClick.bind(this);
|
||||
@@ -56,6 +59,12 @@ export class SidebarManager {
|
||||
this.handleFolderDragOver = this.handleFolderDragOver.bind(this);
|
||||
this.handleFolderDragLeave = this.handleFolderDragLeave.bind(this);
|
||||
this.handleFolderDrop = this.handleFolderDrop.bind(this);
|
||||
this.handleSidebarDragEnter = this.handleSidebarDragEnter.bind(this);
|
||||
this.handleSidebarDragOver = this.handleSidebarDragOver.bind(this);
|
||||
this.handleSidebarDragLeave = this.handleSidebarDragLeave.bind(this);
|
||||
this.handleSidebarDrop = this.handleSidebarDrop.bind(this);
|
||||
this.handleCreateFolderSubmit = this.handleCreateFolderSubmit.bind(this);
|
||||
this.handleCreateFolderCancel = this.handleCreateFolderCancel.bind(this);
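
// Binding here keeps stable function references, so the exact same handlers
// can be detached again with removeEventListener when the sidebar is torn down.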
|
||||
}
|
||||
|
||||
setHostPageControls(pageControls) {
|
||||
@@ -118,19 +127,18 @@ export class SidebarManager {
|
||||
this.removeEventHandlers();
|
||||
|
||||
this.clearAllDropHighlights();
|
||||
if (this.dragHandlersInitialized) {
|
||||
document.removeEventListener('dragstart', this.handleCardDragStart);
|
||||
document.removeEventListener('dragend', this.handleCardDragEnd);
|
||||
this.dragHandlersInitialized = false;
|
||||
}
|
||||
if (this.folderTreeElement) {
|
||||
this.folderTreeElement.removeEventListener('dragenter', this.handleFolderDragEnter);
|
||||
this.folderTreeElement.removeEventListener('dragover', this.handleFolderDragOver);
|
||||
this.folderTreeElement.removeEventListener('dragleave', this.handleFolderDragLeave);
|
||||
this.folderTreeElement.removeEventListener('drop', this.handleFolderDrop);
|
||||
this.folderTreeElement = null;
|
||||
}
|
||||
this.resetDragState();
|
||||
this.hideCreateFolderInput();
|
||||
|
||||
// Cleanup sidebar drag handlers
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (sidebar && this.sidebarDragHandlersInitialized) {
|
||||
sidebar.removeEventListener('dragenter', this.handleSidebarDragEnter);
|
||||
sidebar.removeEventListener('dragover', this.handleSidebarDragOver);
|
||||
sidebar.removeEventListener('dragleave', this.handleSidebarDragLeave);
|
||||
sidebar.removeEventListener('drop', this.handleSidebarDrop);
|
||||
this.sidebarDragHandlersInitialized = false;
|
||||
}
|
||||
|
||||
// Reset state
|
||||
this.pageControls = null;
|
||||
@@ -233,6 +241,16 @@ export class SidebarManager {
|
||||
|
||||
this.folderTreeElement = folderTree;
|
||||
}
|
||||
|
||||
// Add sidebar-level drag handlers for creating new folders
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (sidebar && !this.sidebarDragHandlersInitialized) {
|
||||
sidebar.addEventListener('dragenter', this.handleSidebarDragEnter);
|
||||
sidebar.addEventListener('dragover', this.handleSidebarDragOver);
|
||||
sidebar.addEventListener('dragleave', this.handleSidebarDragLeave);
|
||||
sidebar.addEventListener('drop', this.handleSidebarDrop);
|
||||
this.sidebarDragHandlersInitialized = true;
|
||||
}
|
||||
}
|
||||
|
||||
handleCardDragStart(event) {
|
||||
@@ -271,6 +289,12 @@ export class SidebarManager {
|
||||
}
|
||||
|
||||
card.classList.add('dragging');
|
||||
|
||||
// Add dragging state to sidebar for visual feedback
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (sidebar) {
|
||||
sidebar.classList.add('dragging-active');
|
||||
}
|
||||
}
|
||||
|
||||
handleCardDragEnd(event) {
|
||||
@@ -278,6 +302,13 @@ export class SidebarManager {
|
||||
if (card) {
|
||||
card.classList.remove('dragging');
|
||||
}
|
||||
|
||||
// Remove dragging state from sidebar
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (sidebar) {
|
||||
sidebar.classList.remove('dragging-active');
|
||||
}
|
||||
|
||||
this.clearAllDropHighlights();
|
||||
this.resetDragState();
|
||||
}
|
||||
@@ -417,7 +448,12 @@ export class SidebarManager {
|
||||
}
|
||||
|
||||
async performDragMove(targetRelativePath) {
|
||||
console.log('[SidebarManager] performDragMove called with targetRelativePath:', targetRelativePath);
|
||||
console.log('[SidebarManager] draggedFilePaths:', this.draggedFilePaths);
|
||||
console.log('[SidebarManager] draggedRootPath:', this.draggedRootPath);
|
||||
|
||||
if (!this.draggedFilePaths || this.draggedFilePaths.length === 0) {
|
||||
console.log('[SidebarManager] performDragMove returning false - no draggedFilePaths');
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -428,12 +464,15 @@ export class SidebarManager {
|
||||
}
|
||||
|
||||
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
|
||||
console.log('[SidebarManager] performDragMove returning false - supportsMove is false');
|
||||
showToast('toast.models.moveFailed', { message: translate('sidebar.dragDrop.moveUnsupported', {}, 'Move not supported for this page') }, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
const rootPath = this.draggedRootPath ? this.draggedRootPath.replace(/\\/g, '/') : '';
|
||||
console.log('[SidebarManager] rootPath:', rootPath);
|
||||
if (!rootPath) {
|
||||
console.log('[SidebarManager] performDragMove returning false - no rootPath');
|
||||
showToast(
|
||||
'toast.models.moveFailed',
|
||||
{ message: translate('sidebar.dragDrop.unableToResolveRoot', {}, 'Unable to determine destination path for move.') },
|
||||
@@ -446,15 +485,19 @@ export class SidebarManager {
|
||||
const useBulkMove = this.draggedFromBulk || this.draggedFilePaths.length > 1;
|
||||
|
||||
try {
|
||||
console.log('[SidebarManager] calling apiClient.move, useBulkMove:', useBulkMove);
|
||||
if (useBulkMove) {
|
||||
await this.apiClient.moveBulkModels(this.draggedFilePaths, destination);
|
||||
} else {
|
||||
await this.apiClient.moveSingleModel(this.draggedFilePaths[0], destination);
|
||||
}
|
||||
console.log('[SidebarManager] apiClient.move successful');
|
||||
|
||||
if (this.pageControls && typeof this.pageControls.resetAndReload === 'function') {
|
||||
console.log('[SidebarManager] calling resetAndReload');
|
||||
await this.pageControls.resetAndReload(true);
|
||||
} else {
|
||||
console.log('[SidebarManager] calling refresh');
|
||||
await this.refresh();
|
||||
}
|
||||
|
||||
@@ -462,10 +505,12 @@ export class SidebarManager {
|
||||
bulkManager.toggleBulkMode();
|
||||
}
|
||||
|
||||
console.log('[SidebarManager] performDragMove returning true');
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('Error moving model(s) via drag-and-drop:', error);
|
||||
console.error('[SidebarManager] Error moving model(s) via drag-and-drop:', error);
|
||||
showToast('toast.models.moveFailed', { message: error.message || 'Unknown error' }, 'error');
|
||||
console.log('[SidebarManager] performDragMove returning false due to error');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -476,6 +521,365 @@ export class SidebarManager {
|
||||
this.draggedFromBulk = false;
|
||||
}
|
||||
|
||||
// Version of performDragMove that accepts state as parameters (for create folder submit)
|
||||
async performDragMoveWithState(targetRelativePath, draggedFilePaths, draggedRootPath, draggedFromBulk) {
|
||||
console.log('[SidebarManager] performDragMoveWithState called with:', { targetRelativePath, draggedFilePaths, draggedRootPath, draggedFromBulk });
|
||||
|
||||
if (!draggedFilePaths || draggedFilePaths.length === 0) {
|
||||
console.log('[SidebarManager] performDragMoveWithState returning false - no draggedFilePaths');
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!this.apiClient) {
|
||||
this.apiClient = this.pageControls?.getSidebarApiClient?.()
|
||||
|| this.pageControls?.sidebarApiClient
|
||||
|| getModelApiClient();
|
||||
}
|
||||
|
||||
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
|
||||
console.log('[SidebarManager] performDragMoveWithState returning false - supportsMove is false');
|
||||
showToast('toast.models.moveFailed', { message: translate('sidebar.dragDrop.moveUnsupported', {}, 'Move not supported for this page') }, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
const rootPath = draggedRootPath ? draggedRootPath.replace(/\\/g, '/') : '';
|
||||
console.log('[SidebarManager] rootPath:', rootPath);
|
||||
if (!rootPath) {
|
||||
console.log('[SidebarManager] performDragMoveWithState returning false - no rootPath');
|
||||
showToast(
|
||||
'toast.models.moveFailed',
|
||||
{ message: translate('sidebar.dragDrop.unableToResolveRoot', {}, 'Unable to determine destination path for move.') },
|
||||
'error'
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
const destination = this.combineRootAndRelativePath(rootPath, targetRelativePath);
|
||||
const useBulkMove = draggedFromBulk || draggedFilePaths.length > 1;
|
||||
|
||||
try {
|
||||
console.log('[SidebarManager] calling apiClient.move, useBulkMove:', useBulkMove);
|
||||
if (useBulkMove) {
|
||||
await this.apiClient.moveBulkModels(draggedFilePaths, destination);
|
||||
} else {
|
||||
await this.apiClient.moveSingleModel(draggedFilePaths[0], destination);
|
||||
}
|
||||
console.log('[SidebarManager] apiClient.move successful');
|
||||
|
||||
if (this.pageControls && typeof this.pageControls.resetAndReload === 'function') {
|
||||
console.log('[SidebarManager] calling resetAndReload');
|
||||
await this.pageControls.resetAndReload(true);
|
||||
} else {
|
||||
console.log('[SidebarManager] calling refresh');
|
||||
await this.refresh();
|
||||
}
|
||||
|
||||
if (draggedFromBulk && state.bulkMode && typeof bulkManager?.toggleBulkMode === 'function') {
|
||||
bulkManager.toggleBulkMode();
|
||||
}
|
||||
|
||||
console.log('[SidebarManager] performDragMoveWithState returning true');
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('[SidebarManager] Error moving model(s) via drag-and-drop:', error);
|
||||
showToast('toast.models.moveFailed', { message: error.message || 'Unknown error' }, 'error');
|
||||
console.log('[SidebarManager] performDragMoveWithState returning false due to error');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ===== Sidebar-level drag handlers for creating new folders =====
|
||||
|
||||
handleSidebarDragEnter(event) {
|
||||
if (!this.draggedFilePaths || this.draggedFilePaths.length === 0) return;
|
||||
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (!sidebar) return;
|
||||
|
||||
// Only show create folder zone if not hovering over an existing folder
|
||||
const folderElement = this.getFolderElementFromEvent(event);
|
||||
if (folderElement) {
|
||||
this.hideCreateFolderZone();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if drag is within the sidebar tree container area
|
||||
const treeContainer = document.querySelector('.sidebar-tree-container');
|
||||
if (treeContainer && treeContainer.contains(event.target)) {
|
||||
event.preventDefault();
|
||||
this.showCreateFolderZone();
|
||||
}
|
||||
}
|
||||
|
||||
handleSidebarDragOver(event) {
|
||||
if (!this.draggedFilePaths || this.draggedFilePaths.length === 0) return;
|
||||
|
||||
const folderElement = this.getFolderElementFromEvent(event);
|
||||
if (folderElement) {
|
||||
this.hideCreateFolderZone();
|
||||
return;
|
||||
}
|
||||
|
||||
const treeContainer = document.querySelector('.sidebar-tree-container');
|
||||
if (treeContainer && treeContainer.contains(event.target)) {
|
||||
event.preventDefault();
|
||||
if (event.dataTransfer) {
|
||||
event.dataTransfer.dropEffect = 'move';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handleSidebarDragLeave(event) {
|
||||
if (!this.draggedFilePaths || this.draggedFilePaths.length === 0) return;
|
||||
|
||||
const sidebar = document.getElementById('folderSidebar');
|
||||
if (!sidebar) return;
|
||||
|
||||
const relatedTarget = event.relatedTarget instanceof Element ? event.relatedTarget : null;
|
||||
|
||||
// Only hide if leaving the sidebar entirely
|
||||
if (!relatedTarget || !sidebar.contains(relatedTarget)) {
|
||||
this.hideCreateFolderZone();
|
||||
}
|
||||
}
|
||||
|
||||
async handleSidebarDrop(event) {
|
||||
if (!this.draggedFilePaths || this.draggedFilePaths.length === 0) return;
|
||||
|
||||
const folderElement = this.getFolderElementFromEvent(event);
|
||||
if (folderElement) {
|
||||
// Let the folder drop handler take over
|
||||
return;
|
||||
}
|
||||
|
||||
const treeContainer = document.querySelector('.sidebar-tree-container');
|
||||
if (!treeContainer || !treeContainer.contains(event.target)) {
|
||||
return;
|
||||
}
|
||||
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
|
||||
// Show create folder input
|
||||
this.showCreateFolderInput();
|
||||
}
|
||||
|
||||
showCreateFolderZone() {
|
||||
if (this.isCreatingFolder) return;
|
||||
|
||||
const treeContainer = document.querySelector('.sidebar-tree-container');
|
||||
if (!treeContainer) return;
|
||||
|
||||
let zone = document.getElementById('sidebarCreateFolderZone');
|
||||
if (!zone) {
|
||||
zone = document.createElement('div');
|
||||
zone.id = 'sidebarCreateFolderZone';
|
||||
zone.className = 'sidebar-create-folder-zone';
|
||||
zone.innerHTML = `
|
||||
<div class="sidebar-create-folder-content">
|
||||
<i class="fas fa-plus-circle"></i>
|
||||
<span>${translate('sidebar.dragDrop.createFolderHint', {}, 'Release to create new folder')}</span>
|
||||
</div>
|
||||
`;
|
||||
treeContainer.appendChild(zone);
|
||||
}
|
||||
|
||||
zone.classList.add('active');
|
||||
}
|
||||
|
||||
hideCreateFolderZone() {
|
||||
const zone = document.getElementById('sidebarCreateFolderZone');
|
||||
if (zone) {
|
||||
zone.classList.remove('active');
|
||||
}
|
||||
}
|
||||
|
||||
showCreateFolderInput() {
|
||||
console.log('[SidebarManager] showCreateFolderInput called');
|
||||
this.isCreatingFolder = true;
|
||||
|
||||
// Save the drag state immediately so that later events (such as blur) cannot clear it
|
||||
this._pendingDragState = {
|
||||
filePaths: this.draggedFilePaths ? [...this.draggedFilePaths] : null,
|
||||
rootPath: this.draggedRootPath,
|
||||
fromBulk: this.draggedFromBulk
|
||||
};
|
||||
console.log('[SidebarManager] saved pending drag state:', this._pendingDragState);
|
||||
|
||||
this.hideCreateFolderZone();
|
||||
|
||||
const treeContainer = document.querySelector('.sidebar-tree-container');
|
||||
if (!treeContainer) return;
|
||||
|
||||
// Remove existing input if any
|
||||
this.hideCreateFolderInput();
|
||||
|
||||
const inputContainer = document.createElement('div');
|
||||
inputContainer.id = 'sidebarCreateFolderInput';
|
||||
inputContainer.className = 'sidebar-create-folder-input-container';
|
||||
inputContainer.innerHTML = `
|
||||
<div class="sidebar-create-folder-input-wrapper">
|
||||
<i class="fas fa-folder-plus"></i>
|
||||
<input type="text"
|
||||
class="sidebar-create-folder-input"
|
||||
placeholder="${translate('sidebar.dragDrop.newFolderName', {}, 'New folder name')}"
|
||||
autofocus />
|
||||
<button class="sidebar-create-folder-btn sidebar-create-folder-confirm" title="${translate('common.confirm', {}, 'Confirm')}">
|
||||
<i class="fas fa-check"></i>
|
||||
</button>
|
||||
<button class="sidebar-create-folder-btn sidebar-create-folder-cancel" title="${translate('common.cancel', {}, 'Cancel')}">
|
||||
<i class="fas fa-times"></i>
|
||||
</button>
|
||||
</div>
|
||||
<div class="sidebar-create-folder-hint">
|
||||
${translate('sidebar.dragDrop.folderNameHint', {}, 'Press Enter to confirm, Escape to cancel')}
|
||||
</div>
|
||||
`;
|
||||
|
||||
treeContainer.appendChild(inputContainer);
|
||||
|
||||
// Focus input
|
||||
const input = inputContainer.querySelector('.sidebar-create-folder-input');
|
||||
if (input) {
|
||||
input.focus();
|
||||
}
|
||||
|
||||
// Bind events
|
||||
const confirmBtn = inputContainer.querySelector('.sidebar-create-folder-confirm');
|
||||
const cancelBtn = inputContainer.querySelector('.sidebar-create-folder-cancel');
|
||||
|
||||
// Flag to prevent blur from canceling when clicking buttons
|
||||
let isButtonClick = false;
|
||||
|
||||
confirmBtn?.addEventListener('mousedown', () => {
|
||||
isButtonClick = true;
|
||||
console.log('[SidebarManager] confirmBtn mousedown - isButtonClick set to true');
|
||||
});
|
||||
cancelBtn?.addEventListener('mousedown', () => {
|
||||
isButtonClick = true;
|
||||
console.log('[SidebarManager] cancelBtn mousedown - isButtonClick set to true');
|
||||
});
|
||||
|
||||
confirmBtn?.addEventListener('click', (e) => {
|
||||
console.log('[SidebarManager] confirmBtn click event triggered');
|
||||
this.handleCreateFolderSubmit();
|
||||
});
|
||||
cancelBtn?.addEventListener('click', () => {
|
||||
console.log('[SidebarManager] cancelBtn click event triggered');
|
||||
this.handleCreateFolderCancel();
|
||||
});
|
||||
input?.addEventListener('keydown', (e) => {
|
||||
console.log('[SidebarManager] input keydown:', e.key);
|
||||
if (e.key === 'Enter') {
|
||||
console.log('[SidebarManager] Enter pressed, calling handleCreateFolderSubmit');
|
||||
this.handleCreateFolderSubmit();
|
||||
} else if (e.key === 'Escape') {
|
||||
console.log('[SidebarManager] Escape pressed, calling handleCreateFolderCancel');
|
||||
this.handleCreateFolderCancel();
|
||||
}
|
||||
});
|
||||
input?.addEventListener('blur', () => {
|
||||
console.log('[SidebarManager] input blur event - isButtonClick:', isButtonClick);
|
||||
// Delay to allow button clicks to process first
|
||||
setTimeout(() => {
|
||||
console.log('[SidebarManager] blur timeout - isButtonClick:', isButtonClick, 'activeElement:', document.activeElement?.className);
|
||||
if (!isButtonClick && document.activeElement !== confirmBtn && document.activeElement !== cancelBtn) {
|
||||
console.log('[SidebarManager] blur timeout - calling handleCreateFolderCancel');
|
||||
this.handleCreateFolderCancel();
|
||||
} else {
|
||||
console.log('[SidebarManager] blur timeout - NOT canceling (button click detected)');
|
||||
}
|
||||
isButtonClick = false;
|
||||
}, 200);
|
||||
});
|
||||
}
|
||||
|
||||
hideCreateFolderInput() {
|
||||
console.log('[SidebarManager] hideCreateFolderInput called');
|
||||
const inputContainer = document.getElementById('sidebarCreateFolderInput');
|
||||
console.log('[SidebarManager] inputContainer:', inputContainer);
|
||||
if (inputContainer) {
|
||||
inputContainer.remove();
|
||||
console.log('[SidebarManager] inputContainer removed');
|
||||
}
|
||||
this.isCreatingFolder = false;
|
||||
console.log('[SidebarManager] isCreatingFolder set to false');
|
||||
}
|
||||
|
||||
async handleCreateFolderSubmit() {
|
||||
console.log('[SidebarManager] handleCreateFolderSubmit called');
|
||||
const input = document.querySelector('#sidebarCreateFolderInput .sidebar-create-folder-input');
|
||||
console.log('[SidebarManager] input element:', input);
|
||||
if (!input) {
|
||||
console.log('[SidebarManager] input not found, returning');
|
||||
return;
|
||||
}
|
||||
|
||||
const folderName = input.value.trim();
|
||||
console.log('[SidebarManager] folderName:', folderName);
|
||||
if (!folderName) {
|
||||
showToast('sidebar.dragDrop.emptyFolderName', {}, 'warning');
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate folder name (no slashes, no special chars)
|
||||
if (/[\\/:*?"<>|]/.test(folderName)) {
|
||||
showToast('sidebar.dragDrop.invalidFolderName', {}, 'error');
|
||||
return;
|
||||
}
|
||||
|
||||
// Build target path - use selected path as parent, or root if none selected
|
||||
const parentPath = this.selectedPath || '';
|
||||
const targetRelativePath = parentPath ? `${parentPath}/${folderName}` : folderName;
|
||||
console.log('[SidebarManager] targetRelativePath:', targetRelativePath);
|
||||
|
||||
// Use the drag state saved when showCreateFolderInput was called
|
||||
const pendingState = this._pendingDragState;
|
||||
console.log('[SidebarManager] using pending drag state:', pendingState);
|
||||
|
||||
if (!pendingState || !pendingState.filePaths || pendingState.filePaths.length === 0) {
|
||||
console.log('[SidebarManager] no pending drag state found, cannot proceed');
|
||||
showToast('sidebar.dragDrop.noDragState', {}, 'error');
|
||||
this.hideCreateFolderInput();
|
||||
return;
|
||||
}
|
||||
|
||||
this.hideCreateFolderInput();
|
||||
|
||||
// Perform the move with saved state
|
||||
console.log('[SidebarManager] calling performDragMoveWithState with pending state');
|
||||
const success = await this.performDragMoveWithState(targetRelativePath, pendingState.filePaths, pendingState.rootPath, pendingState.fromBulk);
|
||||
console.log('[SidebarManager] performDragMoveWithState result:', success);
|
||||
|
||||
if (success) {
|
||||
// Expand the parent folder to show the new folder
|
||||
if (parentPath) {
|
||||
this.expandedNodes.add(parentPath);
|
||||
this.saveExpandedState();
|
||||
}
|
||||
// Refresh the tree to show the newly created folder
|
||||
// restoreSelectedFolder() inside refresh() will maintain the current active folder
|
||||
await this.refresh();
|
||||
}
|
||||
|
||||
// Clear the pending drag state
|
||||
this._pendingDragState = null;
|
||||
this.resetDragState();
|
||||
this.clearAllDropHighlights();
|
||||
}
|
||||
|
||||
handleCreateFolderCancel() {
|
||||
this.hideCreateFolderInput();
|
||||
// Clear the pending drag state
|
||||
this._pendingDragState = null;
|
||||
this.resetDragState();
|
||||
this.clearAllDropHighlights();
|
||||
}
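
// Drag-to-create-folder flow, as implemented above:
//   1. handleSidebarDragEnter / handleSidebarDragOver show the create-folder
//      zone while cards are dragged over empty space in the tree container.
//   2. handleSidebarDrop opens the inline name input (showCreateFolderInput),
//      which first snapshots the drag state into _pendingDragState.
//   3. handleCreateFolderSubmit validates the name and moves the dragged
//      models via performDragMoveWithState, then refreshes the tree.
//   4. handleCreateFolderCancel (Escape, cancel button, or blur) discards the
//      pending state.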
|
||||
|
||||
saveSelectedFolder() {
|
||||
setStorageItem(`${this.pageType}_activeFolder`, this.selectedPath);
|
||||
}
|
||||
|
||||
clearAllDropHighlights() {
|
||||
const highlighted = document.querySelectorAll('.sidebar-tree-node-content.drop-target, .sidebar-node-content.drop-target');
|
||||
highlighted.forEach((element) => element.classList.remove('drop-target'));
|
||||
@@ -917,7 +1321,11 @@ export class SidebarManager {
|
||||
folderTree.innerHTML = `
|
||||
<div class="sidebar-tree-placeholder">
|
||||
<i class="fas fa-folder-open"></i>
|
||||
<div>No folders found</div>
|
||||
<div>${translate('sidebar.empty.noFolders', {}, 'No folders found')}</div>
|
||||
<div class="sidebar-empty-hint">
|
||||
<i class="fas fa-hand-pointer"></i>
|
||||
${translate('sidebar.empty.dragHint', {}, 'Drag items here to create folders')}
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
@@ -26,8 +26,7 @@ export function generateVideoWrapper(media, heightPercent, shouldBlur, nsfwText,
|
||||
</button>
|
||||
` : ''}
|
||||
${mediaControlsHtml}
|
||||
<video controls autoplay muted loop crossorigin="anonymous"
|
||||
referrerpolicy="no-referrer"
|
||||
<video controls autoplay muted loop
|
||||
data-local-src="${localUrl || ''}"
|
||||
data-remote-src="${remoteUrl}"
|
||||
data-nsfw-level="${nsfwLevel}"
|
||||
|
||||
@@ -527,17 +527,18 @@ function initSetPreviewHandlers(container) {
|
||||
const response = await fetch(mediaElement.dataset.localSrc);
|
||||
const blob = await response.blob();
|
||||
const file = new File([blob], 'preview.jpg', { type: blob.type });
|
||||
|
||||
|
||||
// Use the existing baseModelApi uploadPreview method with nsfw level
|
||||
await apiClient.uploadPreview(modelFilePath, file, modelType, nsfwLevel);
|
||||
await apiClient.uploadPreview(modelFilePath, file, nsfwLevel);
|
||||
} else {
|
||||
// We need to download the remote file first
|
||||
const response = await fetch(mediaElement.src);
|
||||
const blob = await response.blob();
|
||||
const file = new File([blob], 'preview.jpg', { type: blob.type });
|
||||
|
||||
// Use the existing baseModelApi uploadPreview method with nsfw level
|
||||
await apiClient.uploadPreview(modelFilePath, file, modelType, nsfwLevel);
|
||||
// Remote file - send URL to backend to download (avoids CORS issues)
|
||||
const imageUrl = mediaElement.src || mediaElement.dataset.remoteSrc;
|
||||
if (!imageUrl) {
|
||||
throw new Error('No image URL available');
|
||||
}
|
||||
|
||||
// Use the new setPreviewFromUrl method
|
||||
await apiClient.setPreviewFromUrl(modelFilePath, imageUrl, nsfwLevel);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error setting preview:', error);
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
} from './MediaUtils.js';
|
||||
import { generateMetadataPanel } from './MetadataPanel.js';
|
||||
import { generateImageWrapper, generateVideoWrapper } from './MediaRenderers.js';
|
||||
import { getShowcaseUrl } from '../../../utils/civitaiUtils.js';
|
||||
|
||||
export const showcaseListenerMetrics = {
|
||||
wheelListeners: 0,
|
||||
@@ -61,8 +62,14 @@ export async function loadExampleImages(images, modelHash) {
|
||||
|
||||
// Re-initialize the showcase event listeners
|
||||
const carousel = showcaseTab.querySelector('.carousel');
|
||||
if (carousel && !carousel.classList.contains('collapsed')) {
|
||||
initShowcaseContent(carousel);
|
||||
if (carousel) {
|
||||
// Always bind scroll-indicator click events (even when collapsed)
|
||||
bindScrollIndicatorEvents(carousel);
|
||||
|
||||
// Only initialize full showcase content when expanded
|
||||
if (!carousel.classList.contains('collapsed')) {
|
||||
initShowcaseContent(carousel);
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize the example import functionality
|
||||
@@ -151,11 +158,19 @@ export function renderShowcaseContent(images, exampleFiles = [], startExpanded =
|
||||
function renderMediaItem(img, index, exampleFiles) {
|
||||
// Find matching file in our list of actual files
|
||||
let localFile = findLocalFile(img, index, exampleFiles);
|
||||
|
||||
const remoteUrl = img.url || '';
|
||||
|
||||
// Get original remote URL
|
||||
const originalRemoteUrl = img.url || '';
|
||||
|
||||
// Determine media type for optimization
|
||||
const isVideo = localFile ? localFile.is_video :
|
||||
originalRemoteUrl.endsWith('.mp4') || originalRemoteUrl.endsWith('.webm');
|
||||
const mediaType = isVideo ? 'video' : 'image';
|
||||
|
||||
// Optimize CivitAI URLs for showcase display (full quality)
|
||||
const remoteUrl = getShowcaseUrl(originalRemoteUrl, mediaType);
|
||||
|
||||
const localUrl = localFile ? localFile.path : '';
|
||||
const isVideo = localFile ? localFile.is_video :
|
||||
remoteUrl.endsWith('.mp4') || remoteUrl.endsWith('.webm');
|
||||
|
||||
// Calculate appropriate aspect ratio
|
||||
const aspectRatio = (img.height / img.width) * 100;
|
||||
@@ -576,6 +591,41 @@ export function toggleShowcase(element) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bind scroll-indicator click events (works even when carousel is collapsed)
|
||||
* @param {HTMLElement} carousel - The carousel element
|
||||
*/
|
||||
function bindScrollIndicatorEvents(carousel) {
|
||||
if (!carousel) return;
|
||||
|
||||
const scrollIndicator = carousel.previousElementSibling;
|
||||
if (scrollIndicator && scrollIndicator.classList.contains('scroll-indicator')) {
|
||||
// Remove previous listeners to avoid duplicates
|
||||
scrollIndicator.onclick = null;
|
||||
scrollIndicator.removeEventListener('click', scrollIndicator._leftClickHandler);
|
||||
scrollIndicator.removeEventListener('mousedown', scrollIndicator._middleClickHandler);
|
||||
|
||||
// Handler for left-click (button 0) - uses 'click' event
|
||||
scrollIndicator._leftClickHandler = (event) => {
|
||||
if (event.button === 0) {
|
||||
event.preventDefault();
|
||||
toggleShowcase(scrollIndicator);
|
||||
}
|
||||
};
|
||||
|
||||
// Handler for middle-click (button 1) - uses 'mousedown' event
|
||||
scrollIndicator._middleClickHandler = (event) => {
|
||||
if (event.button === 1) {
|
||||
event.preventDefault();
|
||||
toggleShowcase(scrollIndicator);
|
||||
}
|
||||
};
|
||||
|
||||
scrollIndicator.addEventListener('click', scrollIndicator._leftClickHandler);
|
||||
scrollIndicator.addEventListener('mousedown', scrollIndicator._middleClickHandler);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize all showcase content interactions
|
||||
* @param {HTMLElement} carousel - The carousel element
|
||||
@@ -589,15 +639,8 @@ export function initShowcaseContent(carousel) {
|
||||
initMediaControlHandlers(carousel);
|
||||
positionAllMediaControls(carousel);
|
||||
|
||||
// Bind scroll-indicator click to toggleShowcase
|
||||
const scrollIndicator = carousel.previousElementSibling;
|
||||
if (scrollIndicator && scrollIndicator.classList.contains('scroll-indicator')) {
|
||||
// Remove previous click listeners to avoid duplicates
|
||||
scrollIndicator.onclick = null;
|
||||
scrollIndicator.removeEventListener('click', scrollIndicator._toggleShowcaseHandler);
|
||||
scrollIndicator._toggleShowcaseHandler = () => toggleShowcase(scrollIndicator);
|
||||
scrollIndicator.addEventListener('click', scrollIndicator._toggleShowcaseHandler);
|
||||
}
|
||||
// Bind scroll-indicator click events
|
||||
bindScrollIndicatorEvents(carousel);
|
||||
|
||||
// Add window resize handler
|
||||
const resizeHandler = () => positionAllMediaControls(carousel);
|
||||
|
||||
@@ -8,6 +8,22 @@ export class LoadingManager {
|
||||
return LoadingManager.instance;
|
||||
}
|
||||
|
||||
// Delay DOM creation until first use to ensure i18n is ready
|
||||
this._initialized = false;
|
||||
this.overlay = null;
|
||||
this.loadingContent = null;
|
||||
this.progressBar = null;
|
||||
this.statusText = null;
|
||||
this.cancelButton = null;
|
||||
this.onCancelCallback = null;
|
||||
this.detailsContainer = null;
|
||||
|
||||
LoadingManager.instance = this;
|
||||
}
|
||||
|
||||
_ensureInitialized() {
|
||||
if (this._initialized) return;
|
||||
|
||||
this.overlay = document.getElementById('loading-overlay');
|
||||
|
||||
if (!this.overlay) {
|
||||
@@ -53,7 +69,6 @@ export class LoadingManager {
|
||||
this.loadingContent.appendChild(this.cancelButton);
|
||||
}
|
||||
|
||||
this.onCancelCallback = null;
|
||||
this.cancelButton.onclick = () => {
|
||||
if (this.onCancelCallback) {
|
||||
this.onCancelCallback();
|
||||
@@ -62,12 +77,11 @@ export class LoadingManager {
|
||||
}
|
||||
};
|
||||
|
||||
this.detailsContainer = null; // Will be created when needed
|
||||
|
||||
LoadingManager.instance = this;
|
||||
this._initialized = true;
|
||||
}
|
||||
|
||||
show(message = 'Loading...', progress = 0) {
|
||||
this._ensureInitialized();
|
||||
this.overlay.style.display = 'flex';
|
||||
this.setProgress(progress);
|
||||
this.setStatus(message);
|
||||
@@ -77,21 +91,25 @@ export class LoadingManager {
|
||||
}
|
||||
|
||||
hide() {
|
||||
if (!this._initialized) return;
|
||||
this.overlay.style.display = 'none';
|
||||
this.reset();
|
||||
this.removeDetailsContainer();
|
||||
}
|
||||
|
||||
setProgress(percent) {
|
||||
if (!this._initialized) return;
|
||||
this.progressBar.style.width = `${percent}%`;
|
||||
this.progressBar.setAttribute('aria-valuenow', percent);
|
||||
}
|
||||
|
||||
setStatus(message) {
|
||||
if (!this._initialized) return;
|
||||
this.statusText.textContent = message;
|
||||
}
|
||||
|
||||
reset() {
|
||||
if (!this._initialized) return;
|
||||
this.setProgress(0);
|
||||
this.setStatus('');
|
||||
this.removeDetailsContainer();
|
||||
@@ -100,6 +118,7 @@ export class LoadingManager {
|
||||
}
|
||||
|
||||
showCancelButton(onCancel) {
|
||||
this._ensureInitialized();
|
||||
if (this.cancelButton) {
|
||||
this.onCancelCallback = onCancel;
|
||||
this.cancelButton.style.display = 'flex';
|
||||
@@ -109,6 +128,7 @@ export class LoadingManager {
|
||||
}
|
||||
|
||||
hideCancelButton() {
|
||||
if (!this._initialized) return;
|
||||
if (this.cancelButton) {
|
||||
this.cancelButton.style.display = 'none';
|
||||
this.onCancelCallback = null;
|
||||
@@ -117,6 +137,7 @@ export class LoadingManager {
|
||||
|
||||
// Create a details container for enhanced progress display
|
||||
createDetailsContainer() {
|
||||
this._ensureInitialized();
|
||||
// Remove existing container if any
|
||||
this.removeDetailsContainer();
|
||||
|
||||
@@ -332,12 +353,14 @@ export class LoadingManager {
|
||||
}
|
||||
|
||||
showSimpleLoading(message = 'Loading...') {
|
||||
this._ensureInitialized();
|
||||
this.overlay.style.display = 'flex';
|
||||
this.progressBar.style.display = 'none';
|
||||
this.setStatus(message);
|
||||
}
|
||||
|
||||
restoreProgressBar() {
|
||||
if (!this._initialized) return;
|
||||
this.progressBar.style.display = 'block';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,6 +88,11 @@ class MoveManager {
|
||||
folderPathInput.value = '';
|
||||
}
|
||||
|
||||
// Reset folder tree selection
|
||||
if (this.folderTreeManager) {
|
||||
this.folderTreeManager.clearSelection();
|
||||
}
|
||||
|
||||
try {
|
||||
// Fetch model roots
|
||||
const modelRootSelect = document.getElementById('moveModelRoot');
|
||||
@@ -286,6 +291,9 @@ class MoveManager {
|
||||
|
||||
if (recursive) {
|
||||
// Visible if it's in activeFolder or any subfolder
|
||||
// Special case for root: if activeFolder is empty, everything is visible in recursive mode
|
||||
if (normalizedActive === '') return true;
|
||||
|
||||
return normalizedRelative === normalizedActive ||
|
||||
normalizedRelative.startsWith(normalizedActive + '/');
|
||||
} else {
|
||||
@@ -305,7 +313,7 @@ class MoveManager {
|
||||
}
|
||||
|
||||
// Get selected folder path from folder tree manager
|
||||
const targetFolder = this.folderTreeManager.getSelectedPath();
|
||||
const targetFolder = this.useDefaultPath ? '' : this.folderTreeManager.getSelectedPath();
|
||||
|
||||
let targetPath = selectedRoot;
|
||||
if (targetFolder) {
|
||||
@@ -315,81 +323,31 @@ class MoveManager {
|
||||
try {
|
||||
if (this.bulkFilePaths) {
|
||||
// Bulk move mode
|
||||
const results = await apiClient.moveBulkModels(this.bulkFilePaths, targetPath, this.useDefaultPath);
|
||||
|
||||
// Update virtual scroller visibility/metadata
|
||||
const pageState = getCurrentPageState();
|
||||
if (state.virtualScroller) {
|
||||
results.forEach(result => {
|
||||
if (result.success) {
|
||||
// Deselect moving item
|
||||
bulkManager.deselectItem(result.original_file_path);
|
||||
|
||||
const newRelativeFolder = this._getRelativeFolder(result.new_file_path);
|
||||
const isVisible = this._isModelVisible(newRelativeFolder, pageState);
|
||||
|
||||
if (!isVisible) {
|
||||
state.virtualScroller.removeItemByFilePath(result.original_file_path);
|
||||
} else {
|
||||
const newFileNameWithExt = result.new_file_path.substring(result.new_file_path.lastIndexOf('/') + 1);
|
||||
const baseFileName = newFileNameWithExt.substring(0, newFileNameWithExt.lastIndexOf('.'));
|
||||
|
||||
const updateData = {
|
||||
file_path: result.new_file_path,
|
||||
file_name: baseFileName,
|
||||
folder: newRelativeFolder
|
||||
};
|
||||
|
||||
// Only update sub_type if it's present in the cache_entry
|
||||
if (result.cache_entry && result.cache_entry.sub_type) {
|
||||
updateData.sub_type = result.cache_entry.sub_type;
|
||||
}
|
||||
|
||||
state.virtualScroller.updateSingleItem(result.original_file_path, updateData);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
await apiClient.moveBulkModels(this.bulkFilePaths, targetPath, this.useDefaultPath);
|
||||
|
||||
// Deselect moving items
|
||||
this.bulkFilePaths.forEach(path => bulkManager.deselectItem(path));
|
||||
} else {
|
||||
// Single move mode
|
||||
const result = await apiClient.moveSingleModel(this.currentFilePath, targetPath, this.useDefaultPath);
|
||||
|
||||
const pageState = getCurrentPageState();
|
||||
if (result && result.new_file_path && state.virtualScroller) {
|
||||
// Deselect moving item
|
||||
bulkManager.deselectItem(this.currentFilePath);
|
||||
|
||||
const newRelativeFolder = this._getRelativeFolder(result.new_file_path);
|
||||
const isVisible = this._isModelVisible(newRelativeFolder, pageState);
|
||||
|
||||
if (!isVisible) {
|
||||
state.virtualScroller.removeItemByFilePath(this.currentFilePath);
|
||||
} else {
|
||||
const newFileNameWithExt = result.new_file_path.substring(result.new_file_path.lastIndexOf('/') + 1);
|
||||
const baseFileName = newFileNameWithExt.substring(0, newFileNameWithExt.lastIndexOf('.'));
|
||||
|
||||
const updateData = {
|
||||
file_path: result.new_file_path,
|
||||
file_name: baseFileName,
|
||||
folder: newRelativeFolder
|
||||
};
|
||||
|
||||
// Only update sub_type if it's present in the cache_entry
|
||||
if (result.cache_entry && result.cache_entry.sub_type) {
|
||||
updateData.sub_type = result.cache_entry.sub_type;
|
||||
}
|
||||
|
||||
state.virtualScroller.updateSingleItem(this.currentFilePath, updateData);
|
||||
}
|
||||
}
|
||||
await apiClient.moveSingleModel(this.currentFilePath, targetPath, this.useDefaultPath);
|
||||
|
||||
// Deselect moving item
|
||||
bulkManager.deselectItem(this.currentFilePath);
|
||||
}
|
||||
|
||||
// Refresh folder tags after successful move
|
||||
sidebarManager.refresh();
|
||||
// Refresh UI by reloading the current page, same as drag-and-drop behavior
|
||||
// This ensures all metadata (like preview URLs) are correctly formatted by the backend
|
||||
if (sidebarManager.pageControls && typeof sidebarManager.pageControls.resetAndReload === 'function') {
|
||||
await sidebarManager.pageControls.resetAndReload(true);
|
||||
} else if (sidebarManager.lastPageControls && typeof sidebarManager.lastPageControls.resetAndReload === 'function') {
|
||||
await sidebarManager.lastPageControls.resetAndReload(true);
|
||||
}
|
||||
|
||||
// Refresh folder tree in sidebar
|
||||
await sidebarManager.refresh();
|
||||
|
||||
modalManager.closeModal('moveModal');
|
||||
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error moving model(s):', error);
|
||||
showToast('toast.models.moveFailed', { message: error.message }, 'error');
|
||||
|
||||
209
static/js/services/supportersService.js
Normal file
@@ -0,0 +1,209 @@
|
||||
/**
|
||||
* Supporters service - Fetches and manages supporters data
|
||||
*/
|
||||
|
||||
let supportersData = null;
|
||||
let isLoading = false;
|
||||
let loadPromise = null;
|
||||
|
||||
/**
|
||||
* Fetch supporters data from the API
|
||||
* @returns {Promise<Object>} Supporters data
|
||||
*/
|
||||
export async function fetchSupporters() {
|
||||
// Return cached data if available
|
||||
if (supportersData) {
|
||||
return supportersData;
|
||||
}
|
||||
|
||||
// Return existing promise if already loading
|
||||
if (isLoading && loadPromise) {
|
||||
return loadPromise;
|
||||
}
|
||||
|
||||
isLoading = true;
|
||||
loadPromise = fetch('/api/lm/supporters')
|
||||
.then(response => {
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch supporters: ${response.statusText}`);
|
||||
}
|
||||
return response.json();
|
||||
})
|
||||
.then(data => {
|
||||
if (data.success && data.supporters) {
|
||||
supportersData = data.supporters;
|
||||
return supportersData;
|
||||
}
|
||||
throw new Error(data.error || 'Failed to load supporters data');
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error loading supporters:', error);
|
||||
// Return empty data on error
|
||||
return {
|
||||
specialThanks: [],
|
||||
allSupporters: [],
|
||||
totalCount: 0
|
||||
};
|
||||
})
|
||||
.finally(() => {
|
||||
isLoading = false;
|
||||
loadPromise = null;
|
||||
});
|
||||
|
||||
return loadPromise;
|
||||
}
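
// Behaviour sketch: concurrent callers share one in-flight request, and later
// calls reuse the cached result until clearSupportersCache() is called.
//
//   const first = fetchSupporters();       // starts the network request
//   const second = fetchSupporters();      // same pending promise, no extra request
//   const cached = await fetchSupporters(); // served from the cache afterwards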
|
||||
|
||||
/**
|
||||
* Clear cached supporters data
|
||||
*/
|
||||
export function clearSupportersCache() {
|
||||
supportersData = null;
|
||||
}
|
||||
|
||||
let autoScrollRequest = null;
|
||||
let autoScrollTimeout = null;
|
||||
let isUserInteracting = false;
|
||||
let isHovering = false;
|
||||
let currentScrollPos = 0;
|
||||
|
||||
/**
|
||||
* Handle user interaction to stop auto-scroll
|
||||
*/
|
||||
function handleInteraction() {
|
||||
isUserInteracting = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle mouse enter to pause auto-scroll
|
||||
*/
|
||||
function handleMouseEnter() {
|
||||
isHovering = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle mouse leave to resume auto-scroll
|
||||
*/
|
||||
function handleMouseLeave() {
|
||||
isHovering = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize auto-scrolling for the supporters list like movie credits
|
||||
* @param {HTMLElement} container The scrollable container
|
||||
*/
|
||||
function initAutoScroll(container) {
|
||||
if (!container) return;
|
||||
|
||||
// Stop any existing animation and clear any pending timeout
|
||||
if (autoScrollRequest) {
|
||||
cancelAnimationFrame(autoScrollRequest);
|
||||
autoScrollRequest = null;
|
||||
}
|
||||
if (autoScrollTimeout) {
|
||||
clearTimeout(autoScrollTimeout);
|
||||
autoScrollTimeout = null;
|
||||
}
|
||||
|
||||
// Reset state for new scroll
|
||||
isUserInteracting = false;
|
||||
isHovering = false;
|
||||
container.scrollTop = 0;
|
||||
currentScrollPos = 0;
|
||||
|
||||
const scrollSpeed = 0.4; // Pixels per frame (~24px/sec at 60fps)
|
||||
|
||||
const step = () => {
|
||||
// Stop animation if container is hidden or no longer in DOM
|
||||
if (!container.offsetParent) {
|
||||
autoScrollRequest = null;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isHovering && !isUserInteracting) {
|
||||
const prevScrollTop = container.scrollTop;
|
||||
currentScrollPos += scrollSpeed;
|
||||
container.scrollTop = currentScrollPos;
|
||||
|
||||
// Check if we reached the bottom
|
||||
if (container.scrollTop === prevScrollTop && currentScrollPos > 1) {
|
||||
const isAtBottom = container.scrollTop + container.clientHeight >= container.scrollHeight - 1;
|
||||
if (isAtBottom) {
|
||||
autoScrollRequest = null;
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Keep currentScrollPos in sync if user scrolls manually or pauses
|
||||
currentScrollPos = container.scrollTop;
|
||||
}
|
||||
|
||||
autoScrollRequest = requestAnimationFrame(step);
|
||||
};
|
||||
|
||||
// Remove existing listeners before adding to avoid duplicates
|
||||
container.removeEventListener('mouseenter', handleMouseEnter);
|
||||
container.removeEventListener('mouseleave', handleMouseLeave);
|
||||
container.removeEventListener('wheel', handleInteraction);
|
||||
container.removeEventListener('touchstart', handleInteraction);
|
||||
container.removeEventListener('mousedown', handleInteraction);
|
||||
|
||||
// Event listeners to handle user control
|
||||
container.addEventListener('mouseenter', handleMouseEnter);
|
||||
container.addEventListener('mouseleave', handleMouseLeave);
|
||||
|
||||
// Use { passive: true } for better scroll performance
|
||||
container.addEventListener('wheel', handleInteraction, { passive: true });
|
||||
container.addEventListener('touchstart', handleInteraction, { passive: true });
|
||||
container.addEventListener('mousedown', handleInteraction);
|
||||
|
||||
// Initial delay before starting the credits-style scroll
|
||||
autoScrollTimeout = setTimeout(() => {
|
||||
if (container.scrollHeight > container.clientHeight) {
|
||||
autoScrollRequest = requestAnimationFrame(step);
|
||||
}
|
||||
}, 1800);
|
||||
}
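
// Usage note: renderSupporters() below calls initAutoScroll on the populated
// supporters grid; calling it again restarts the credits-style scroll from the
// top, since any previous animation frame and pending timeout are cancelled first.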
|
||||
|
||||
/**
|
||||
* Render supporters in the support modal
|
||||
*/
|
||||
export async function renderSupporters() {
|
||||
const supporters = await fetchSupporters();
|
||||
|
||||
// Update subtitle with total count
|
||||
const subtitleEl = document.getElementById('supportersSubtitle');
|
||||
if (subtitleEl) {
|
||||
const originalText = subtitleEl.textContent;
|
||||
subtitleEl.textContent = originalText.replace(/\d+/, supporters.totalCount);
|
||||
}
|
||||
|
||||
// Render special thanks
|
||||
const specialThanksGrid = document.getElementById('specialThanksGrid');
|
||||
if (specialThanksGrid && supporters.specialThanks) {
|
||||
specialThanksGrid.innerHTML = supporters.specialThanks
|
||||
.map(supporter => `
|
||||
<div class="supporter-special-card" title="${supporter}">
|
||||
<span class="supporter-special-name">${supporter}</span>
|
||||
</div>
|
||||
`)
|
||||
.join('');
|
||||
}
|
||||
|
||||
// Render all supporters
|
||||
const supportersGrid = document.getElementById('supportersGrid');
|
||||
if (supportersGrid && supporters.allSupporters) {
|
||||
supportersGrid.innerHTML = supporters.allSupporters
|
||||
.map((supporter, index, array) => {
|
||||
const separator = index < array.length - 1
|
||||
? '<span class="supporter-separator">·</span>'
|
||||
: '';
|
||||
return `
|
||||
<span class="supporter-name-item" title="${supporter}">${supporter}</span>${separator}
|
||||
`;
|
||||
})
|
||||
.join('');
|
||||
|
||||
// Initialize the auto-scroll effect
|
||||
initAutoScroll(supportersGrid);
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,11 @@ export class StatisticsManager {
|
||||
this.charts = {};
|
||||
this.data = {};
|
||||
this.initialized = false;
|
||||
this.listStates = {
|
||||
lora: { offset: 0, limit: 50, sort: 'desc', isLoading: false, hasMore: true },
|
||||
checkpoint: { offset: 0, limit: 50, sort: 'desc', isLoading: false, hasMore: true },
|
||||
embedding: { offset: 0, limit: 50, sort: 'desc', isLoading: false, hasMore: true }
|
||||
};
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
@@ -24,7 +29,7 @@ export class StatisticsManager {
|
||||
await this.loadAllData();
|
||||
|
||||
// Initialize charts and visualizations
|
||||
this.initializeVisualizations();
|
||||
await this.initializeVisualizations();
|
||||
|
||||
this.initialized = true;
|
||||
}
|
||||
@@ -97,7 +102,7 @@ export class StatisticsManager {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
initializeVisualizations() {
|
||||
async initializeVisualizations() {
|
||||
// Initialize metrics cards
|
||||
this.renderMetricsCards();
|
||||
|
||||
@@ -105,7 +110,8 @@ export class StatisticsManager {
|
||||
this.initializeCharts();
|
||||
|
||||
// Initialize lists and other components
|
||||
this.renderTopModelsLists();
|
||||
await this.initializeLists();
|
||||
this.renderLargestModelsList();
|
||||
this.renderTagCloud();
|
||||
this.renderInsights();
|
||||
}
|
||||
@@ -548,86 +554,87 @@ export class StatisticsManager {
|
||||
});
|
||||
}
|
||||
|
||||
renderTopModelsLists() {
|
||||
this.renderTopLorasList();
|
||||
this.renderTopCheckpointsList();
|
||||
this.renderTopEmbeddingsList();
|
||||
this.renderLargestModelsList();
|
||||
async initializeLists() {
|
||||
const listTypes = [
|
||||
{ type: 'lora', containerId: 'topLorasList' },
|
||||
{ type: 'checkpoint', containerId: 'topCheckpointsList' },
|
||||
{ type: 'embedding', containerId: 'topEmbeddingsList' }
|
||||
];
|
||||
|
||||
const promises = listTypes.map(({ type, containerId }) => {
|
||||
const container = document.getElementById(containerId);
|
||||
|
||||
if (container) {
|
||||
// Handle infinite scrolling
|
||||
container.addEventListener('scroll', () => {
|
||||
if (container.scrollTop + container.clientHeight >= container.scrollHeight - 50) {
|
||||
this.fetchAndRenderList(type, container);
|
||||
}
|
||||
});
|
||||
|
||||
// Initial fetch
|
||||
return this.fetchAndRenderList(type, container);
|
||||
}
|
||||
return Promise.resolve();
|
||||
});
|
||||
|
||||
await Promise.all(promises);
|
||||
}
|
||||
|
||||
renderTopLorasList() {
|
||||
const container = document.getElementById('topLorasList');
|
||||
if (!container || !this.data.usage?.top_loras) return;
|
||||
async fetchAndRenderList(type, container) {
|
||||
const state = this.listStates[type];
|
||||
if (state.isLoading || !state.hasMore) return;
|
||||
|
||||
const topLoras = this.data.usage.top_loras;
|
||||
state.isLoading = true;
|
||||
|
||||
if (topLoras.length === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder">No usage data available</div>';
|
||||
return;
|
||||
// Show loading indicator on initial load
|
||||
if (state.offset === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder"><i class="fas fa-spinner fa-spin"></i> Loading...</div>';
|
||||
}
|
||||
|
||||
container.innerHTML = topLoras.map(lora => `
|
||||
<div class="model-item">
|
||||
<img src="${lora.preview_url || '/loras_static/images/no-preview.png'}"
|
||||
alt="${lora.name}" class="model-preview"
|
||||
onerror="this.src='/loras_static/images/no-preview.png'">
|
||||
<div class="model-info">
|
||||
<div class="model-name" title="${lora.name}">${lora.name}</div>
|
||||
<div class="model-meta">${lora.base_model} • ${lora.folder}</div>
|
||||
</div>
|
||||
<div class="model-usage">${lora.usage_count}</div>
|
||||
</div>
|
||||
`).join('');
|
||||
}
|
||||
try {
|
||||
const url = `/api/lm/stats/model-usage-list?type=${type}&sort=${state.sort}&offset=${state.offset}&limit=${state.limit}`;
|
||||
const result = await this.fetchData(url);
|
||||
|
||||
if (result.success) {
|
||||
const items = result.data.items;
|
||||
|
||||
// Remove loading indicator if it's the first page
|
||||
if (state.offset === 0) {
|
||||
container.innerHTML = '';
|
||||
}
|
||||
|
||||
renderTopCheckpointsList() {
|
||||
const container = document.getElementById('topCheckpointsList');
|
||||
if (!container || !this.data.usage?.top_checkpoints) return;
|
||||
if (items.length === 0 && state.offset === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder">No models found</div>';
|
||||
state.hasMore = false;
|
||||
} else if (items.length < state.limit) {
|
||||
state.hasMore = false;
|
||||
}
|
||||
|
||||
const topCheckpoints = this.data.usage.top_checkpoints;
|
||||
|
||||
if (topCheckpoints.length === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder">No usage data available</div>';
|
||||
return;
|
||||
const html = items.map(model => `
|
||||
<div class="model-item">
|
||||
<img src="${model.preview_url || '/loras_static/images/no-preview.png'}"
|
||||
alt="${model.name}" class="model-preview"
|
||||
onerror="this.src='/loras_static/images/no-preview.png'">
|
||||
<div class="model-info">
|
||||
<div class="model-name" title="${model.name}">${model.name}</div>
|
||||
<div class="model-meta">${model.base_model} • ${model.folder || 'Root'}</div>
|
||||
</div>
|
||||
<div class="model-usage">${model.usage_count}</div>
|
||||
</div>
|
||||
`).join('');
|
||||
|
||||
container.insertAdjacentHTML('beforeend', html);
|
||||
state.offset += state.limit;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error loading ${type} list:`, error);
|
||||
if (state.offset === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder">Error loading data</div>';
|
||||
}
|
||||
} finally {
|
||||
state.isLoading = false;
|
||||
}
|
||||
|
||||
container.innerHTML = topCheckpoints.map(checkpoint => `
|
||||
<div class="model-item">
|
||||
<img src="${checkpoint.preview_url || '/loras_static/images/no-preview.png'}"
|
||||
alt="${checkpoint.name}" class="model-preview"
|
||||
onerror="this.src='/loras_static/images/no-preview.png'">
|
||||
<div class="model-info">
|
||||
<div class="model-name" title="${checkpoint.name}">${checkpoint.name}</div>
|
||||
<div class="model-meta">${checkpoint.base_model} • ${checkpoint.folder}</div>
|
||||
</div>
|
||||
<div class="model-usage">${checkpoint.usage_count}</div>
|
||||
</div>
|
||||
`).join('');
|
||||
}
|
||||
|
||||
renderTopEmbeddingsList() {
|
||||
const container = document.getElementById('topEmbeddingsList');
|
||||
if (!container || !this.data.usage?.top_embeddings) return;
|
||||
|
||||
const topEmbeddings = this.data.usage.top_embeddings;
|
||||
|
||||
if (topEmbeddings.length === 0) {
|
||||
container.innerHTML = '<div class="loading-placeholder">No usage data available</div>';
|
||||
return;
|
||||
}
|
||||
|
||||
container.innerHTML = topEmbeddings.map(embedding => `
|
||||
<div class="model-item">
|
||||
<img src="${embedding.preview_url || '/loras_static/images/no-preview.png'}"
|
||||
alt="${embedding.name}" class="model-preview"
|
||||
onerror="this.src='/loras_static/images/no-preview.png'">
|
||||
<div class="model-info">
|
||||
<div class="model-name" title="${embedding.name}">${embedding.name}</div>
|
||||
<div class="model-meta">${embedding.base_model} • ${embedding.folder}</div>
|
||||
</div>
|
||||
<div class="model-usage">${embedding.usage_count}</div>
|
||||
</div>
|
||||
`).join('');
|
||||
}
|
||||
|
||||
renderLargestModelsList() {
|
||||
|
||||
119
static/js/utils/civitaiUtils.js
Normal file
@@ -0,0 +1,119 @@
|
||||
/**
|
||||
* CivitAI URL utilities
|
||||
* Functions for working with CivitAI media URLs
|
||||
*/
|
||||
|
||||
/**
|
||||
* Optimization strategies for CivitAI URLs
|
||||
*/
|
||||
export const OptimizationMode = {
|
||||
/** Full quality for showcase/display - uses /optimized=true only */
|
||||
SHOWCASE: 'showcase',
|
||||
/** Thumbnail size for cards - uses /width=450,optimized=true */
|
||||
THUMBNAIL: 'thumbnail',
|
||||
};
|
||||
|
||||
/**
|
||||
* Rewrite Civitai preview URLs to use optimized renditions.
|
||||
* Mirrors the backend's rewrite_preview_url() function from py/utils/civitai_utils.py
|
||||
*
|
||||
* @param {string|null} sourceUrl - Original preview URL from the Civitai API
|
||||
* @param {string|null} mediaType - Optional media type hint ("image" or "video")
|
||||
* @param {string} mode - Optimization mode ('showcase' or 'thumbnail')
|
||||
* @returns {[string|null, boolean]} - Tuple of [rewritten URL or original, wasRewritten flag]
|
||||
*/
|
||||
export function rewriteCivitaiUrl(sourceUrl, mediaType = null, mode = OptimizationMode.THUMBNAIL) {
|
||||
if (!sourceUrl) {
|
||||
return [sourceUrl, false];
|
||||
}
|
||||
|
||||
try {
|
||||
const url = new URL(sourceUrl);
|
||||
|
||||
// Check if it's a CivitAI image domain
|
||||
if (url.hostname.toLowerCase() !== 'image.civitai.com') {
|
||||
return [sourceUrl, false];
|
||||
}
|
||||
|
||||
// Determine replacement based on mode and media type
|
||||
let replacement;
|
||||
if (mode === OptimizationMode.SHOWCASE) {
|
||||
// Full quality for showcase - no width restriction
|
||||
replacement = '/optimized=true';
|
||||
} else {
|
||||
// Thumbnail mode with width restriction
|
||||
replacement = '/width=450,optimized=true';
|
||||
if (mediaType && mediaType.toLowerCase() === 'video') {
|
||||
replacement = '/transcode=true,width=450,optimized=true';
|
||||
}
|
||||
}
|
||||
|
||||
// Replace /original=true with optimized version
|
||||
if (!url.pathname.includes('/original=true')) {
|
||||
return [sourceUrl, false];
|
||||
}
|
||||
|
||||
const updatedPath = url.pathname.replace('/original=true', replacement, 1);
|
||||
|
||||
if (updatedPath === url.pathname) {
|
||||
return [sourceUrl, false];
|
||||
}
|
||||
|
||||
url.pathname = updatedPath;
|
||||
return [url.toString(), true];
|
||||
} catch (e) {
|
||||
// Invalid URL
|
||||
return [sourceUrl, false];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the optimized URL for a media item, falling back to original if not a CivitAI URL
|
||||
*
|
||||
* @param {string} url - Original URL
|
||||
* @param {string} type - Media type ("image" or "video")
|
||||
* @param {string} mode - Optimization mode ('showcase' or 'thumbnail')
|
||||
* @returns {string} - Optimized URL or original URL
|
||||
*/
|
||||
export function getOptimizedUrl(url, type = 'image', mode = OptimizationMode.THUMBNAIL) {
|
||||
const [optimizedUrl] = rewriteCivitaiUrl(url, type, mode);
|
||||
return optimizedUrl || url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get showcase-optimized URL (full quality)
|
||||
*
|
||||
* @param {string} url - Original URL
|
||||
* @param {string} type - Media type ("image" or "video")
|
||||
* @returns {string} - Optimized URL for showcase display
|
||||
*/
|
||||
export function getShowcaseUrl(url, type = 'image') {
|
||||
return getOptimizedUrl(url, type, OptimizationMode.SHOWCASE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get thumbnail-optimized URL (width=450)
|
||||
*
|
||||
* @param {string} url - Original URL
|
||||
* @param {string} type - Media type ("image" or "video")
|
||||
* @returns {string} - Optimized URL for thumbnail display
|
||||
*/
|
||||
export function getThumbnailUrl(url, type = 'image') {
|
||||
return getOptimizedUrl(url, type, OptimizationMode.THUMBNAIL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a URL is from CivitAI
|
||||
*
|
||||
* @param {string} url - URL to check
|
||||
* @returns {boolean} - True if it's a CivitAI URL
|
||||
*/
|
||||
export function isCivitaiUrl(url) {
|
||||
if (!url) return false;
|
||||
try {
|
||||
const parsed = new URL(url);
|
||||
return parsed.hostname.toLowerCase() === 'image.civitai.com';
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
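Assuming the helpers above are imported from static/js/utils/civitaiUtils.js, a typical call rewrites an /original=true rendition into the thumbnail or showcase form. The sample URL below is illustrative; the expected outputs correspond to the unit tests further down in this diff:

// Illustrative usage sketch for the URL helpers above.
import { getThumbnailUrl, getShowcaseUrl, isCivitaiUrl } from './civitaiUtils.js';

const src = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';

// Card previews: width-limited rendition.
getThumbnailUrl(src, 'image');
// -> https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450,optimized=true/12345.jpeg

// Showcase display: full-quality optimized rendition (no width cap).
getShowcaseUrl(src, 'image');
// -> https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.jpeg

// Non-CivitAI URLs pass through unchanged.
isCivitaiUrl('https://example.com/image.jpg'); // false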
@@ -2,90 +2,133 @@
<div id="supportModal" class="modal">
<div class="modal-content support-modal">
<button class="close" onclick="modalManager.closeModal('supportModal')">×</button>
<div class="support-header">
<i class="fas fa-heart support-icon"></i>
<h2>{{ t('support.title') }}</h2>
</div>
<div class="support-content">
<p>{{ t('support.message') }}</p>

<div class="support-section">
<h3><i class="fas fa-comment"></i> {{ t('support.feedback.title') }}</h3>
<p>{{ t('support.feedback.description') }}</p>
<div class="support-links">
<a href="https://github.com/willmiao/ComfyUI-Lora-Manager/issues/new" class="social-link" target="_blank">
<i class="fab fa-github"></i>
<span>{{ t('support.links.submitGithubIssue') }}</span>
</a>
<a href="https://discord.gg/vcqNrWVFvM" class="social-link" target="_blank">
<i class="fab fa-discord"></i>
<span>{{ t('support.links.joinDiscord') }}</span>
</a>

<div class="support-container">
<!-- Left Side: Support Options -->
<div class="support-left">
<div class="support-header">
<i class="fas fa-heart support-icon"></i>
<h2>{{ t('support.title') }}</h2>
</div>
</div>

<div class="support-section">
<h3><i class="fas fa-rss"></i> {{ t('support.sections.followUpdates') }}</h3>
<div class="support-links">
<a href="https://www.youtube.com/@pixelpaws-ai" class="social-link" target="_blank">
<i class="fab fa-youtube"></i>
<span>{{ t('support.links.youtubeChannel') }}</span>
</a>
<a href="https://civitai.com/user/PixelPawsAI" class="social-link civitai-link" target="_blank">
<svg class="civitai-icon" viewBox="0 0 225 225" width="20" height="20">
<g transform="translate(0,225) scale(0.1,-0.1)" fill="currentColor">
<path d="M950 1899 c-96 -55 -262 -150 -367 -210 -106 -61 -200 -117 -208
-125 -13 -13 -15 -76 -15 -443 0 -395 1 -429 18 -443 9 -9 116 -73 237 -143
121 -70 283 -163 359 -208 76 -45 146 -80 155 -80 9 1 183 98 386 215 l370
215 2 444 3 444 -376 215 c-206 118 -378 216 -382 217 -4 1 -86 -43 -182 -98z
m346 -481 l163 -93 1 -57 0 -58 -89 0 c-87 0 -91 1 -166 44 l-78 45 -51 -30
c-28 -17 -61 -35 -73 -41 -21 -10 -23 -18 -23 -99 l0 -87 71 -41 c39 -23 73
-41 76 -41 3 0 37 18 75 40 68 39 72 40 164 40 l94 0 0 -53 c0 -60 23 -41
-198 -168 l-133 -77 -92 52 c-51 29 -126 73 -167 97 l-75 45 0 193 0 192 164
95 c91 52 167 94 169 94 2 0 78 -42 168 -92z"/>
</g>
</svg>
<span>{{ t('support.links.civitaiProfile') }}</span>
</a>
<div class="support-content">
<p>{{ t('support.message') }}</p>

<div class="support-section">
<h3><i class="fas fa-comment"></i> {{ t('support.feedback.title') }}</h3>
<p>{{ t('support.feedback.description') }}</p>
<div class="support-links">
<a href="https://github.com/willmiao/ComfyUI-Lora-Manager/issues/new" class="social-link" target="_blank">
<i class="fab fa-github"></i>
<span>{{ t('support.links.submitGithubIssue') }}</span>
</a>
<a href="https://discord.gg/vcqNrWVFvM" class="social-link" target="_blank">
<i class="fab fa-discord"></i>
<span>{{ t('support.links.joinDiscord') }}</span>
</a>
</div>
</div>

<div class="support-section">
<h3><i class="fas fa-rss"></i> {{ t('support.sections.followUpdates') }}</h3>
<div class="support-links">
<a href="https://www.youtube.com/@pixelpaws-ai" class="social-link" target="_blank">
<i class="fab fa-youtube"></i>
<span>{{ t('support.links.youtubeChannel') }}</span>
</a>
<a href="https://civitai.com/user/PixelPawsAI" class="social-link civitai-link" target="_blank">
<svg class="civitai-icon" viewBox="0 0 225 225" width="20" height="20">
<g transform="translate(0,225) scale(0.1,-0.1)" fill="currentColor">
<path d="M950 1899 c-96 -55 -262 -150 -367 -210 -106 -61 -200 -117 -208
-125 -13 -13 -15 -76 -15 -443 0 -395 1 -429 18 -443 9 -9 116 -73 237 -143
121 -70 283 -163 359 -208 76 -45 146 -80 155 -80 9 1 183 98 386 215 l370
215 2 444 3 444 -376 215 c-206 118 -378 216 -382 217 -4 1 -86 -43 -182 -98z
m346 -481 l163 -93 1 -57 0 -58 -89 0 c-87 0 -91 1 -166 44 l-78 45 -51 -30
c-28 -17 -61 -35 -73 -41 -21 -10 -23 -18 -23 -99 l0 -87 71 -41 c39 -23 73
-41 76 -41 3 0 37 18 75 40 68 39 72 40 164 40 l94 0 0 -53 c0 -60 23 -41
-198 -168 l-133 -77 -92 52 c-51 29 -126 73 -167 97 l-75 45 0 193 0 192 164
95 c91 52 167 94 169 94 2 0 78 -42 168 -92z"/>
</g>
</svg>
<span>{{ t('support.links.civitaiProfile') }}</span>
</a>
</div>
</div>

<div class="support-section">
<h3><i class="fas fa-coffee"></i> {{ t('support.sections.buyMeCoffee') }}</h3>
<p>{{ t('support.sections.coffeeDescription') }}</p>
<a href="https://ko-fi.com/pixelpawsai" class="kofi-button" target="_blank">
<i class="fas fa-mug-hot"></i>
<span>{{ t('support.links.supportKofi') }}</span>
</a>
</div>

<!-- Patreon Support Section -->
<div class="support-section">
<h3><i class="fab fa-patreon"></i> {{ t('support.sections.becomePatron') }}</h3>
<p>{{ t('support.sections.patronDescription') }}</p>
<a href="https://patreon.com/PixelPawsAI" class="patreon-button" target="_blank">
<i class="fab fa-patreon"></i>
<span>{{ t('support.links.supportPatreon') }}</span>
</a>
</div>

<!-- New section for Chinese payment methods -->
<div class="support-section">
<h3><i class="fas fa-qrcode"></i> {{ t('support.sections.wechatSupport') }}</h3>
<p>{{ t('support.sections.wechatDescription') }}</p>
<button class="secondary-btn qrcode-toggle" id="toggleQRCode">
<i class="fas fa-qrcode"></i>
<span class="toggle-text">{{ t('support.sections.showWechatQR') }}</span>
<i class="fas fa-chevron-down toggle-icon"></i>
</button>
<div class="qrcode-container" id="qrCodeContainer">
<img src="/loras_static/images/wechat-qr.webp" alt="WeChat Pay QR Code" class="qrcode-image">
</div>
</div>

<div class="support-footer">
<p>{{ t('support.footer') }}</p>
</div>
</div>
</div>

<div class="support-section">
<h3><i class="fas fa-coffee"></i> {{ t('support.sections.buyMeCoffee') }}</h3>
<p>{{ t('support.sections.coffeeDescription') }}</p>
<a href="https://ko-fi.com/pixelpawsai" class="kofi-button" target="_blank">
<i class="fas fa-mug-hot"></i>
<span>{{ t('support.links.supportKofi') }}</span>
</a>
</div>

<!-- Patreon Support Section -->
<div class="support-section">
<h3><i class="fab fa-patreon"></i> {{ t('support.sections.becomePatron') }}</h3>
<p>{{ t('support.sections.patronDescription') }}</p>
<a href="https://patreon.com/PixelPawsAI" class="patreon-button" target="_blank">
<i class="fab fa-patreon"></i>
<span>{{ t('support.links.supportPatreon') }}</span>
</a>
</div>
<!-- Right Side: Supporters -->
<div class="support-right">
<div class="supporters-section">
<div class="supporters-header">
<h2 class="supporters-title">
<i class="fas fa-hands-helping"></i>
{{ t('support.supporters.title') }}
</h2>
<p class="supporters-subtitle" id="supportersSubtitle">
{{ t('support.supporters.subtitle', count=0) }}
</p>
</div>

<!-- Special Thanks Section -->
<div class="supporters-group special-thanks-group">
<h3 class="supporters-group-title">
<i class="fas fa-star"></i>
{{ t('support.supporters.specialThanks') }}
</h3>
<div class="supporters-special-grid" id="specialThanksGrid">
<!-- Supporters will be loaded dynamically -->
</div>
</div>

<!-- New section for Chinese payment methods -->
<div class="support-section">
<h3><i class="fas fa-qrcode"></i> {{ t('support.sections.wechatSupport') }}</h3>
<p>{{ t('support.sections.wechatDescription') }}</p>
<button class="secondary-btn qrcode-toggle" id="toggleQRCode">
<i class="fas fa-qrcode"></i>
<span class="toggle-text">{{ t('support.sections.showWechatQR') }}</span>
<i class="fas fa-chevron-down toggle-icon"></i>
</button>
<div class="qrcode-container" id="qrCodeContainer">
<img src="/loras_static/images/wechat-qr.webp" alt="WeChat Pay QR Code" class="qrcode-image">
<!-- All Supporters Section -->
<div class="supporters-group all-supporters-group">
<h3 class="supporters-group-title">
<i class="fas fa-heart"></i>
{{ t('support.supporters.allSupporters') }}
</h3>
<div class="supporters-all-list" id="supportersGrid">
<!-- Supporters will be loaded dynamically -->
</div>
</div>
</div>
</div>

<div class="support-footer">
<p>{{ t('support.footer') }}</p>
</div>
</div>
</div>
</div>
159
tests/frontend/managers/MoveManager.test.js
Normal file
@@ -0,0 +1,159 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { moveManager } from '../../../static/js/managers/MoveManager.js';
import { state } from '../../../static/js/state/index.js';
import { modalManager } from '../../../static/js/managers/ModalManager.js';
import { getModelApiClient } from '../../../static/js/api/modelApiFactory.js';
import * as storageHelpers from '../../../static/js/utils/storageHelpers.js';

// Mock dependencies
vi.mock('../../../static/js/state/index.js', () => ({
state: {
currentPageType: 'loras',
selectedModels: new Set(),
global: {
settings: {
download_path_templates: {
lora: '{base_model}/unstaged'
}
}
}
}
}));

vi.mock('../../../static/js/managers/ModalManager.js', () => ({
modalManager: {
showModal: vi.fn(),
closeModal: vi.fn()
}
}));

vi.mock('../../../static/js/api/modelApiFactory.js', () => ({
getModelApiClient: vi.fn()
}));

vi.mock('../../../static/js/utils/storageHelpers.js', () => ({
getStorageItem: vi.fn(),
setStorageItem: vi.fn()
}));

vi.mock('../../../static/js/utils/uiHelpers.js', () => ({
showToast: vi.fn()
}));

vi.mock('../../../static/js/utils/i18nHelpers.js', () => ({
translate: vi.fn(key => key)
}));

describe('MoveManager', () => {
let mockApiClient;

beforeEach(() => {
vi.clearAllMocks();

// Setup DOM
document.body.innerHTML = `
<div id="moveModal">
<h2 id="moveModalTitle"></h2>
<label id="moveRootLabel"></label>
<select id="moveModelRoot"></select>
<input type="checkbox" id="moveUseDefaultPath" />
<div id="moveManualPathSelection">
<input id="moveFolderPath" />
<div id="moveFolderTree"></div>
</div>
<div id="moveTargetPathDisplay"><span class="path-text"></span></div>
</div>
`;

mockApiClient = {
apiConfig: {
config: {
displayName: 'LoRA',
supportsMove: true
},
endpoints: {
moveModel: '/api/move'
}
},
modelType: 'loras',
fetchModelRoots: vi.fn().mockResolvedValue({ roots: ['/models/loras'] }),
fetchUnifiedFolderTree: vi.fn().mockResolvedValue({ success: true, tree: {} }),
moveSingleModel: vi.fn().mockResolvedValue({ success: true })
};
getModelApiClient.mockReturnValue(mockApiClient);
});

it('should reset folder selection when showing move modal', async () => {
// Manually set a selected path in folderTreeManager
moveManager.folderTreeManager.selectedPath = 'previous/path';

await moveManager.showMoveModal('some/file.safetensors');

expect(moveManager.folderTreeManager.getSelectedPath()).toBe('');
});

it('should ignore manual folder selection when useDefaultPath is true', async () => {
// Setup state
moveManager.useDefaultPath = true;
moveManager.currentFilePath = '/models/loras/flux/my-lora.safetensors';
document.getElementById('moveModelRoot').innerHTML = '<option value="/models/loras">/models/loras</option>';
document.getElementById('moveModelRoot').value = '/models/loras';

// Manually set a selected path despite useDefaultPath being true
moveManager.folderTreeManager.selectedPath = 'wrong/folder';

await moveManager.moveModel();

// Should call moveSingleModel with the root path, NOT including the 'wrong/folder'
expect(mockApiClient.moveSingleModel).toHaveBeenCalledWith(
'/models/loras/flux/my-lora.safetensors',
'/models/loras',
true
);
});

it('should include manual folder selection when useDefaultPath is false', async () => {
// Setup state
moveManager.useDefaultPath = false;
moveManager.currentFilePath = '/models/loras/flux/my-lora.safetensors';
document.getElementById('moveModelRoot').innerHTML = '<option value="/models/loras">/models/loras</option>';
document.getElementById('moveModelRoot').value = '/models/loras';

// Set a selected path
moveManager.folderTreeManager.selectedPath = 'my/organized/folder';

await moveManager.moveModel();

// Should call moveSingleModel with root + selected folder
expect(mockApiClient.moveSingleModel).toHaveBeenCalledWith(
'/models/loras/flux/my-lora.safetensors',
'/models/loras/my/organized/folder',
false
);
});

it('should handle bulk move and ignore manual folder selection when useDefaultPath is true', async () => {
// Setup state
moveManager.useDefaultPath = true;
moveManager.bulkFilePaths = [
'/models/loras/flux/lora1.safetensors',
'/models/loras/flux/lora2.safetensors'
];
document.getElementById('moveModelRoot').innerHTML = '<option value="/models/loras">/models/loras</option>';
document.getElementById('moveModelRoot').value = '/models/loras';

// Manually set a selected path
moveManager.folderTreeManager.selectedPath = 'wrong/folder';

mockApiClient.moveBulkModels = vi.fn().mockResolvedValue({ success: true });

await moveManager.moveModel();

// Should call moveBulkModels with the root path, NOT including the 'wrong/folder'
expect(mockApiClient.moveBulkModels).toHaveBeenCalledWith(
moveManager.bulkFilePaths,
'/models/loras',
true
);
});
});
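The move tests above reduce to one rule for composing the target path: with the "use default path" toggle on, only the selected root is sent (the backend then applies the configured download path template) and any manual folder selection is ignored; with it off, the root and the manually selected folder are joined. A hedged sketch of that rule — buildTargetPath is an illustrative name, not an actual MoveManager method:

// Illustrative sketch of the target-path rule the tests above assert.
function buildTargetPath(root, selectedFolder, useDefaultPath) {
    if (useDefaultPath || !selectedFolder) {
        return root; // backend resolves the final folder from the path template
    }
    return `${root}/${selectedFolder}`;
}

// buildTargetPath('/models/loras', 'wrong/folder', true)         -> '/models/loras'
// buildTargetPath('/models/loras', 'my/organized/folder', false) -> '/models/loras/my/organized/folder'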
@@ -107,6 +107,33 @@ describe('Statistics dashboard rendering', () => {
],
},
},
'/api/lm/stats/model-usage-list?type=lora&sort=desc&offset=0&limit=50': {
success: true,
data: {
items: [
{ name: 'Lora A', base_model: 'SDXL', folder: 'loras', usage_count: 10, preview_url: '' },
],
total: 1,
},
},
'/api/lm/stats/model-usage-list?type=checkpoint&sort=desc&offset=0&limit=50': {
success: true,
data: {
items: [
{ name: 'Checkpoint A', base_model: 'SDXL', folder: 'checkpoints', usage_count: 5, preview_url: '' },
],
total: 1,
},
},
'/api/lm/stats/model-usage-list?type=embedding&sort=desc&offset=0&limit=50': {
success: true,
data: {
items: [
{ name: 'Embedding A', base_model: 'SDXL', folder: 'embeddings', usage_count: 7, preview_url: '' },
],
total: 1,
},
},
};

const { StatisticsManager } = await import(STATISTICS_MODULE);

172
tests/frontend/utils/civitaiUtils.test.js
Normal file
@@ -0,0 +1,172 @@
import { describe, it, expect } from 'vitest';
import {
rewriteCivitaiUrl,
getOptimizedUrl,
getShowcaseUrl,
getThumbnailUrl,
isCivitaiUrl,
OptimizationMode
} from '../../../static/js/utils/civitaiUtils.js';

describe('civitaiUtils', () => {
describe('OptimizationMode', () => {
it('should have correct mode values', () => {
expect(OptimizationMode.SHOWCASE).toBe('showcase');
expect(OptimizationMode.THUMBNAIL).toBe('thumbnail');
});
});

describe('rewriteCivitaiUrl', () => {
it('should rewrite image URLs with /original=true for thumbnail mode', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'image', OptimizationMode.THUMBNAIL);

expect(wasRewritten).toBe(true);
expect(rewritten).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450,optimized=true/12345.jpeg');
});

it('should rewrite image URLs with /original=true for showcase mode (no width)', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'image', OptimizationMode.SHOWCASE);

expect(wasRewritten).toBe(true);
expect(rewritten).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.jpeg');
});

it('should rewrite video URLs with /original=true for thumbnail mode', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.mp4';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'video', OptimizationMode.THUMBNAIL);

expect(wasRewritten).toBe(true);
expect(rewritten).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/transcode=true,width=450,optimized=true/12345.mp4');
});

it('should rewrite video URLs with /original=true for showcase mode (no width/transcode)', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.mp4';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'video', OptimizationMode.SHOWCASE);

expect(wasRewritten).toBe(true);
expect(rewritten).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.mp4');
});

it('should default to thumbnail mode when mode is not specified', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'image');

expect(wasRewritten).toBe(true);
expect(rewritten).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450,optimized=true/12345.jpeg');
});

it('should not rewrite URLs without /original=true', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450/12345.jpeg';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'image', OptimizationMode.THUMBNAIL);

expect(wasRewritten).toBe(false);
expect(rewritten).toBe(originalUrl);
});

it('should not rewrite non-CivitAI URLs', () => {
const originalUrl = 'https://example.com/image.jpg';
const [rewritten, wasRewritten] = rewriteCivitaiUrl(originalUrl, 'image', OptimizationMode.SHOWCASE);

expect(wasRewritten).toBe(false);
expect(rewritten).toBe(originalUrl);
});

it('should handle null/undefined URLs', () => {
const [rewritten1, wasRewritten1] = rewriteCivitaiUrl(null, 'image');
expect(wasRewritten1).toBe(false);
expect(rewritten1).toBe(null);

const [rewritten2, wasRewritten2] = rewriteCivitaiUrl(undefined, 'image');
expect(wasRewritten2).toBe(false);
expect(rewritten2).toBe(undefined);
});

it('should handle empty strings', () => {
const [rewritten, wasRewritten] = rewriteCivitaiUrl('', 'image');
expect(wasRewritten).toBe(false);
expect(rewritten).toBe('');
});

it('should handle invalid URLs gracefully', () => {
const [rewritten, wasRewritten] = rewriteCivitaiUrl('not-a-valid-url', 'image');
expect(wasRewritten).toBe(false);
expect(rewritten).toBe('not-a-valid-url');
});
});

describe('getOptimizedUrl', () => {
it('should return optimized URL for CivitAI images in thumbnail mode', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const optimized = getOptimizedUrl(originalUrl, 'image', OptimizationMode.THUMBNAIL);

expect(optimized).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450,optimized=true/12345.jpeg');
});

it('should return optimized URL for CivitAI images in showcase mode', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const optimized = getOptimizedUrl(originalUrl, 'image', OptimizationMode.SHOWCASE);

expect(optimized).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.jpeg');
});

it('should return original URL for non-CivitAI URLs', () => {
const originalUrl = 'https://example.com/image.jpg';
const optimized = getOptimizedUrl(originalUrl, 'image');

expect(optimized).toBe(originalUrl);
});
});

describe('getShowcaseUrl', () => {
it('should return showcase-optimized URL (full quality)', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const showcaseUrl = getShowcaseUrl(originalUrl, 'image');

expect(showcaseUrl).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.jpeg');
});

it('should handle videos for showcase', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.mp4';
const showcaseUrl = getShowcaseUrl(originalUrl, 'video');

expect(showcaseUrl).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/optimized=true/12345.mp4');
});
});

describe('getThumbnailUrl', () => {
it('should return thumbnail-optimized URL (width=450)', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.jpeg';
const thumbnailUrl = getThumbnailUrl(originalUrl, 'image');

expect(thumbnailUrl).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/width=450,optimized=true/12345.jpeg');
});

it('should handle videos for thumbnails', () => {
const originalUrl = 'https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/original=true/12345.mp4';
const thumbnailUrl = getThumbnailUrl(originalUrl, 'video');

expect(thumbnailUrl).toBe('https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc123/transcode=true,width=450,optimized=true/12345.mp4');
});
});

describe('isCivitaiUrl', () => {
it('should return true for CivitAI URLs', () => {
expect(isCivitaiUrl('https://image.civitai.com/something')).toBe(true);
expect(isCivitaiUrl('https://image.civitai.com/')).toBe(true);
});

it('should return false for non-CivitAI URLs', () => {
expect(isCivitaiUrl('https://example.com/image.jpg')).toBe(false);
expect(isCivitaiUrl('https://civitai.com/image.jpg')).toBe(false);
expect(isCivitaiUrl('')).toBe(false);
expect(isCivitaiUrl(null)).toBe(false);
expect(isCivitaiUrl(undefined)).toBe(false);
});

it('should handle invalid URLs gracefully', () => {
expect(isCivitaiUrl('not-a-url')).toBe(false);
});
});
});
@@ -14,12 +14,17 @@ from py.services.model_hash_index import ModelHashIndex
|
||||
from py.utils.utils import fuzzy_match, calculate_recipe_fingerprint
|
||||
|
||||
|
||||
pytestmark = pytest.mark.performance
|
||||
|
||||
|
||||
class TestHashIndexPerformance:
|
||||
"""Performance benchmarks for hash index operations."""
|
||||
|
||||
def test_hash_index_lookup_small(self, benchmark):
|
||||
"""Benchmark hash index lookup with 100 models."""
|
||||
index, target_hash = self._create_hash_index_with_n_models(100, return_target=True)
|
||||
index, target_hash = self._create_hash_index_with_n_models(
|
||||
100, return_target=True
|
||||
)
|
||||
|
||||
def lookup():
|
||||
return index.get_path(target_hash)
|
||||
@@ -29,7 +34,9 @@ class TestHashIndexPerformance:
|
||||
|
||||
def test_hash_index_lookup_medium(self, benchmark):
|
||||
"""Benchmark hash index lookup with 1,000 models."""
|
||||
index, target_hash = self._create_hash_index_with_n_models(1000, return_target=True)
|
||||
index, target_hash = self._create_hash_index_with_n_models(
|
||||
1000, return_target=True
|
||||
)
|
||||
|
||||
def lookup():
|
||||
return index.get_path(target_hash)
|
||||
@@ -39,7 +46,9 @@ class TestHashIndexPerformance:
|
||||
|
||||
def test_hash_index_lookup_large(self, benchmark):
|
||||
"""Benchmark hash index lookup with 10,000 models."""
|
||||
index, target_hash = self._create_hash_index_with_n_models(10000, return_target=True)
|
||||
index, target_hash = self._create_hash_index_with_n_models(
|
||||
10000, return_target=True
|
||||
)
|
||||
|
||||
def lookup():
|
||||
return index.get_path(target_hash)
|
||||
@@ -71,11 +80,11 @@ class TestHashIndexPerformance:
|
||||
|
||||
def _create_hash_index_with_n_models(self, n: int, return_target: bool = False):
|
||||
"""Create a hash index with n mock models.
|
||||
|
||||
|
||||
Args:
|
||||
n: Number of models to create
|
||||
return_target: If True, returns the hash of the middle model for lookup testing
|
||||
|
||||
|
||||
Returns:
|
||||
ModelHashIndex or tuple of (ModelHashIndex, target_hash)
|
||||
"""
|
||||
@@ -94,7 +103,7 @@ class TestHashIndexPerformance:
|
||||
|
||||
def _random_string(self, length: int) -> str:
|
||||
"""Generate a random string of fixed length."""
|
||||
return ''.join(random.choices(string.ascii_lowercase + string.digits, k=length))
|
||||
return "".join(random.choices(string.ascii_lowercase + string.digits, k=length))
|
||||
|
||||
|
||||
class TestFuzzyMatchPerformance:
|
||||
|
||||
@@ -31,7 +31,9 @@ from py.utils.metadata_manager import MetadataManager
|
||||
class DummyRoutes(BaseModelRoutes):
|
||||
template_name = "dummy.html"
|
||||
|
||||
def setup_specific_routes(self, registrar, prefix: str) -> None: # pragma: no cover - no extra routes in smoke tests
|
||||
def setup_specific_routes(
|
||||
self, registrar, prefix: str
|
||||
) -> None: # pragma: no cover - no extra routes in smoke tests
|
||||
return None
|
||||
|
||||
def __init__(self, service=None):
|
||||
@@ -59,7 +61,9 @@ class NullUpdateRecord:
|
||||
|
||||
@property
|
||||
def in_library_version_ids(self) -> list[int]:
|
||||
return [version.version_id for version in self.versions if version.is_in_library]
|
||||
return [
|
||||
version.version_id for version in self.versions if version.is_in_library
|
||||
]
|
||||
|
||||
def has_update(self) -> bool:
|
||||
return False
|
||||
@@ -86,7 +90,9 @@ class NullModelUpdateService:
|
||||
)
|
||||
for version_id in version_ids
|
||||
]
|
||||
return NullUpdateRecord(model_type=model_type, model_id=model_id, versions=versions)
|
||||
return NullUpdateRecord(
|
||||
model_type=model_type, model_id=model_id, versions=versions
|
||||
)
|
||||
|
||||
async def set_should_ignore(self, model_type, model_id, should_ignore):
|
||||
return NullUpdateRecord(
|
||||
@@ -95,7 +101,9 @@ class NullModelUpdateService:
|
||||
should_ignore_model=should_ignore,
|
||||
)
|
||||
|
||||
async def set_version_should_ignore(self, model_type, model_id, version_id, should_ignore):
|
||||
async def set_version_should_ignore(
|
||||
self, model_type, model_id, version_id, should_ignore
|
||||
):
|
||||
return await self.set_should_ignore(model_type, model_id, should_ignore)
|
||||
|
||||
async def get_record(self, *args, **kwargs):
|
||||
@@ -167,7 +175,9 @@ def download_manager_stub():
|
||||
|
||||
|
||||
def test_list_models_returns_formatted_items(mock_service, mock_scanner):
|
||||
mock_service.paginated_items = [{"file_path": "/tmp/demo.safetensors", "name": "Demo"}]
|
||||
mock_service.paginated_items = [
|
||||
{"file_path": "/tmp/demo.safetensors", "name": "Demo"}
|
||||
]
|
||||
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
@@ -176,7 +186,13 @@ def test_list_models_returns_formatted_items(mock_service, mock_scanner):
|
||||
payload = await response.json()
|
||||
|
||||
assert response.status == 200
|
||||
assert payload["items"] == [{"file_path": "/tmp/demo.safetensors", "name": "Demo", "formatted": True}]
|
||||
assert payload["items"] == [
|
||||
{
|
||||
"file_path": "/tmp/demo.safetensors",
|
||||
"name": "Demo",
|
||||
"formatted": True,
|
||||
}
|
||||
]
|
||||
assert payload["total"] == 1
|
||||
assert mock_service.formatted == payload["items"]
|
||||
finally:
|
||||
@@ -220,7 +236,9 @@ def test_routes_return_service_not_ready_when_unattached():
|
||||
asyncio.run(scenario())
|
||||
|
||||
|
||||
def test_delete_model_updates_cache_and_hash_index(mock_service, mock_scanner, tmp_path: Path):
|
||||
def test_delete_model_updates_cache_and_hash_index(
|
||||
mock_service, mock_scanner, tmp_path: Path
|
||||
):
|
||||
model_path = tmp_path / "sample.safetensors"
|
||||
model_path.write_bytes(b"model")
|
||||
mock_scanner._cache.raw_data = [{"file_path": str(model_path)}]
|
||||
@@ -271,17 +289,23 @@ def test_replace_preview_writes_file_and_updates_cache(
|
||||
)
|
||||
|
||||
form = FormData()
|
||||
form.add_field("preview_file", b"binary-data", filename="preview.png", content_type="image/png")
|
||||
form.add_field(
|
||||
"preview_file", b"binary-data", filename="preview.png", content_type="image/png"
|
||||
)
|
||||
form.add_field("model_path", str(model_path))
|
||||
form.add_field("nsfw_level", "2")
|
||||
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
try:
|
||||
response = await client.post("/api/lm/test-models/replace-preview", data=form)
|
||||
response = await client.post(
|
||||
"/api/lm/test-models/replace-preview", data=form
|
||||
)
|
||||
payload = await response.json()
|
||||
|
||||
expected_preview = str((tmp_path / "preview-model.webp")).replace(os.sep, "/")
|
||||
expected_preview = str((tmp_path / "preview-model.webp")).replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
assert response.status == 200
|
||||
assert payload["success"] is True
|
||||
assert payload["preview_url"] == "/static/preview-model.webp"
|
||||
@@ -299,6 +323,66 @@ def test_replace_preview_writes_file_and_updates_cache(
|
||||
asyncio.run(scenario())
|
||||
|
||||
|
||||
def test_set_preview_from_url_downloads_and_updates_cache(
|
||||
mock_service,
|
||||
mock_scanner,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
tmp_path: Path,
|
||||
):
|
||||
"""Test that set_preview_from_url endpoint downloads remote images and sets them as preview."""
|
||||
model_path = tmp_path / "url-preview-model.safetensors"
|
||||
model_path.write_bytes(b"model")
|
||||
metadata_path = tmp_path / "url-preview-model.metadata.json"
|
||||
metadata_path.write_text(json.dumps({"file_path": str(model_path)}))
|
||||
|
||||
mock_scanner._cache.raw_data = [{"file_path": str(model_path)}]
|
||||
|
||||
monkeypatch.setattr(
|
||||
config,
|
||||
"get_preview_static_url",
|
||||
lambda preview_path: f"/static/{Path(preview_path).name}",
|
||||
)
|
||||
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
try:
|
||||
# Mock the Downloader to return a test image
|
||||
from py.services import downloader
|
||||
|
||||
class FakeDownloader:
|
||||
async def download_to_memory(
|
||||
self, url, use_auth=False, return_headers=True
|
||||
):
|
||||
return True, b"fake-image-data", {"Content-Type": "image/jpeg"}
|
||||
|
||||
async def fake_get_downloader():
|
||||
return FakeDownloader()
|
||||
|
||||
monkeypatch.setattr(downloader, "get_downloader", fake_get_downloader)
|
||||
|
||||
response = await client.post(
|
||||
"/api/lm/test-models/set-preview-from-url",
|
||||
json={
|
||||
"model_path": str(model_path),
|
||||
"image_url": "https://example.com/image.jpg",
|
||||
"nsfw_level": 3,
|
||||
},
|
||||
)
|
||||
payload = await response.json()
|
||||
|
||||
expected_preview = str((tmp_path / "url-preview-model.webp")).replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
assert response.status == 200
|
||||
assert payload["success"] is True
|
||||
assert payload["preview_url"] == "/static/url-preview-model.webp"
|
||||
assert Path(expected_preview).exists()
|
||||
finally:
|
||||
await client.close()
|
||||
|
||||
asyncio.run(scenario())
|
||||
|
||||
|
||||
def test_fetch_civitai_hydrates_metadata_before_sync(
|
||||
mock_service,
|
||||
mock_scanner,
|
||||
@@ -370,9 +454,15 @@ def test_fetch_civitai_hydrates_metadata_before_sync(
|
||||
save_calls: list[tuple[str, dict]] = []
|
||||
captured: dict[str, dict] = {}
|
||||
|
||||
monkeypatch.setattr(MetadataManager, "load_metadata", staticmethod(fake_load_metadata))
|
||||
monkeypatch.setattr(MetadataManager, "save_metadata", staticmethod(fake_save_metadata))
|
||||
monkeypatch.setattr(MetadataSyncService, "fetch_and_update_model", fake_fetch_and_update_model)
|
||||
monkeypatch.setattr(
|
||||
MetadataManager, "load_metadata", staticmethod(fake_load_metadata)
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
MetadataManager, "save_metadata", staticmethod(fake_save_metadata)
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
MetadataSyncService, "fetch_and_update_model", fake_fetch_and_update_model
|
||||
)
|
||||
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
@@ -386,7 +476,10 @@ def test_fetch_civitai_hydrates_metadata_before_sync(
|
||||
assert response.status == 200
|
||||
assert payload["success"] is True
|
||||
assert captured["model_data"]["custom_field"] == "preserve"
|
||||
assert captured["model_data"]["civitai"]["images"][0]["url"] == "https://example.com/existing.png"
|
||||
assert (
|
||||
captured["model_data"]["civitai"]["images"][0]["url"]
|
||||
== "https://example.com/existing.png"
|
||||
)
|
||||
assert captured["model_data"]["civitai"]["trainedWords"] == ["keep"]
|
||||
assert captured["model_data"]["civitai"]["id"] == 99
|
||||
finally:
|
||||
@@ -398,7 +491,10 @@ def test_fetch_civitai_hydrates_metadata_before_sync(
|
||||
saved_path, saved_payload = save_calls[0]
|
||||
assert saved_path == str(metadata_path)
|
||||
assert saved_payload["custom_field"] == "preserve"
|
||||
assert saved_payload["civitai"]["images"][0]["url"] == "https://example.com/existing.png"
|
||||
assert (
|
||||
saved_payload["civitai"]["images"][0]["url"]
|
||||
== "https://example.com/existing.png"
|
||||
)
|
||||
assert saved_payload["civitai"]["trainedWords"] == ["keep"]
|
||||
assert saved_payload["civitai"]["id"] == 99
|
||||
assert saved_payload["legacy_field"] == "legacy"
|
||||
@@ -432,11 +528,22 @@ def test_download_model_invokes_download_manager(
|
||||
assert call_args["download_id"] == payload["download_id"]
|
||||
progress = ws_manager.get_download_progress(payload["download_id"])
|
||||
assert progress is not None
|
||||
expected_progress = round(download_manager_stub.last_progress_snapshot.percent_complete)
|
||||
expected_progress = round(
|
||||
download_manager_stub.last_progress_snapshot.percent_complete
|
||||
)
|
||||
assert progress["progress"] == expected_progress
|
||||
assert progress["bytes_downloaded"] == download_manager_stub.last_progress_snapshot.bytes_downloaded
|
||||
assert progress["total_bytes"] == download_manager_stub.last_progress_snapshot.total_bytes
|
||||
assert progress["bytes_per_second"] == download_manager_stub.last_progress_snapshot.bytes_per_second
|
||||
assert (
|
||||
progress["bytes_downloaded"]
|
||||
== download_manager_stub.last_progress_snapshot.bytes_downloaded
|
||||
)
|
||||
assert (
|
||||
progress["total_bytes"]
|
||||
== download_manager_stub.last_progress_snapshot.total_bytes
|
||||
)
|
||||
assert (
|
||||
progress["bytes_per_second"]
|
||||
== download_manager_stub.last_progress_snapshot.bytes_per_second
|
||||
)
|
||||
assert "timestamp" in progress
|
||||
|
||||
progress_response = await client.get(
|
||||
@@ -526,21 +633,30 @@ def test_auto_organize_progress_returns_latest_snapshot(mock_service):
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
try:
|
||||
await ws_manager.broadcast_auto_organize_progress({"status": "processing", "percent": 50})
|
||||
await ws_manager.broadcast_auto_organize_progress(
|
||||
{"status": "processing", "percent": 50}
|
||||
)
|
||||
|
||||
response = await client.get("/api/lm/test-models/auto-organize-progress")
|
||||
payload = await response.json()
|
||||
|
||||
assert response.status == 200
|
||||
assert payload == {"success": True, "progress": {"status": "processing", "percent": 50}}
|
||||
assert payload == {
|
||||
"success": True,
|
||||
"progress": {"status": "processing", "percent": 50},
|
||||
}
|
||||
finally:
|
||||
await client.close()
|
||||
|
||||
|
||||
asyncio.run(scenario())
|
||||
|
||||
|
||||
def test_auto_organize_route_emits_progress(mock_service, monkeypatch: pytest.MonkeyPatch):
|
||||
async def fake_auto_organize(self, file_paths=None, progress_callback=None, exclusion_patterns=None):
|
||||
def test_auto_organize_route_emits_progress(
|
||||
mock_service, monkeypatch: pytest.MonkeyPatch
|
||||
):
|
||||
async def fake_auto_organize(
|
||||
self, file_paths=None, progress_callback=None, exclusion_patterns=None
|
||||
):
|
||||
result = AutoOrganizeResult()
|
||||
result.total = 1
|
||||
result.processed = 1
|
||||
@@ -549,8 +665,12 @@ def test_auto_organize_route_emits_progress(mock_service, monkeypatch: pytest.Mo
|
||||
result.failure_count = 0
|
||||
result.operation_type = "bulk"
|
||||
if progress_callback is not None:
|
||||
await progress_callback.on_progress({"type": "auto_organize_progress", "status": "started"})
|
||||
await progress_callback.on_progress({"type": "auto_organize_progress", "status": "completed"})
|
||||
await progress_callback.on_progress(
|
||||
{"type": "auto_organize_progress", "status": "started"}
|
||||
)
|
||||
await progress_callback.on_progress(
|
||||
{"type": "auto_organize_progress", "status": "completed"}
|
||||
)
|
||||
return result
|
||||
|
||||
monkeypatch.setattr(
|
||||
@@ -562,7 +682,9 @@ def test_auto_organize_route_emits_progress(mock_service, monkeypatch: pytest.Mo
|
||||
async def scenario():
|
||||
client = await create_test_client(mock_service)
|
||||
try:
|
||||
response = await client.post("/api/lm/test-models/auto-organize", json={"file_paths": []})
|
||||
response = await client.post(
|
||||
"/api/lm/test-models/auto-organize", json={"file_paths": []}
|
||||
)
|
||||
payload = await response.json()
|
||||
|
||||
assert response.status == 200
|
||||
|
||||
@@ -73,6 +73,46 @@ class TestCacheEntryValidator:
|
||||
assert result.repaired is False
|
||||
assert any('sha256' in error for error in result.errors)
|
||||
|
||||
def test_validate_empty_sha256_allowed_when_hash_status_pending(self):
|
||||
"""Test validation passes when sha256 is empty but hash_status is pending (lazy hash)"""
|
||||
entry = {
|
||||
'file_path': '/models/test.safetensors',
|
||||
'sha256': '', # Empty string
|
||||
'hash_status': 'pending', # Lazy hash calculation
|
||||
}
|
||||
|
||||
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||
|
||||
assert result.is_valid is True
|
||||
assert result.entry['sha256'] == ''
|
||||
assert result.entry['hash_status'] == 'pending'
|
||||
|
||||
def test_validate_empty_sha256_fails_when_hash_status_not_pending(self):
|
||||
"""Test validation fails when sha256 is empty and hash_status is not pending"""
|
||||
entry = {
|
||||
'file_path': '/models/test.safetensors',
|
||||
'sha256': '', # Empty string
|
||||
'hash_status': 'completed', # Not pending
|
||||
}
|
||||
|
||||
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||
|
||||
assert result.is_valid is False
|
||||
assert any('sha256' in error for error in result.errors)
|
||||
|
||||
def test_validate_empty_sha256_fails_when_hash_status_missing(self):
|
||||
"""Test validation fails when sha256 is empty and hash_status is missing"""
|
||||
entry = {
|
||||
'file_path': '/models/test.safetensors',
|
||||
'sha256': '', # Empty string
|
||||
# hash_status missing (defaults to 'completed')
|
||||
}
|
||||
|
||||
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||
|
||||
assert result.is_valid is False
|
||||
assert any('sha256' in error for error in result.errors)
|
||||
|
||||
def test_validate_empty_required_field_file_path(self):
|
||||
"""Test validation fails when file_path is empty string"""
|
||||
entry = {
|
||||
|
||||
404
tests/services/use_cases/test_bulk_metadata_refresh_use_case.py
Normal file
404
tests/services/use_cases/test_bulk_metadata_refresh_use_case.py
Normal file
@@ -0,0 +1,404 @@
|
||||
"""Tests for BulkMetadataRefreshUseCase."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from types import SimpleNamespace
|
||||
from typing import Any, Dict
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from py.services.use_cases.bulk_metadata_refresh_use_case import (
|
||||
BulkMetadataRefreshUseCase,
|
||||
MetadataRefreshProgressReporter,
|
||||
)
|
||||
from py.utils import metadata_manager
|
||||
|
||||
|
||||
class MockProgressReporter:
|
||||
"""Mock progress reporter for testing."""
|
||||
|
||||
def __init__(self):
|
||||
self.progress_calls = []
|
||||
|
||||
async def on_progress(self, payload: Dict[str, Any]) -> None:
|
||||
self.progress_calls.append(payload)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_service():
|
||||
"""Create a mock service with scanner."""
|
||||
scanner = MagicMock()
|
||||
scanner.get_cached_data = AsyncMock()
|
||||
scanner.reset_cancellation = MagicMock()
|
||||
scanner.is_cancelled = MagicMock(return_value=False)
|
||||
scanner.update_single_model_cache = AsyncMock(return_value=True)
|
||||
scanner.calculate_hash_for_model = AsyncMock(return_value="calculated_hash_123")
|
||||
|
||||
service = MagicMock()
|
||||
service.scanner = scanner
|
||||
service.model_type = "checkpoint"
|
||||
|
||||
return service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_metadata_sync():
|
||||
"""Create a mock metadata sync service."""
|
||||
sync = MagicMock()
|
||||
sync.fetch_and_update_model = AsyncMock(return_value=(True, None))
|
||||
return sync
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_settings():
|
||||
"""Create mock settings service."""
|
||||
settings = MagicMock()
|
||||
settings.get = MagicMock(return_value=False)
|
||||
return settings
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def use_case(mock_service, mock_metadata_sync, mock_settings):
|
||||
"""Create a BulkMetadataRefreshUseCase instance."""
|
||||
return BulkMetadataRefreshUseCase(
|
||||
service=mock_service,
|
||||
metadata_sync=mock_metadata_sync,
|
||||
settings_service=mock_settings,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
|
||||
async def test_pending_hash_calculated_on_demand(mock_hydrate, use_case, mock_service, mock_metadata_sync):
|
||||
"""Test that models with pending hash status get their hash calculated on demand."""
|
||||
mock_hydrate.return_value = None
|
||||
|
||||
# Setup cache with a model that has pending hash
|
||||
pending_model = {
|
||||
"file_path": "/extra_ckpt/model.safetensors",
|
||||
"sha256": "", # Empty hash
|
||||
"hash_status": "pending",
|
||||
"model_name": "Test Model",
|
||||
"folder": "extra_ckpt",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
cache = SimpleNamespace(raw_data=[pending_model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
# Execute
|
||||
result = await use_case.execute()
|
||||
|
||||
# Verify hash was calculated
|
||||
mock_service.scanner.calculate_hash_for_model.assert_called_once_with(
|
||||
"/extra_ckpt/model.safetensors"
|
||||
)
|
||||
|
||||
# Verify model hash was updated
|
||||
assert pending_model["sha256"] == "calculated_hash_123"
|
||||
assert pending_model["hash_status"] == "completed"
|
||||
|
||||
# Verify metadata sync was called with the calculated hash
|
||||
mock_metadata_sync.fetch_and_update_model.assert_called_once()
|
||||
call_args = mock_metadata_sync.fetch_and_update_model.call_args[1]
|
||||
assert call_args["sha256"] == "calculated_hash_123"
|
||||
|
||||
assert result["success"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
|
||||
async def test_skip_model_when_hash_calculation_fails(mock_hydrate, use_case, mock_service, mock_metadata_sync):
|
||||
"""Test that models are skipped when hash calculation fails."""
|
||||
mock_hydrate.return_value = None
|
||||
|
||||
# Setup model with pending hash
|
||||
pending_model = {
|
||||
"file_path": "/extra_ckpt/model.safetensors",
|
||||
"sha256": "",
|
||||
"hash_status": "pending",
|
||||
"model_name": "Test Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
# Make hash calculation fail
|
||||
mock_service.scanner.calculate_hash_for_model.return_value = None
|
||||
|
||||
cache = SimpleNamespace(raw_data=[pending_model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
# Execute
|
||||
result = await use_case.execute()
|
||||
|
||||
# Verify hash was attempted
|
||||
mock_service.scanner.calculate_hash_for_model.assert_called_once()
|
||||
|
||||
# Verify metadata sync was NOT called (model skipped)
|
||||
mock_metadata_sync.fetch_and_update_model.assert_not_called()
|
||||
|
||||
# Verify result shows processed but no success
|
||||
assert result["success"] is True
|
||||
assert result["processed"] == 1
|
||||
assert result["updated"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_skip_model_when_scanner_does_not_support_lazy_hash(
|
||||
use_case, mock_service, mock_metadata_sync
|
||||
):
|
||||
"""Test that models are skipped when scanner doesn't support lazy hash calculation."""
|
||||
# Setup model with pending hash
|
||||
pending_model = {
|
||||
"file_path": "/models/model.safetensors",
|
||||
"sha256": "",
|
||||
"hash_status": "pending",
|
||||
"model_name": "Test Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
# Remove calculate_hash_for_model method (simulating LoRA scanner)
|
||||
del mock_service.scanner.calculate_hash_for_model
|
||||
|
||||
cache = SimpleNamespace(raw_data=[pending_model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
# Execute
|
||||
result = await use_case.execute()
|
||||
|
||||
# Verify metadata sync was NOT called
|
||||
mock_metadata_sync.fetch_and_update_model.assert_not_called()
|
||||
|
||||
assert result["success"] is True
|
||||
assert result["processed"] == 1
|
||||
assert result["updated"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
|
||||
async def test_normal_model_with_existing_hash_not_affected(mock_hydrate, use_case, mock_service, mock_metadata_sync):
|
||||
"""Test that models with existing hash work normally."""
|
||||
mock_hydrate.return_value = None
|
||||
|
||||
# Setup model with existing hash
|
||||
existing_model = {
|
||||
"file_path": "/models/model.safetensors",
|
||||
"sha256": "existing_hash_abc",
|
||||
"hash_status": "completed",
|
||||
"model_name": "Test Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
cache = SimpleNamespace(raw_data=[existing_model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
# Execute
|
||||
result = await use_case.execute()
|
||||
|
||||
# Verify hash calculation was NOT called
|
||||
assert not mock_service.scanner.calculate_hash_for_model.called
|
||||
|
||||
# Verify metadata sync was called with existing hash
|
||||
mock_metadata_sync.fetch_and_update_model.assert_called_once()
|
||||
call_args = mock_metadata_sync.fetch_and_update_model.call_args[1]
|
||||
assert call_args["sha256"] == "existing_hash_abc"
|
||||
|
||||
assert result["success"] is True
|
||||
assert result["updated"] == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
|
||||
async def test_mixed_models_some_pending_some_existing(mock_hydrate, use_case, mock_service, mock_metadata_sync):
|
||||
"""Test handling of mixed models: some with pending hash, some with existing hash."""
|
||||
mock_hydrate.return_value = None
|
||||
|
||||
pending_model = {
|
||||
"file_path": "/extra_ckpt/pending_model.safetensors",
|
||||
"sha256": "",
|
||||
"hash_status": "pending",
|
||||
"model_name": "Pending Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
existing_model = {
|
||||
"file_path": "/models/existing_model.safetensors",
|
||||
"sha256": "existing_hash_xyz",
|
||||
"hash_status": "completed",
|
||||
"model_name": "Existing Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
cache = SimpleNamespace(raw_data=[pending_model, existing_model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
# Execute
|
||||
result = await use_case.execute()
|
||||
|
||||
# Verify hash was calculated only for pending model
|
||||
mock_service.scanner.calculate_hash_for_model.assert_called_once_with(
|
||||
"/extra_ckpt/pending_model.safetensors"
|
||||
)
|
||||
|
||||
# Verify metadata sync was called for both
|
||||
assert mock_metadata_sync.fetch_and_update_model.call_count == 2
|
||||
|
||||
assert result["success"] is True
|
||||
assert result["processed"] == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
|
||||
async def test_progress_callback_receives_updates(mock_hydrate, use_case, mock_service):
|
||||
"""Test that progress callback receives correct updates."""
|
||||
mock_hydrate.return_value = None
|
||||
|
||||
model = {
|
||||
"file_path": "/models/model.safetensors",
|
||||
"sha256": "hash123",
|
||||
"model_name": "Test Model",
|
||||
"civitai": {},
|
||||
"from_civitai": False,
|
||||
"civitai_deleted": False,
|
||||
}
|
||||
|
||||
cache = SimpleNamespace(raw_data=[model], resort=AsyncMock())
|
||||
mock_service.scanner.get_cached_data.return_value = cache
|
||||
|
||||
reporter = MockProgressReporter()
|
||||
|
||||
# Execute
|
||||
await use_case.execute(progress_callback=reporter)
|
||||
|
||||
# Verify progress was reported
|
||||
assert len(reporter.progress_calls) >= 2
|
||||
|
||||
# Check started status
|
||||
started_calls = [c for c in reporter.progress_calls if c["status"] == "started"]
|
||||
assert len(started_calls) == 1
|
||||
|
||||
# Check completed status
|
||||
completed_calls = [c for c in reporter.progress_calls if c["status"] == "completed"]
|
||||
assert len(completed_calls) == 1
|
||||
assert completed_calls[0]["processed"] == 1
|
||||
assert completed_calls[0]["success"] == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
async def test_respects_skip_metadata_refresh_flag(mock_hydrate, use_case, mock_service, mock_metadata_sync):
    """Test that models with skip_metadata_refresh=True are skipped."""
    mock_hydrate.return_value = None

    skip_model = {
        "file_path": "/models/skip_model.safetensors",
        "sha256": "hash123",
        "model_name": "Skip Model",
        "skip_metadata_refresh": True,
        "civitai": {},
    }

    normal_model = {
        "file_path": "/models/normal_model.safetensors",
        "sha256": "hash456",
        "model_name": "Normal Model",
        "civitai": {},
        "from_civitai": False,
        "civitai_deleted": False,
    }

    cache = SimpleNamespace(raw_data=[skip_model, normal_model], resort=AsyncMock())
    mock_service.scanner.get_cached_data.return_value = cache

    # Execute
    result = await use_case.execute()

    # Verify only normal model was processed
    assert mock_metadata_sync.fetch_and_update_model.call_count == 1
    call_args = mock_metadata_sync.fetch_and_update_model.call_args[1]
    assert call_args["file_path"] == "/models/normal_model.safetensors"

    assert result["processed"] == 1


@pytest.mark.asyncio
@patch.object(metadata_manager.MetadataManager, "hydrate_model_data")
async def test_respects_skip_paths(mock_hydrate, use_case, mock_service, mock_metadata_sync):
    """Test that models in skip paths are excluded."""
    mock_hydrate.return_value = None

    # Setup settings to skip certain paths
    use_case._settings.get = MagicMock(side_effect=lambda key, default=None: {
        "enable_metadata_archive_db": False,
        "metadata_refresh_skip_paths": ["skip_folder"],
    }.get(key, default))

    skip_path_model = {
        "file_path": "/models/skip_folder/model.safetensors",
        "sha256": "hash123",
        "model_name": "Skip Path Model",
        "folder": "skip_folder",
        "civitai": {},
        "from_civitai": False,
        "civitai_deleted": False,
    }

    normal_model = {
        "file_path": "/models/normal/model.safetensors",
        "sha256": "hash456",
        "model_name": "Normal Model",
        "folder": "normal",
        "civitai": {},
        "from_civitai": False,
        "civitai_deleted": False,
    }

    cache = SimpleNamespace(raw_data=[skip_path_model, normal_model], resort=AsyncMock())
    mock_service.scanner.get_cached_data.return_value = cache

    # Execute
    result = await use_case.execute()

    # Verify only normal model was processed
    assert mock_metadata_sync.fetch_and_update_model.call_count == 1
    call_args = mock_metadata_sync.fetch_and_update_model.call_args[1]
    assert "normal" in call_args["file_path"]

    assert result["processed"] == 1


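# The settings stub in the test above uses `side_effect` with a lambda: keys that
# are not in the override dict fall through to the caller-supplied default, so only
# the two overridden settings change behaviour. A standalone illustration of that
# pattern (the setting names here are invented for the example):
def test_settings_side_effect_illustration():
    get = MagicMock(side_effect=lambda key, default=None: {"feature_enabled": True}.get(key, default))
    assert get("feature_enabled") is True
    assert get("missing_key", "fallback") == "fallback"
    assert get("missing_key") is None

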
@pytest.mark.asyncio
async def test_model_without_hash_skipped(use_case, mock_service, mock_metadata_sync):
    """Test that models without hash (and not pending) are skipped."""
    no_hash_model = {
        "file_path": "/models/no_hash_model.safetensors",
        "sha256": "",  # Empty but NOT pending
        "hash_status": "completed",  # Not pending
        "model_name": "No Hash Model",
        "civitai": {},
        "from_civitai": False,
        "civitai_deleted": False,
    }

    cache = SimpleNamespace(raw_data=[no_hash_model], resort=AsyncMock())
    mock_service.scanner.get_cached_data.return_value = cache

    # Execute
    result = await use_case.execute()

    # Verify metadata sync was NOT called
    mock_metadata_sync.fetch_and_update_model.assert_not_called()

    assert result["processed"] == 1
    assert result["updated"] == 0