diff --git a/Makefile b/Makefile index 8b24e13..d2c8ec1 100644 --- a/Makefile +++ b/Makefile @@ -1,18 +1,16 @@ - +build-wasm: + wasm-pack build --target no-modules --out-dir pkg crates/lora-inspector-wasm --release --weak-refs test: cargo test --workspace -build-wasm: - wasm-pack build --target no-modules --out-dir pkg crates/lora-inspector-wasm --release --weak-refs - build-dev-wasm: wasm-pack build --target no-modules --out-dir pkg crates/lora-inspector-wasm --release --weak-refs dev-wasm: cd crates/lora-inspector-wasm && yarn vite -dev-wasm-2: +dev-wasm-cors: cd crates/lora-inspector-wasm/ && python simple-cors-server.py diff --git a/crates/inspector/src/weight.rs b/crates/inspector/src/weight.rs index 426024a..52df9d4 100644 --- a/crates/inspector/src/weight.rs +++ b/crates/inspector/src/weight.rs @@ -464,9 +464,9 @@ impl Weight for BufferedLoRAWeight { // .collect() // } - fn dora_scale(&self, base_name: &str) -> Result { - self.get(&format!("{base_name}.dora_scale")) - } + // fn dora_scale(&self, base_name: &str) -> Result { + // self.get(&format!("{base_name}.dora_scale")) + // } fn dims(&self) -> HashSet { self.buffered @@ -480,14 +480,10 @@ impl Weight for BufferedLoRAWeight { } else if k.contains("lokr_w1") { self.get(k).map(|v| v.dims()[0]).ok() } else if k.contains("b1.weight") { - // dbg!(self.get(k).map(|v| v.dims().to_vec()).unwrap()); - // dbg!(self.get(k).map(|v| v.dims()[0]).ok()); - // self.get(k).map(|v| v.dims().last().copied()).ok().flatten() self.get(k).map(|v| v.dims()[0]).ok() } else if k.contains("oft_diag") { self.get(k).map(|v| v.dims().last().copied()).ok().flatten() } else if k.contains("oft_blocks") { - // dbg!(self.get(k).map(|v| v.dims().to_vec()).unwrap()); self.get(k).map(|v| v.dims().last().copied()).ok().flatten() } else { None @@ -538,16 +534,37 @@ pub trait WeightKey { pub trait Weight { fn get(&self, key: &str) -> Result; + /// Most common precision datatype fn precision(&self) -> Option; + + /// Scale LoRA weights by the 
alpha and combine the A/B weights + /// + /// # Errors + /// + /// This function will return an error if tensor operations fail. fn scale_lora_weight(&self, base_name: &str) -> Result; fn scale_glora_weights(&self, base_name: &str) -> Result; + /// Scale the weights by the alpha and combine with the LoHa/Hada/FedPara weights + /// + /// # Errors + /// + /// This function will return an error if the tensor operations fail. fn scale_hada_weight(&self, base_name: &str) -> Result; + + /// Scale the weights by the alpha and combine with the LoKr weights + /// + /// # Errors + /// + /// This function will return an error if the tensor operations fail. fn scale_lokr_weight(&self, base_name: &str) -> Result; + /// Unique alphas in the tensors fn alphas(&self) -> HashSet; - // fn dora_scales(&self) -> Vec>; - fn dora_scale(&self, key: &str) -> Result; + + /// Unique dimensions in the tensors fn dims(&self) -> HashSet; + + /// All shapes dimensions by HashMap of tensor modules fn shapes(&self) -> HashMap>; } @@ -604,14 +621,6 @@ impl WeightKey for LoRAWeight { self.keys_by_key("alpha") } - // fn up_keys(&self) -> Vec { - // self.keys_by_key("lora_up") - // } - // - // fn down_keys(&self) -> Vec { - // self.keys_by_key("lora_down") - // } - fn base_names(&self) -> Vec { self.weight_keys() .iter() @@ -899,9 +908,9 @@ impl Weight for LoRAWeight { }) } - fn dora_scale(&self, base_name: &str) -> Result { - self.get(&format!("{base_name}.dora_scale")) - } + // fn dora_scale(&self, base_name: &str) -> Result { + // self.get(&format!("{base_name}.dora_scale")) + // } fn dims(&self) -> HashSet { self.tensors diff --git a/crates/lora-inspector-wasm/assets/js/main.js b/crates/lora-inspector-wasm/assets/js/main.js index ac589e3..fc4897c 100644 --- a/crates/lora-inspector-wasm/assets/js/main.js +++ b/crates/lora-inspector-wasm/assets/js/main.js @@ -1,4 +1,3 @@ -"use strict"; import React from "react"; import ReactDOM from "react-dom/client"; @@ -9,7 +8,7 @@ function Header({ metadata }) 
{ } function ModelSpec({ metadata }) { - let training = [ + const training = [ h("div", { className: "row space-apart", key: "training_timings" }, [ metadata.has("ss_training_started_at") && h(MetaAttribute, { @@ -31,12 +30,11 @@ function ModelSpec({ metadata }) { h(MetaAttribute, { key: "elapsed_at", name: "Elapsed", - value: - ( - (metadata.get("ss_training_finished_at") - - metadata.get("ss_training_started_at")) / - 60 - ).toPrecision(4) + " minutes", + value: `${( + (metadata.get("ss_training_finished_at") - + metadata.get("ss_training_started_at")) / + 60 + ).toPrecision(4)} minutes`, }), ]), @@ -376,13 +374,11 @@ function BOFTNetwork({ metadata }) { setDims(resp.dims); }); }, []); - return [ - h(MetaAttribute, { - name: "Network factor", - valueClassName: "rank", - value: dims.join(", "), - }), - ]; + return h(MetaAttribute, { + name: "Network factor", + valueClassName: "rank", + value: dims.join(", "), + }); } function LoKrNetwork({ metadata }) { @@ -406,11 +402,9 @@ function LoKrNetwork({ metadata }) { function LoRANetwork({ metadata }) { const [alphas, setAlphas] = React.useState([ - (metadata && metadata.get("ss_network_alpha")) ?? undefined, - ]); - const [dims, setDims] = React.useState([ - (metadata && metadata.get("ss_network_dim")) ?? 
undefined, + metadata?.get("ss_network_alpha"), ]); + const [dims, setDims] = React.useState([metadata?.get("ss_network_dim")]); React.useEffect(() => { trySyncMessage( { messageType: "alphas", name: mainFilename }, @@ -443,11 +437,13 @@ function LoRANetwork({ metadata }) { .map((alpha) => { if (typeof alpha === "number") { return alpha.toPrecision(2); - } else if (alpha.includes(".")) { - return parseFloat(alpha).toPrecision(2); - } else { - return parseInt(alpha); } + + if (alpha.includes(".")) { + return Number.parseFloat(alpha).toPrecision(2); + } + + return Number.parseInt(alpha); }) .join(", "), key: "network-alpha", @@ -593,7 +589,7 @@ function Weight({ metadata, filename }) { ]; } -function Precision({}) { +function Precision() { const [precision, setPrecision] = React.useState(""); React.useEffect(() => { @@ -668,7 +664,7 @@ function Blocks({ metadata, filename }) { }); return function cleanup() {}; - }, [hasBlockWeights]); + }, [filename, hasBlockWeights]); React.useEffect(() => { trySyncMessage( @@ -696,13 +692,13 @@ function Blocks({ metadata, filename }) { }, filename, ).then((resp) => { - if (resp.precision == "bf16") { + if (resp.precision === "bf16") { setCanHaveBlockWeights(false); } }); } }); - }, []); + }, [filename]); React.useEffect(() => { if (!teChartRef.current && !unetChartRef.current) { @@ -715,7 +711,7 @@ function Blocks({ metadata, filename }) { labels: dataset.map(([k, _]) => k), // Our series array that contains series objects or in this case series data arrays series: [ - dataset.map(([_k, v]) => v["mean"]), + dataset.map(([_k, v]) => v.mean), // dataset.map(([k, v]) => strBlocks.get(k)), ], }; @@ -746,9 +742,7 @@ function Blocks({ metadata, filename }) { y: -10, }, textAnchor: "middle", - labelInterpolationFnc: function (value) { - return value.toPrecision(4); - }, + labelInterpolationFnc: (value) => value.toPrecision(4), }), ], }); @@ -756,10 +750,10 @@ function Blocks({ metadata, filename }) { let seq = 0; // Once the chart is 
fully created we reset the sequence - chart.on("created", function () { + chart.on("created", () => { seq = 0; }); - chart.on("draw", function (data) { + chart.on("draw", (data) => { if (data.type === "point") { // If the drawn element is a line we do a simple opacity fade in. This could also be achieved using CSS3 animations. data.element.animate({ @@ -789,14 +783,14 @@ function Blocks({ metadata, filename }) { if (teMagBlocks.size > 0) { makeChart( // We are removing elements that are 0 because they cause the chart to find them as undefined - Array.from(teMagBlocks).filter(([_k, v]) => v["mean"] !== 0), + Array.from(teMagBlocks).filter(([_k, v]) => v.mean !== 0), teChartRef, ); } if (unetMagBlocks.size > 0) { makeChart( // We are removing elements that are 0 because they cause the chart to find them as undefined - Array.from(unetMagBlocks).filter(([_k, v]) => v["mean"] !== 0), + Array.from(unetMagBlocks).filter(([_k, v]) => v.mean !== 0), unetChartRef, ); } @@ -808,7 +802,8 @@ function Blocks({ metadata, filename }) { { className: "block-weights-container" }, "Block weights not supported for this network type or precision.", ); - } else if (!hasBlockWeights) { + } + if (!hasBlockWeights) { return h( "div", { className: "block-weights-container" }, @@ -818,7 +813,7 @@ function Blocks({ metadata, filename }) { className: "primary", onClick: (e) => { e.preventDefault(); - setHasBlockWeights((state) => (state ? 
false : true)); + setHasBlockWeights((state) => !state); }, }, "Get block weights", ), ), ); } @@ -859,8 +854,8 @@ function Blocks({ metadata, filename }) { // }), h(MetaAttribute, { className: "te-block", - name: `${k} avg l2 norm ${v["metadata"]["type"]}`, - value: v["mean"].toPrecision(6), + name: `${k} avg l2 norm ${v.metadata.type}`, + value: v.mean.toPrecision(6), valueClassName: "number", }), ); @@ -888,8 +883,8 @@ function Blocks({ metadata, filename }) { // }), h(MetaAttribute, { className: "unet-block", - name: `${k} avg l2 norm ${v["metadata"]["type"]}`, - value: v["mean"].toPrecision(6), + name: `${k} avg l2 norm ${v.metadata.type}`, + value: v.mean.toPrecision(6), valueClassName: "number", }), ); @@ -969,7 +964,7 @@ function Batch({ metadata }) { for (const dataset of datasets) { if ("batch_size_per_device" in dataset) { - batchSize = dataset["batch_size_per_device"]; + batchSize = dataset.batch_size_per_device; } } } @@ -1009,10 +1004,10 @@ function Noise({ metadata }) { name: "IP noise gamma", valueClassName: "number", value: metadata.get("ss_ip_noise_gamma"), - ...(metadata.get("ss_ip_noise_gamma_random_strength") != undefined && { + ...(metadata.get("ss_ip_noise_gamma_random_strength") !== undefined && { secondaryName: "Random strength:", secondary: - metadata.get("ss_ip_noise_gamma_random_strength") == "True" + metadata.get("ss_ip_noise_gamma_random_strength") === "True" ? "True" : "False", // secondaryClassName: "number", }), h(MetaAttribute, { name: "Noise offset", valueClassName: "number", value: metadata.get("ss_noise_offset"), - ...(metadata.get("ss_ip_noise_gamma_random_strength") != undefined && { + ...(metadata.get("ss_noise_offset_random_strength") !== undefined && { secondaryName: "Random strength:", secondary: - metadata.get("ss_noise_offset_random_strength") == "True" + metadata.get("ss_noise_offset_random_strength") === "True" ? 
"True" : "False", // secondaryClassName: "number", @@ -1097,7 +1092,7 @@ function Loss({ metadata }) { valueClassName: "boolean", value: metadata.get("ss_zero_terminal_snr"), }), - metadata.has("ss_masked_loss") != undefined && + metadata.has("ss_masked_loss") !== undefined && h(MetaAttribute, { name: "Masked Loss", value: metadata.get("ss_masked_loss"), @@ -1159,22 +1154,22 @@ function Buckets({ dataset, metadata }) { { key: "buckets", className: "row space-apart" }, h(MetaAttribute, { name: "Buckets", - value: dataset["enable_bucket"] ? "True" : "False", + value: dataset.enable_bucket ? "True" : "False", }), h(MetaAttribute, { name: "Min bucket resolution", valueClassName: "number", - value: dataset["min_bucket_reso"], + value: dataset.min_bucket_reso, }), h(MetaAttribute, { name: "Max bucket resolution", valueClassName: "number", - value: dataset["max_bucket_reso"], + value: dataset.max_bucket_reso, }), h(MetaAttribute, { name: "Resolution", valueClassName: "number", - value: `${dataset["resolution"][0]}x${dataset["resolution"][0]}`, + value: `${dataset.resolution[0]}x${dataset.resolution[0]}`, }), ), @@ -1187,9 +1182,9 @@ function Buckets({ dataset, metadata }) { h( "div", { key: "subsets", className: "subsets" }, - dataset["subsets"].map((subset, i) => + dataset.subsets.map((subset, i) => h(Subset, { - key: `subset-${subset["image_dir"]}-${i}`, + key: `subset-${subset.image_dir}-${i}`, metadata, subset, }), @@ -1199,7 +1194,7 @@ function Buckets({ dataset, metadata }) { h( "div", { key: "tag-frequencies", className: "tag-frequencies row space-apart" }, - Object.entries(dataset["tag_frequency"]).map(([dir, frequency]) => + Object.entries(dataset.tag_frequency).map(([dir, frequency]) => h( "div", { key: dir }, @@ -1217,25 +1212,25 @@ function Buckets({ dataset, metadata }) { function BucketInfo({ metadata, dataset }) { // No bucket info - if (!dataset["bucket_info"]) { + if (!dataset.bucket_info) { return; } // No buckets data - if 
(!dataset["bucket_info"]["buckets"]) { + if (!dataset.bucket_info.buckets) { return; } return h("div", { className: "bucket-infos" }, [ - Object.entries(dataset["bucket_info"]["buckets"]).map(([key, bucket]) => { + Object.entries(dataset.bucket_info.buckets).map(([key, bucket]) => { return h( "div", { key, className: "bucket" }, h(MetaAttribute, { name: `Bucket ${key}`, - value: `${bucket["resolution"][0]}x${bucket["resolution"][1]}: ${ - bucket["count"] - } image${bucket["count"] > 1 ? "s" : ""}`, + value: `${bucket.resolution[0]}x${bucket.resolution[1]}: ${ + bucket.count + } image${bucket.count > 1 ? "s" : ""}`, }), ); }), @@ -1243,14 +1238,14 @@ function BucketInfo({ metadata, dataset }) { } function Subset({ subset, metadata }) { - const tf = (v, defaults = undefined, opts) => { + const tf = (v, defaults = undefined, opts = {}) => { let className = ""; if (v === true) { if (v !== defaults) { className = "changed"; } return { - valueClassName: opts?.valueClassName ?? "" + " option " + className, + valueClassName: opts?.valueClassName ?? ` option ${className}`, value: "true", }; } @@ -1258,7 +1253,7 @@ function Subset({ subset, metadata }) { className = "changed"; } return { - valueClassName: opts?.valueClassName ?? "" + " option " + className, + valueClassName: opts?.valueClassName ?? 
` option ${className}`, value: "false", }; }; @@ -1268,71 +1263,71 @@ function Subset({ subset, metadata }) { { className: "subset row space-apart" }, h(MetaAttribute, { name: "Image count", - value: subset["img_count"], + value: subset.img_count, valueClassName: "number", }), h(MetaAttribute, { name: "Image dir", - value: subset["image_dir"], + value: subset.image_dir, valueClassName: "", }), h(MetaAttribute, { name: "Flip augmentation", - ...tf(subset["flip_aug"], false), + ...tf(subset.flip_aug, false), }), h(MetaAttribute, { name: "Color augmentation", - ...tf(subset["color_aug"], false), + ...tf(subset.color_aug, false), }), h(MetaAttribute, { name: "Num repeats", - value: subset["num_repeats"], + value: subset.num_repeats, valueClassName: "number", }), h(MetaAttribute, { name: "Is regularization", - ...tf(subset["is_reg"], false), + ...tf(subset.is_reg, false), }), - h(MetaAttribute, { name: "Class token", value: subset["class_tokens"] }), + h(MetaAttribute, { name: "Class token", value: subset.class_tokens }), h(MetaAttribute, { name: "Keep tokens", - value: subset["keep_tokens"], + value: subset.keep_tokens, valueClassName: "number", }), "keep_tokens_separator" in subset && h(MetaAttribute, { name: "Keep tokens separator", - value: subset["keep_tokens_separator"], + value: subset.keep_tokens_separator, }), "caption_separator" in subset && h(MetaAttribute, { name: "Caption separator", - value: subset["caption_separator"], + value: subset.caption_separator, }), "secondary_separator" in subset && h(MetaAttribute, { name: "Secondary separator", - value: subset["secondary_separator"], + value: subset.secondary_separator, }), "enable_wildcard" in subset && h(MetaAttribute, { name: "Enable wildcard", - ...tf(subset["enable_wildcard"], false), + ...tf(subset.enable_wildcard, false), }), "shuffle_caption" in subset && h(MetaAttribute, { name: "Shuffle caption", - ...tf(subset["shuffle_caption"], false), + ...tf(subset.shuffle_caption, false), }), "caption_prefix" in 
subset && h(MetaAttribute, { name: "Caption prefix", - value: subset["caption_prefix"], + value: subset.caption_prefix, }), "caption_suffix" in subset && h(MetaAttribute, { name: "Caption suffix", - value: subset["caption_suffix"], + value: subset.caption_suffix, }), ); } @@ -1341,14 +1336,14 @@ function TagFrequency({ tagFrequency, metadata }) { const [showMore, setShowMore] = React.useState(false); const allTags = Object.entries(tagFrequency).sort((a, b) => a[1] < b[1]); - const sortedTags = showMore == false ? allTags.slice(0, 50) : allTags; + const sortedTags = showMore === false ? allTags.slice(0, 50) : allTags; return [ sortedTags.map(([tag, count], i) => { const alt = i % 2 > 0 ? " alt-row" : ""; return h( "div", - { className: "tag-frequency" + alt, key: tag }, + { className: `tag-frequency${alt}`, key: tag }, h("div", {}, count), h("div", {}, tag), ); @@ -1430,7 +1425,7 @@ function Advanced({ metadata, filename }) { setAllKeys(resp.keys); }, ); - }, []); + }, [filename]); React.useEffect(() => { trySyncMessage( @@ -1457,18 +1452,18 @@ function Advanced({ metadata, filename }) { }, filename, ).then((resp) => { - if (resp.precision == "bf16") { + if (resp.precision === "bf16") { setCanHaveStatistics(false); } }); } }); - }, []); + }, [filename]); if (DEBUG) { React.useEffect(() => { advancedRef.current.scrollIntoView({ behavior: "smooth" }); - }, []); + }, [advancedRef.current.scrollIntoView]); } return [ @@ -1597,7 +1592,7 @@ function Statistics({ baseNames, filename }) { }); // }); // }); - }, [calcStatistics, baseNames]); + }, [filename, calcStatistics, baseNames]); React.useEffect(() => { if (!calcStatistics) { @@ -1627,7 +1622,7 @@ function Statistics({ baseNames, filename }) { ); return function cleanup() {}; - }, [calcStatistics]); + }, [filename, baseNames.length, calcStatistics]); if (!hasStatistics && !calcStatistics) { return h( @@ -1723,7 +1718,7 @@ function Statistics({ baseNames, filename }) { "bases", bases.map((v) => ({ ...v, - stat: 
Object.fromEntries(v["stat"]), + stat: Object.fromEntries(v.stat), })), ); }, @@ -1793,7 +1788,6 @@ function compileTextEncoderLayers(bases) { const subType = match.groups.sub_type; const layerKey = layerType === "self_attn" ? "attn" : "mlp"; - let value; let subKey; switch (subType) { @@ -1855,8 +1849,8 @@ function parseSDKey(key) { let isConv = false; let isAttention = false; let isSampler = false; - let isProjection = false; - let isFeedForward = false; + const isProjection = false; + const isFeedForward = false; let type; let blockType; @@ -1870,8 +1864,8 @@ function parseSDKey(key) { if (matches) { const groups = matches.groups; type = "encoder"; - blockId = parseInt(groups["block_id"]); - blockType = groups["block_type"]; + blockId = Number.parseInt(groups.block_id); + blockType = groups.block_type; name = `TE${padTwo(blockId)}`; @@ -1885,52 +1879,52 @@ function parseSDKey(key) { if (matches) { const groups = matches.groups; - type = groups["type"]; - blockType = groups["block_type"]; - blockId = parseInt(groups["block_id"]); - subBlockId = parseInt(groups["subblock_id"]); + type = groups.type; + blockType = groups.block_type; + blockId = Number.parseInt(groups.block_id); + subBlockId = Number.parseInt(groups.subblock_id); // console.log(groups["block_id"]); - if (groups["type"] === "attentions") { + if (groups.type === "attentions") { idx = 3 * blockId + subBlockId; isAttention = true; - } else if (groups["type"] === "resnets") { + } else if (groups.type === "resnets") { idx = 3 * blockId + subBlockId; isConv = true; } else if ( - groups["type"] === "upsamplers" || - groups["type"] === "downsamplers" + groups.type === "upsamplers" || + groups.type === "downsamplers" ) { idx = 3 * blockId + 2; isSampler = true; } - if (groups["block_type"] === "down") { + if (groups.block_type === "down") { blockIdx = 1 + idx; name = `IN${padTwo(idx)}`; - } else if (groups["block_type"] === "up") { + } else if (groups.block_type === "up") { blockIdx = NUM_OF_BLOCKS + 1 + idx; 
name = `OUT${padTwo(idx)}`; - } else if (groups["block_type"] === "mid") { + } else if (groups.block_type === "mid") { blockIdx = NUM_OF_BLOCKS; } // Handle the mid block } else if (key.includes("mid_block_")) { const midMatch = key.match(MID_SDRE); - name = `MID`; + name = "MID"; if (midMatch) { const groups = midMatch.groups; - type = groups["type"]; - blockType = groups["block_type"]; - blockId = parseInt(groups["block_id"]); - subBlockId = parseInt(groups["subblock_id"]); + type = groups.type; + blockType = groups.block_type; + blockId = Number.parseInt(groups.block_id); + subBlockId = Number.parseInt(groups.subblock_id); name = `MID${padTwo(blockId)}`; - if (groups.type == "attentions") { + if (groups.type === "attentions") { isAttention = true; } else if (groups.type === "resnets") { isConv = true; @@ -1979,8 +1973,8 @@ function compileUnetLayers(bases) { // we have a list of names and we want to extract the different components and put back together to use // with Attention - const re = - /lora_unet_(down_blocks|mid_block|up_blocks)_(?\d+)_(?mlp|self_attn)_(?k_proj|q_proj|v_proj|out_proj|fc1|fc2)/; + // const re = + // /lora_unet_(down_blocks|mid_block|up_blocks)_(?\d+)_(?mlp|self_attn)_(?k_proj|q_proj|v_proj|out_proj|fc1|fc2)/; const layers = { down: {}, @@ -2019,7 +2013,7 @@ function compileUnetLayers(bases) { continue; } - let parsedKey = parseSDKey(base.baseName); + const parsedKey = parseSDKey(base.baseName); // TODO need layer id layer = ensureLayer(layer, parsedKey.name); @@ -2142,7 +2136,7 @@ function AllKeys({ allkeys }) { h( "ul", { key: "all-keys" }, - allKeys.map((key) => { + allkeys.map((key) => { return h("li", { key }, key); }), ), @@ -2897,16 +2891,15 @@ function Raw({ metadata, filename }) { onClick: () => { sortedEntries; const data = - "text/json;charset=utf-8," + - encodeURIComponent(JSON.stringify(sortedEntries, null, 2)); + `text/json;charset=utf-8,${encodeURIComponent(JSON.stringify(sortedEntries, null, 2))}`; const a = 
document.createElement("a"); - a.href = "data:" + data; + a.href = `data:${data}`; a.download = `${filename.replace( ".safetensors", "", )}-metadata.json`; - var container = document.body; + const container = document.body; container.appendChild(a); a.click(); @@ -2976,7 +2969,7 @@ function Support() { function close(e) { // escape - if (e.keyCode == 27) { + if (e.keyCode === 27) { setModal(false); window.removeEventListener("keydown", close); @@ -3064,10 +3057,10 @@ function Support() { "button", { onClick: () => { - setModal(modal ? false : true); + setModal(!modal); }, }, - `Support`, + "Support", ); } @@ -3093,8 +3086,8 @@ function Metadata({ metadata, filename }) { ]; } -const isAdvancedUpload = (function () { - var div = document.createElement("div"); +const isAdvancedUpload = (() => { + const div = document.createElement("div"); return ( ("draggable" in div || ("ondragstart" in div && "ondrop" in div)) && "FormData" in window && @@ -3108,6 +3101,7 @@ if (isAdvancedUpload) { const dropbox = document.querySelector("#dropbox"); +// biome-ignore lint/complexity/noForEach: need to remove [ "drag", "dragstart", @@ -3123,19 +3117,21 @@ const dropbox = document.querySelector("#dropbox"); }), ); +// biome-ignore lint/complexity/noForEach: need to remove ["dragover", "dragenter"].forEach((evtName) => { dropbox.addEventListener(evtName, () => { dropbox.classList.add("is-dragover"); }); }); +// biome-ignore lint/complexity/noForEach: need to remove ["dragleave", "dragend", "drop"].forEach((evtName) => { dropbox.addEventListener(evtName, () => { dropbox.classList.remove("is-dragover"); }); }); -let files = new Map(); +const files = new Map(); let mainFilename; const workers = new Map(); @@ -3147,11 +3143,11 @@ async function addWorker(file) { workers.set(file, worker); - return new Promise((resolve, reject) => { - let timeouts = []; + return new Promise((resolve) => { + const timeouts = []; const worker = workers.get(file); - worker.onmessage = (event) => { + 
worker.onmessage = () => { timeouts.map((timeout) => clearTimeout(timeout)); worker.onmessage = undefined; resolve(worker); }; @@ -3181,12 +3177,13 @@ function removeWorker(file) { } function clearWorkers() { + // biome-ignore lint/complexity/noForEach: Array.from(workers.keys()).forEach((key) => { removeWorker(key); }); } -// wasm_bindgen().then(() => { +// biome-ignore lint/complexity/noForEach: ["drop"].forEach((evtName) => { document.addEventListener(evtName, async (e) => { e.preventDefault(); @@ -3194,7 +3191,7 @@ function clearWorkers() { const droppedFiles = e.dataTransfer.files; for (let i = 0; i < droppedFiles.length; i++) { - if (files.item(i).type != "") { + if (droppedFiles.item(i).type !== "") { addErrorMessage("Invalid filetype. Try a .safetensors file."); continue; } @@ -3204,14 +3201,14 @@ function clearWorkers() { }); }); -document.querySelector("#file").addEventListener("change", async function (e) { +document.querySelector("#file").addEventListener("change", async (e) => { e.preventDefault(); e.stopPropagation(); const files = e.target.files; for (let i = 0; i < files.length; i++) { - if (files.item(i).type != "") { + if (files.item(i).type !== "") { addErrorMessage("Invalid filetype. 
Try a .safetensors file."); continue; } @@ -3321,11 +3318,11 @@ function cancelLoading(file) { clearTimeout(uploadTimeoutHandler); } -window.addEventListener("keyup", (e) => { - if (e.key === "Escape") { - cancelLoading(file); - } -}); +// window.addEventListener("keyup", (e) => { +// if (e.key === "Escape") { +// cancelLoading(file); +// } +// }); function loading(file) { const loadingEle = document.createElement("div"); diff --git a/crates/lora-inspector-wasm/biome.json b/crates/lora-inspector-wasm/biome.json new file mode 100644 index 0000000..2eb0751 --- /dev/null +++ b/crates/lora-inspector-wasm/biome.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "vcs": { + "enabled": false, + "clientKind": "git", + "useIgnoreFile": false + }, + "files": { + "ignoreUnknown": false, + "ignore": [] + }, + "formatter": { + "enabled": true, + "indentStyle": "tab" + }, + "organizeImports": { + "enabled": true + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + } + }, + "javascript": { + "formatter": { + "quoteStyle": "double" + } + } +} diff --git a/crates/lora-inspector-wasm/package.json b/crates/lora-inspector-wasm/package.json index c47ec72..99c671a 100644 --- a/crates/lora-inspector-wasm/package.json +++ b/crates/lora-inspector-wasm/package.json @@ -7,6 +7,7 @@ "author": "", "license": "ISC", "devDependencies": { + "@biomejs/biome": "1.9.4", "@playwright/test": "^1.41.1", "@types/node": "^20.11.5", "vite-plugin-static-copy": "^1.0.5", diff --git a/crates/lora-inspector-wasm/yarn.lock b/crates/lora-inspector-wasm/yarn.lock index 63ce454..409e402 100644 --- a/crates/lora-inspector-wasm/yarn.lock +++ b/crates/lora-inspector-wasm/yarn.lock @@ -5,6 +5,97 @@ __metadata: version: 8 cacheKey: 10c0 +"@biomejs/biome@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/biome@npm:1.9.4" + dependencies: + "@biomejs/cli-darwin-arm64": "npm:1.9.4" + "@biomejs/cli-darwin-x64": "npm:1.9.4" + "@biomejs/cli-linux-arm64": 
"npm:1.9.4" + "@biomejs/cli-linux-arm64-musl": "npm:1.9.4" + "@biomejs/cli-linux-x64": "npm:1.9.4" + "@biomejs/cli-linux-x64-musl": "npm:1.9.4" + "@biomejs/cli-win32-arm64": "npm:1.9.4" + "@biomejs/cli-win32-x64": "npm:1.9.4" + dependenciesMeta: + "@biomejs/cli-darwin-arm64": + optional: true + "@biomejs/cli-darwin-x64": + optional: true + "@biomejs/cli-linux-arm64": + optional: true + "@biomejs/cli-linux-arm64-musl": + optional: true + "@biomejs/cli-linux-x64": + optional: true + "@biomejs/cli-linux-x64-musl": + optional: true + "@biomejs/cli-win32-arm64": + optional: true + "@biomejs/cli-win32-x64": + optional: true + bin: + biome: bin/biome + checksum: 10c0/b5655c5aed9a6fffe24f7d04f15ba4444389d0e891c9ed9106fab7388ac9b4be63185852cc2a937b22940dac3e550b71032a4afd306925cfea436c33e5646b3e + languageName: node + linkType: hard + +"@biomejs/cli-darwin-arm64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-darwin-arm64@npm:1.9.4" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@biomejs/cli-darwin-x64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-darwin-x64@npm:1.9.4" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@biomejs/cli-linux-arm64-musl@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-linux-arm64-musl@npm:1.9.4" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@biomejs/cli-linux-arm64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-linux-arm64@npm:1.9.4" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@biomejs/cli-linux-x64-musl@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-linux-x64-musl@npm:1.9.4" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@biomejs/cli-linux-x64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-linux-x64@npm:1.9.4" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + 
+"@biomejs/cli-win32-arm64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-win32-arm64@npm:1.9.4" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@biomejs/cli-win32-x64@npm:1.9.4": + version: 1.9.4 + resolution: "@biomejs/cli-win32-x64@npm:1.9.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@esbuild/aix-ppc64@npm:0.20.2": version: 0.20.2 resolution: "@esbuild/aix-ppc64@npm:0.20.2" @@ -1182,6 +1273,7 @@ __metadata: version: 0.0.0-use.local resolution: "lora-inspector-wasm@workspace:." dependencies: + "@biomejs/biome": "npm:1.9.4" "@playwright/test": "npm:^1.41.1" "@types/node": "npm:^20.11.5" react: "npm:^18.3.1"