15 Commits

Author SHA1 Message Date
Dariusz L
d0e6bf8b3d Update pyproject.toml 2025-08-09 03:24:49 +02:00
Dariusz L
da37900b33 Refactor: unify image handling in CanvasIO via helpers
Removed duplicate code from CanvasIO.ts and replaced it with unified helpers from ImageUtils.ts. All tensor-to-image conversions and image creation now use centralized utility functions for consistency and maintainability.
2025-08-09 03:07:18 +02:00
Dariusz L
64c5e49707 Unify mask scaling logic with scaleImageToFit util
Refactored mask scaling and drawing into the scaleImageToFit method in ImageUtils.ts. Updated CanvasIO.ts to use this utility, reducing code duplication and improving maintainability.
2025-08-09 02:43:36 +02:00
Dariusz L
06d94f6a63 Improve mask loading logic on node connection
Updated mask loading to use available data from connected nodes immediately and to preserve the existing mask when no new one is provided. Backend mask data is only fetched after workflow execution, ensuring no stale data is loaded during connection.
2025-08-09 02:33:28 +02:00
Dariusz L
b21d6e3502 implement strict image/mask input separation
Enhanced LayerForge input handling to strictly separate image and mask loading based on connection type. Images now only load when allowImage=true and masks only when allowMask=true, preventing unintended cross-loading between input types.
2025-08-09 01:44:31 +02:00
Dariusz L
285ad035b2 Improve batch images and mask handling
Fixed batch image processing to prevent duplicates and layer deletion while ensuring proper mask loading from input_mask. Images are now added as new layers without removing existing ones, and masks are always checked from backend regardless of image state.
2025-08-09 00:49:58 +02:00
Dariusz L
949ffa0143 Repair Undo/Redo in Masking Mode 2025-08-08 22:41:19 +02:00
Dariusz L
afdac52144 Added mask and image input 2025-08-08 22:23:15 +02:00
Dariusz L
bf55d13f67 Update pyproject.toml 2025-08-08 17:14:05 +02:00
Dariusz L
de83a884c2 Switch mask preview from chunked to canvas rendering
Replaced the chunked rendering approach with direct canvas drawing for the mask preview, which is then applied to the main canvas. Added a "Mask Opacity" slider.
2025-08-08 17:13:44 +02:00
Dariusz L
dd2a81b6f2 add advanced brush cursor visualization
Implemented dynamic brush cursor with visual feedback for size (circle radius), strength (opacity), and hardness (solid/dashed border with gradient). Added overlay canvas system for smooth cursor updates without affecting main rendering performance.
2025-08-08 14:20:55 +02:00
Dariusz L
176b9d03ac unify modifier key handling in CanvasInteractions
Implemented centralized modifier state management with ModifierState interface and getModifierState() method. This eliminates inconsistencies between event-based and state-based modifier checking across mouse, wheel, and keyboard interactions.
2025-08-08 13:50:13 +02:00
Dariusz L
e4f44c10e8 resolve TypeScript errors and memory leaks
Fixed all TypeScript compilation errors by defining a dedicated TransformOrigin type and adding proper null checks. Implemented comprehensive event handler cleanup to prevent memory leaks and improved cross-platform support with Meta key handling for macOS users.
2025-08-08 13:15:21 +02:00
Dariusz L
11dd554204 Update pyproject.toml 2025-08-06 23:09:19 +02:00
Dariusz L
9f21ff13ae Add clipspace utils with full backward support
Refactored clipspace handling into ClipspaceUtils with validateAndFixClipspace() and safeClipspacePaste() for consistent, defensive logic. Ensures full backward compatibility with all ComfyUI versions and eliminates duplicated code.
2025-08-06 23:08:02 +02:00
27 changed files with 3416 additions and 556 deletions

View File

@@ -179,6 +179,10 @@ class LayerForgeNode:
"trigger": ("INT", {"default": 0, "min": 0, "max": 99999999, "step": 1}),
"node_id": ("STRING", {"default": "0"}),
},
"optional": {
"input_image": ("IMAGE",),
"input_mask": ("MASK",),
},
"hidden": {
"prompt": ("PROMPT",),
"unique_id": ("UNIQUE_ID",),
@@ -239,7 +243,7 @@ class LayerForgeNode:
_processing_lock = threading.Lock()
def process_canvas_image(self, fit_on_add, show_preview, auto_refresh_after_generation, trigger, node_id, prompt=None, unique_id=None):
def process_canvas_image(self, fit_on_add, show_preview, auto_refresh_after_generation, trigger, node_id, input_image=None, input_mask=None, prompt=None, unique_id=None):
try:
@@ -250,6 +254,81 @@ class LayerForgeNode:
log_info(f"Lock acquired. Starting process_canvas_image for node_id: {node_id} (fallback unique_id: {unique_id})")
# Always store fresh input data, even if None, to clear stale data
log_info(f"Storing input data for node {node_id} - Image: {input_image is not None}, Mask: {input_mask is not None}")
with self.__class__._storage_lock:
input_data = {}
if input_image is not None:
# Convert image tensor(s) to base64 - handle batch
if isinstance(input_image, torch.Tensor):
# Ensure correct shape [B, H, W, C]
if input_image.dim() == 3:
input_image = input_image.unsqueeze(0)
batch_size = input_image.shape[0]
log_info(f"Processing batch of {batch_size} image(s)")
if batch_size == 1:
# Single image - keep backward compatibility
img_np = (input_image.squeeze(0).cpu().numpy() * 255).astype(np.uint8)
pil_img = Image.fromarray(img_np, 'RGB')
# Convert to base64
buffered = io.BytesIO()
pil_img.save(buffered, format="PNG")
img_str = base64.b64encode(buffered.getvalue()).decode()
input_data['input_image'] = f"data:image/png;base64,{img_str}"
input_data['input_image_width'] = pil_img.width
input_data['input_image_height'] = pil_img.height
log_debug(f"Stored single input image: {pil_img.width}x{pil_img.height}")
else:
# Multiple images - store as array
images_array = []
for i in range(batch_size):
img_np = (input_image[i].cpu().numpy() * 255).astype(np.uint8)
pil_img = Image.fromarray(img_np, 'RGB')
# Convert to base64
buffered = io.BytesIO()
pil_img.save(buffered, format="PNG")
img_str = base64.b64encode(buffered.getvalue()).decode()
images_array.append({
'data': f"data:image/png;base64,{img_str}",
'width': pil_img.width,
'height': pil_img.height
})
log_debug(f"Stored batch image {i+1}/{batch_size}: {pil_img.width}x{pil_img.height}")
input_data['input_images_batch'] = images_array
log_info(f"Stored batch of {batch_size} images")
if input_mask is not None:
# Convert mask tensor to base64
if isinstance(input_mask, torch.Tensor):
# Ensure correct shape
if input_mask.dim() == 2:
input_mask = input_mask.unsqueeze(0)
if input_mask.dim() == 3 and input_mask.shape[0] == 1:
input_mask = input_mask.squeeze(0)
# Convert to numpy and then to PIL
mask_np = (input_mask.cpu().numpy() * 255).astype(np.uint8)
pil_mask = Image.fromarray(mask_np, 'L')
# Convert to base64
mask_buffered = io.BytesIO()
pil_mask.save(mask_buffered, format="PNG")
mask_str = base64.b64encode(mask_buffered.getvalue()).decode()
input_data['input_mask'] = f"data:image/png;base64,{mask_str}"
log_debug(f"Stored input mask: {pil_mask.width}x{pil_mask.height}")
input_data['fit_on_add'] = fit_on_add
# Store in a special key for input data (overwrites any previous data)
self.__class__._canvas_data_storage[f"{node_id}_input"] = input_data
storage_key = node_id
processed_image = None
@@ -433,6 +512,63 @@ class LayerForgeNode:
log_info("WebSocket connection closed")
return ws
@PromptServer.instance.routes.get("/layerforge/get_input_data/{node_id}")
async def get_input_data(request):
    # Serve any input data (image/mask payload) the backend has staged for
    # this node. Responds with has_input=False when nothing is stored.
    try:
        node_id = request.match_info["node_id"]
        log_debug(f"Checking for input data for node: {node_id}")
        # Read under the storage lock; build the response outside it.
        with cls._storage_lock:
            stored = cls._canvas_data_storage.get(f"{node_id}_input", None)
        if stored:
            log_info(f"Input data found for node {node_id}, sending to frontend")
            return web.json_response({
                'success': True,
                'has_input': True,
                'data': stored
            })
        log_debug(f"No input data found for node {node_id}")
        return web.json_response({
            'success': True,
            'has_input': False
        })
    except Exception as e:
        log_error(f"Error in get_input_data: {str(e)}")
        return web.json_response({
            'success': False,
            'error': str(e)
        }, status=500)
@PromptServer.instance.routes.post("/layerforge/clear_input_data/{node_id}")
async def clear_input_data(request):
    # Drop any staged input data for this node so stale tensors are never
    # re-served to the frontend. Succeeds whether or not data was present.
    try:
        node_id = request.match_info["node_id"]
        log_info(f"Clearing input data for node: {node_id}")
        missing = object()  # sentinel so a stored falsy value still counts as "cleared"
        with cls._storage_lock:
            removed = cls._canvas_data_storage.pop(f"{node_id}_input", missing)
        if removed is not missing:
            log_info(f"Input data cleared for node {node_id}")
        else:
            log_debug(f"No input data to clear for node {node_id}")
        return web.json_response({
            'success': True,
            'message': f'Input data cleared for node {node_id}'
        })
    except Exception as e:
        log_error(f"Error in clear_input_data: {str(e)}")
        return web.json_response({
            'success': False,
            'error': str(e)
        }, status=500)
@PromptServer.instance.routes.get("/ycnode/get_canvas_data/{node_id}")
async def get_canvas_data(request):
try:
@@ -911,4 +1047,3 @@ def convert_tensor_to_base64(tensor, alpha_mask=None, original_alpha=None):
log_error(f"Error in convert_tensor_to_base64: {str(e)}")
log_debug(f"Tensor shape: {tensor.shape}, dtype: {tensor.dtype}")
raise

View File

@@ -61,9 +61,20 @@ export class Canvas {
});
this.offscreenCanvas = offscreenCanvas;
this.offscreenCtx = offscreenCtx;
// Create overlay canvas for brush cursor and other lightweight overlays
const { canvas: overlayCanvas, ctx: overlayCtx } = createCanvas(0, 0, '2d', {
alpha: true,
willReadFrequently: false
});
if (!overlayCtx)
throw new Error("Could not create overlay canvas context");
this.overlayCanvas = overlayCanvas;
this.overlayCtx = overlayCtx;
this.canvasContainer = null;
this.dataInitialized = false;
this.pendingDataCheck = null;
this.pendingInputDataCheck = null;
this.inputDataLoaded = false;
this.imageCache = new Map();
this.requestSaveState = () => { };
this.outputAreaShape = null;
@@ -363,6 +374,10 @@ export class Canvas {
return widget ? widget.value : false;
};
const handleExecutionStart = () => {
// Check for input data when execution starts, but don't reset the flag
log.debug('Execution started, checking for input data...');
// On start, only allow images; mask should load on mask-connect or after execution completes
this.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: 'execution_start' });
if (getAutoRefreshValue()) {
lastExecutionStartTime = Date.now();
// Store a snapshot of the context for the upcoming batch
@@ -385,6 +400,9 @@ export class Canvas {
}
};
const handleExecutionSuccess = async () => {
// Always check for input data after execution completes
log.debug('Execution success, checking for input data...');
await this.canvasIO.checkForInputData({ allowImage: true, allowMask: true, reason: 'execution_success' });
if (getAutoRefreshValue()) {
log.info('Auto-refresh triggered, importing latest images.');
if (!this.pendingBatchContext) {

View File

@@ -2,6 +2,7 @@ import { createCanvas } from "./utils/CommonUtils.js";
import { createModuleLogger } from "./utils/LoggerUtils.js";
import { showErrorNotification } from "./utils/NotificationUtils.js";
import { webSocketManager } from "./utils/WebSocketManager.js";
import { scaleImageToFit, createImageFromSource, tensorToImageData, createImageFromImageData } from "./utils/ImageUtils.js";
const log = createModuleLogger('CanvasIO');
export class CanvasIO {
constructor(canvas) {
@@ -247,17 +248,12 @@ export class CanvasIO {
async addInputToCanvas(inputImage, inputMask) {
try {
log.debug("Adding input to canvas:", { inputImage });
const { canvas: tempCanvas, ctx: tempCtx } = createCanvas(inputImage.width, inputImage.height);
if (!tempCtx)
throw new Error("Could not create temp context");
const imgData = new ImageData(new Uint8ClampedArray(inputImage.data), inputImage.width, inputImage.height);
tempCtx.putImageData(imgData, 0, 0);
const image = new Image();
await new Promise((resolve, reject) => {
image.onload = resolve;
image.onerror = reject;
image.src = tempCanvas.toDataURL();
});
// Use unified tensorToImageData for RGB image
const imageData = tensorToImageData(inputImage, 'rgb');
if (!imageData)
throw new Error("Failed to convert input image tensor");
// Create HTMLImageElement from ImageData
const image = await createImageFromImageData(imageData);
const bounds = this.canvas.outputAreaBounds;
const scale = Math.min(bounds.width / inputImage.width * 0.8, bounds.height / inputImage.height * 0.8);
const layer = await this.canvas.canvasLayers.addLayerWithImage(image, {
@@ -283,17 +279,10 @@ export class CanvasIO {
if (!tensor || !tensor.data || !tensor.width || !tensor.height) {
throw new Error("Invalid tensor data");
}
const { canvas, ctx } = createCanvas(tensor.width, tensor.height, '2d', { willReadFrequently: true });
if (!ctx)
throw new Error("Could not create canvas context");
const imageData = new ImageData(new Uint8ClampedArray(tensor.data), tensor.width, tensor.height);
ctx.putImageData(imageData, 0, 0);
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => resolve(img);
img.onerror = (e) => reject(new Error("Failed to load image: " + e));
img.src = canvas.toDataURL();
});
const imageData = tensorToImageData(tensor, 'rgb');
if (!imageData)
throw new Error("Failed to convert tensor to image data");
return await createImageFromImageData(imageData);
}
catch (error) {
log.error("Error converting tensor to image:", error);
@@ -314,12 +303,26 @@ export class CanvasIO {
async initNodeData() {
try {
log.info("Starting node data initialization...");
// First check for input data from the backend (new feature)
await this.checkForInputData();
// If we've already loaded input data, don't continue with old initialization
if (this.canvas.inputDataLoaded) {
log.debug("Input data already loaded, skipping old initialization");
this.canvas.dataInitialized = true;
return;
}
if (!this.canvas.node || !this.canvas.node.inputs) {
log.debug("Node or inputs not ready");
return this.scheduleDataCheck();
}
if (this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link) {
const imageLinkId = this.canvas.node.inputs[0].link;
// Check if we already loaded this link
if (this.canvas.lastLoadedLinkId === imageLinkId) {
log.debug(`Link ${imageLinkId} already loaded via new system, marking as initialized`);
this.canvas.dataInitialized = true;
return;
}
const imageData = window.app.nodeOutputs[imageLinkId];
if (imageData) {
log.debug("Found image data:", imageData);
@@ -331,6 +334,10 @@ export class CanvasIO {
return this.scheduleDataCheck();
}
}
else {
// No input connected, mark as initialized to stop repeated checks
this.canvas.dataInitialized = true;
}
if (this.canvas.node.inputs[1] && this.canvas.node.inputs[1].link) {
const maskLinkId = this.canvas.node.inputs[1].link;
const maskData = window.app.nodeOutputs[maskLinkId];
@@ -345,6 +352,390 @@ export class CanvasIO {
return this.scheduleDataCheck();
}
}
/**
 * Pull input data (images and/or a mask) into the canvas from either the
 * connected source node's cached outputs or the backend staging endpoint.
 *
 * @param {Object} [options]
 * @param {boolean} [options.allowImage=true] - permit loading from the image input (slot 0)
 * @param {boolean} [options.allowMask=true]  - permit loading from the mask input (slot 1)
 * @param {string}  [options.reason='unspecified'] - caller context, e.g. 'execution_start',
 *        'execution_success', 'mask_connect'; 'mask_connect' suppresses the backend
 *        fallback when no immediate mask data is available (avoids stale data).
 *
 * Dedupe state lives on this.canvas: lastLoadedLinkId / lastLoadedImageSrc for
 * images, lastLoadedMaskLinkId for masks, plus the inputDataLoaded flag.
 */
async checkForInputData(options) {
    try {
        const nodeId = this.canvas.node.id;
        const allowImage = options?.allowImage ?? true;
        const allowMask = options?.allowMask ?? true;
        const reason = options?.reason ?? 'unspecified';
        log.info(`Checking for input data for node ${nodeId}... opts: image=${allowImage}, mask=${allowMask}, reason=${reason}`);
        // Track loaded links separately for image and mask
        let imageLoaded = false;
        let maskLoaded = false;
        let imageChanged = false;
        // First, try to get data from connected node's output if available (IMAGES)
        if (allowImage && this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link) {
            const linkId = this.canvas.node.inputs[0].link;
            const graph = this.canvas.node.graph;
            // Always check if images have changed first
            if (graph) {
                const link = graph.links[linkId];
                if (link) {
                    const sourceNode = graph.getNodeById(link.origin_id);
                    if (sourceNode && sourceNode.imgs && sourceNode.imgs.length > 0) {
                        // Create current batch identifier (all image sources combined)
                        const currentBatchImageSrcs = sourceNode.imgs.map((img) => img.src).join('|');
                        // Check if this is the same link we loaded before
                        if (this.canvas.lastLoadedLinkId === linkId) {
                            // Same link, check if images actually changed
                            if (this.canvas.lastLoadedImageSrc !== currentBatchImageSrcs) {
                                log.info(`Batch images changed for link ${linkId} (${sourceNode.imgs.length} images), will reload...`);
                                log.debug(`Previous batch hash: ${this.canvas.lastLoadedImageSrc?.substring(0, 100)}...`);
                                log.debug(`Current batch hash: ${currentBatchImageSrcs.substring(0, 100)}...`);
                                imageChanged = true;
                                // Clear the inputDataLoaded flag to force reload from backend
                                this.canvas.inputDataLoaded = false;
                                // Clear the lastLoadedImageSrc to force reload
                                this.canvas.lastLoadedImageSrc = undefined;
                                // Clear backend data to force fresh load
                                // NOTE(review): fire-and-forget — not awaited, so the backend
                                // fetch later in this method may race with this clear.
                                fetch(`/layerforge/clear_input_data/${nodeId}`, { method: 'POST' })
                                    .then(() => log.debug("Backend input data cleared due to image change"))
                                    .catch(err => log.error("Failed to clear backend data:", err));
                            }
                            else {
                                log.debug(`Batch images for link ${linkId} unchanged (${sourceNode.imgs.length} images)`);
                                imageLoaded = true;
                            }
                        }
                        else {
                            // Different link or first load
                            log.info(`New link ${linkId} detected, will load ${sourceNode.imgs.length} images`);
                            imageChanged = false; // It's not a change, it's a new link
                            imageLoaded = false; // Need to load
                            // Reset the inputDataLoaded flag for new link
                            this.canvas.inputDataLoaded = false;
                        }
                    }
                }
            }
            if (!imageLoaded || imageChanged) {
                // Reset the inputDataLoaded flag when images change
                if (imageChanged) {
                    this.canvas.inputDataLoaded = false;
                    log.info("Resetting inputDataLoaded flag due to image change");
                }
                if (this.canvas.node.graph) {
                    const graph2 = this.canvas.node.graph;
                    const link2 = graph2.links[linkId];
                    if (link2) {
                        const sourceNode = graph2.getNodeById(link2.origin_id);
                        if (sourceNode && sourceNode.imgs && sourceNode.imgs.length > 0) {
                            // The connected node has images in its output - handle multiple images (batch)
                            log.info(`Found ${sourceNode.imgs.length} image(s) in connected node's output, loading all`);
                            // Create a combined source identifier for batch detection
                            const batchImageSrcs = sourceNode.imgs.map((img) => img.src).join('|');
                            // Mark this link and batch sources as loaded
                            this.canvas.lastLoadedLinkId = linkId;
                            this.canvas.lastLoadedImageSrc = batchImageSrcs;
                            // Don't clear layers - just add new ones
                            if (imageChanged) {
                                log.info("Image change detected, will add new layers");
                            }
                            // Determine add mode
                            const fitOnAddWidget = this.canvas.node.widgets.find((w) => w.name === "fit_on_add");
                            const addMode = (fitOnAddWidget && fitOnAddWidget.value) ? 'fit' : 'center';
                            // Add all images from the batch as separate layers
                            for (let i = 0; i < sourceNode.imgs.length; i++) {
                                const img = sourceNode.imgs[i];
                                await this.canvas.canvasLayers.addLayerWithImage(img, { name: `Batch Image ${i + 1}` }, // Give each layer a unique name
                                addMode, this.canvas.outputAreaBounds);
                                log.debug(`Added batch image ${i + 1}/${sourceNode.imgs.length} to canvas`);
                            }
                            this.canvas.inputDataLoaded = true;
                            imageLoaded = true;
                            log.info(`All ${sourceNode.imgs.length} input images from batch added as separate layers`);
                            this.canvas.render();
                            this.canvas.saveState();
                        }
                    }
                }
            }
        }
        // Check for mask input separately (from nodeOutputs) ONLY when allowed
        if (allowMask && this.canvas.node.inputs && this.canvas.node.inputs[1] && this.canvas.node.inputs[1].link) {
            const maskLinkId = this.canvas.node.inputs[1].link;
            // Check if we already loaded this mask link
            if (this.canvas.lastLoadedMaskLinkId === maskLinkId) {
                log.debug(`Mask link ${maskLinkId} already loaded`);
                maskLoaded = true;
            }
            else {
                // Try to get mask tensor from nodeOutputs using origin_id (not link id)
                const graph = this.canvas.node.graph;
                let maskOutput = null;
                if (graph) {
                    const link = graph.links[maskLinkId];
                    if (link && link.origin_id) {
                        // Use origin_id to get the actual node output
                        const nodeOutput = window.app?.nodeOutputs?.[link.origin_id];
                        log.debug(`Looking for mask output from origin node ${link.origin_id}, found:`, !!nodeOutput);
                        if (nodeOutput) {
                            log.debug(`Node ${link.origin_id} output structure:`, {
                                hasData: !!nodeOutput.data,
                                hasShape: !!nodeOutput.shape,
                                dataType: typeof nodeOutput.data,
                                shapeType: typeof nodeOutput.shape,
                                keys: Object.keys(nodeOutput)
                            });
                            // Only use if it has actual tensor data
                            if (nodeOutput.data && nodeOutput.shape) {
                                maskOutput = nodeOutput;
                            }
                        }
                    }
                }
                if (maskOutput && maskOutput.data && maskOutput.shape) {
                    try {
                        // Derive dimensions from shape or explicit width/height
                        let width = maskOutput.width || 0;
                        let height = maskOutput.height || 0;
                        const shape = maskOutput.shape; // e.g. [1,H,W] or [1,H,W,1]
                        if ((!width || !height) && Array.isArray(shape)) {
                            if (shape.length >= 3) {
                                height = shape[1];
                                width = shape[2];
                            }
                            else if (shape.length === 2) {
                                height = shape[0];
                                width = shape[1];
                            }
                        }
                        if (!width || !height) {
                            throw new Error("Cannot determine mask dimensions from nodeOutputs");
                        }
                        // Determine channels count
                        // NOTE(review): `channels` is computed here but never used below —
                        // tensorToImageData() is called without it. Confirm whether it can
                        // be removed or should be passed through.
                        let channels = 1;
                        if (Array.isArray(shape) && shape.length >= 4) {
                            channels = shape[3];
                        }
                        else if (maskOutput.channels) {
                            channels = maskOutput.channels;
                        }
                        else {
                            const len = maskOutput.data.length;
                            channels = Math.max(1, Math.floor(len / (width * height)));
                        }
                        // Use unified tensorToImageData for masks
                        const maskImageData = tensorToImageData(maskOutput, 'grayscale');
                        if (!maskImageData)
                            throw new Error("Failed to convert mask tensor to image data");
                        // Create canvas and put image data
                        const { canvas: maskCanvas, ctx } = createCanvas(width, height, '2d', { willReadFrequently: true });
                        if (!ctx)
                            throw new Error("Could not create mask context");
                        ctx.putImageData(maskImageData, 0, 0);
                        // Convert to HTMLImageElement
                        const maskImg = await createImageFromSource(maskCanvas.toDataURL());
                        // Respect fit_on_add (scale to output area)
                        const widgets = this.canvas.node.widgets;
                        const fitOnAddWidget = widgets ? widgets.find((w) => w.name === "fit_on_add") : null;
                        const shouldFit = fitOnAddWidget && fitOnAddWidget.value;
                        let finalMaskImg = maskImg;
                        if (shouldFit) {
                            const bounds = this.canvas.outputAreaBounds;
                            finalMaskImg = await scaleImageToFit(maskImg, bounds.width, bounds.height);
                        }
                        // Apply to MaskTool (centers internally)
                        if (this.canvas.maskTool) {
                            this.canvas.maskTool.setMask(finalMaskImg, true);
                            this.canvas.maskAppliedFromInput = true;
                            this.canvas.canvasState.saveMaskState();
                            this.canvas.render();
                            // Mark this mask link as loaded to avoid re-applying
                            this.canvas.lastLoadedMaskLinkId = maskLinkId;
                            maskLoaded = true;
                            log.info("Applied input mask from nodeOutputs immediately on connection" + (shouldFit ? " (fitted to output area)" : ""));
                        }
                    }
                    catch (err) {
                        log.warn("Failed to apply mask from nodeOutputs immediately; will wait for backend input_mask after execution", err);
                    }
                }
                else {
                    // nodeOutputs exist but don't have tensor data yet (need workflow execution)
                    log.info(`Mask node ${this.canvas.node.graph?.links[maskLinkId]?.origin_id} found but has no tensor data yet. Mask will be applied automatically after workflow execution.`);
                    // Don't retry - data won't be available until workflow runs
                }
            }
        }
        // Only check backend if we have actual inputs connected
        const hasImageInput = this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link;
        const hasMaskInput = this.canvas.node.inputs && this.canvas.node.inputs[1] && this.canvas.node.inputs[1].link;
        // If mask input is disconnected, clear any currently applied mask to ensure full separation
        // NOTE(review): only the dedupe flags are reset here — the mask pixels in
        // MaskTool are not actually erased. Confirm the log message matches intent.
        if (!hasMaskInput) {
            this.canvas.maskAppliedFromInput = false;
            this.canvas.lastLoadedMaskLinkId = undefined;
            log.info("Mask input disconnected - cleared mask to enforce separation from input_image");
        }
        if (!hasImageInput && !hasMaskInput) {
            log.debug("No inputs connected, skipping backend check");
            this.canvas.inputDataLoaded = true;
            return;
        }
        // Skip backend check during mask connection if we didn't get immediate data
        if (reason === "mask_connect" && !maskLoaded) {
            log.info("No immediate mask data available during connection, skipping backend check to avoid stale data. Will check after execution.");
            return;
        }
        // Check backend for input data only if we have connected inputs
        const response = await fetch(`/layerforge/get_input_data/${nodeId}`);
        const result = await response.json();
        if (result.success && result.has_input) {
            // Dedupe: skip only if backend payload matches last loaded batch hash
            let backendBatchHash;
            if (result.data?.input_images_batch && Array.isArray(result.data.input_images_batch)) {
                backendBatchHash = result.data.input_images_batch.map((i) => i.data).join('|');
            }
            else if (result.data?.input_image) {
                backendBatchHash = result.data.input_image;
            }
            // Check mask separately - don't skip if only images are unchanged AND mask is actually connected AND allowed
            const shouldCheckMask = hasMaskInput && allowMask;
            if (backendBatchHash && this.canvas.lastLoadedImageSrc === backendBatchHash && !shouldCheckMask) {
                log.debug("Backend input data unchanged and no mask to check, skipping reload");
                this.canvas.inputDataLoaded = true;
                return;
            }
            else if (backendBatchHash && this.canvas.lastLoadedImageSrc === backendBatchHash && shouldCheckMask) {
                log.debug("Images unchanged but need to check mask, continuing...");
                imageLoaded = true; // Mark images as already loaded to skip reloading them
            }
            // Check if we already loaded image data (by checking the current link)
            if (allowImage && !imageLoaded && this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link) {
                const currentLinkId = this.canvas.node.inputs[0].link;
                if (this.canvas.lastLoadedLinkId !== currentLinkId) {
                    // Mark this link as loaded
                    this.canvas.lastLoadedLinkId = currentLinkId;
                    imageLoaded = false; // Will load from backend
                }
            }
            // Check for mask data from backend ONLY when mask input is actually connected AND allowed
            // Only reset if the mask link actually changed
            if (allowMask && hasMaskInput && this.canvas.node.inputs && this.canvas.node.inputs[1]) {
                const currentMaskLinkId = this.canvas.node.inputs[1].link;
                // Only reset if this is a different mask link than what we loaded before
                if (this.canvas.lastLoadedMaskLinkId !== currentMaskLinkId) {
                    maskLoaded = false;
                    log.debug(`New mask input detected (${currentMaskLinkId}), will check backend for mask data`);
                }
                else {
                    log.debug(`Same mask input (${currentMaskLinkId}), mask already loaded`);
                    maskLoaded = true;
                }
            }
            else {
                // No mask input connected, or mask loading not allowed right now
                maskLoaded = true; // Mark as loaded to skip mask processing
                if (!allowMask) {
                    log.debug("Mask loading is currently disabled by caller, skipping mask check");
                }
                else {
                    log.debug("No mask input connected, skipping mask check");
                }
            }
            log.info("Input data found from backend, adding to canvas");
            const inputData = result.data;
            // Compute backend batch hash for dedupe and state
            let backendHashNow;
            if (inputData?.input_images_batch && Array.isArray(inputData.input_images_batch)) {
                backendHashNow = inputData.input_images_batch.map((i) => i.data).join('|');
            }
            else if (inputData?.input_image) {
                backendHashNow = inputData.input_image;
            }
            // Just update the hash without removing any layers
            if (backendHashNow) {
                log.info("New backend input data detected, adding new layers");
                this.canvas.lastLoadedImageSrc = backendHashNow;
            }
            // Mark that we've loaded input data for this execution
            this.canvas.inputDataLoaded = true;
            // Determine add mode based on fit_on_add setting
            const widgets = this.canvas.node.widgets;
            const fitOnAddWidget = widgets ? widgets.find((w) => w.name === "fit_on_add") : null;
            const addMode = (fitOnAddWidget && fitOnAddWidget.value) ? 'fit' : 'center';
            // Load input image(s) only if image input is actually connected, not already loaded, and allowed
            if (allowImage && !imageLoaded && hasImageInput) {
                if (inputData.input_images_batch) {
                    // Handle batch of images
                    const batch = inputData.input_images_batch;
                    log.info(`Processing batch of ${batch.length} images from backend`);
                    for (let i = 0; i < batch.length; i++) {
                        const imgData = batch[i];
                        const img = await createImageFromSource(imgData.data);
                        // Add image to canvas with unique name
                        await this.canvas.canvasLayers.addLayerWithImage(img, { name: `Batch Image ${i + 1}` }, addMode, this.canvas.outputAreaBounds);
                        log.debug(`Added batch image ${i + 1}/${batch.length} from backend`);
                    }
                    log.info(`All ${batch.length} batch images added from backend`);
                    this.canvas.render();
                    this.canvas.saveState();
                }
                else if (inputData.input_image) {
                    // Handle single image (backward compatibility)
                    const img = await createImageFromSource(inputData.input_image);
                    // Add image to canvas at output area position
                    await this.canvas.canvasLayers.addLayerWithImage(img, {}, addMode, this.canvas.outputAreaBounds);
                    log.info("Single input image added as new layer to canvas");
                    this.canvas.render();
                    this.canvas.saveState();
                }
                else {
                    log.debug("No input image data from backend");
                }
            }
            else if (!hasImageInput && (inputData.input_images_batch || inputData.input_image)) {
                log.debug("Backend has image data but no image input connected, skipping image load");
            }
            // Handle mask separately only if mask input is actually connected, allowed, and not already loaded
            if (allowMask && !maskLoaded && hasMaskInput && inputData.input_mask) {
                log.info("Processing input mask");
                // Load mask image
                const maskImg = await createImageFromSource(inputData.input_mask);
                // Determine if we should fit the mask or use it at original size
                const fitOnAddWidget2 = this.canvas.node.widgets.find((w) => w.name === "fit_on_add");
                const shouldFit = fitOnAddWidget2 && fitOnAddWidget2.value;
                let finalMaskImg = maskImg;
                if (shouldFit && this.canvas.maskTool) {
                    const bounds = this.canvas.outputAreaBounds;
                    finalMaskImg = await scaleImageToFit(maskImg, bounds.width, bounds.height);
                }
                // Apply to MaskTool (centers internally)
                if (this.canvas.maskTool) {
                    this.canvas.maskTool.setMask(finalMaskImg, true);
                }
                this.canvas.maskAppliedFromInput = true;
                // Save the mask state
                this.canvas.canvasState.saveMaskState();
                log.info("Applied input mask to mask tool" + (shouldFit ? " (fitted to output area)" : " (original size)"));
            }
            else if (!hasMaskInput && inputData.input_mask) {
                log.debug("Backend has mask data but no mask input connected, skipping mask load");
            }
            else if (!allowMask && inputData.input_mask) {
                log.debug("Mask input data present in backend but mask loading is disabled by caller; skipping");
            }
        }
        else {
            log.debug("No input data from backend");
            // Don't schedule another check - we'll only check when explicitly triggered
        }
    }
    catch (error) {
        log.error("Error checking for input data:", error);
        // Don't schedule another check on error
    }
}
scheduleInputDataCheck() {
// Schedule a retry for mask data check when nodeOutputs are not ready yet
if (this.canvas.pendingInputDataCheck) {
clearTimeout(this.canvas.pendingInputDataCheck);
}
this.canvas.pendingInputDataCheck = window.setTimeout(() => {
this.canvas.pendingInputDataCheck = null;
log.debug("Retrying input data check for mask...");
}, 500); // Shorter delay for mask data retry
}
scheduleDataCheck() {
if (this.canvas.pendingDataCheck) {
clearTimeout(this.canvas.pendingDataCheck);
@@ -423,51 +814,10 @@ export class CanvasIO {
}
}
convertTensorToImageData(tensor) {
try {
const shape = tensor.shape;
const height = shape[1];
const width = shape[2];
const channels = shape[3];
log.debug("Converting tensor:", {
shape: shape,
dataRange: {
min: tensor.min_val,
max: tensor.max_val
}
});
const imageData = new ImageData(width, height);
const data = new Uint8ClampedArray(width * height * 4);
const flatData = tensor.data;
const pixelCount = width * height;
for (let i = 0; i < pixelCount; i++) {
const pixelIndex = i * 4;
const tensorIndex = i * channels;
for (let c = 0; c < channels; c++) {
const value = flatData[tensorIndex + c];
const normalizedValue = (value - tensor.min_val) / (tensor.max_val - tensor.min_val);
data[pixelIndex + c] = Math.round(normalizedValue * 255);
}
data[pixelIndex + 3] = 255;
}
imageData.data.set(data);
return imageData;
}
catch (error) {
log.error("Error converting tensor:", error);
return null;
}
return tensorToImageData(tensor, 'rgb');
}
async createImageFromData(imageData) {
return new Promise((resolve, reject) => {
const { canvas, ctx } = createCanvas(imageData.width, imageData.height, '2d', { willReadFrequently: true });
if (!ctx)
throw new Error("Could not create canvas context");
ctx.putImageData(imageData, 0, 0);
const img = new Image();
img.onload = () => resolve(img);
img.onerror = reject;
img.src = canvas.toDataURL();
});
return createImageFromImageData(imageData);
}
async processMaskData(maskData) {
try {
@@ -527,12 +877,7 @@ export class CanvasIO {
log.info(`Received ${result.images.length} new images, adding to canvas.`);
const newLayers = [];
for (const imageData of result.images) {
const img = new Image();
await new Promise((resolve, reject) => {
img.onload = resolve;
img.onerror = reject;
img.src = imageData;
});
const img = await createImageFromSource(imageData);
let processedImage = img;
// If there's a custom shape, clip the image to that shape
if (this.canvas.outputAreaShape && this.canvas.outputAreaShape.isClosed) {
@@ -559,33 +904,27 @@ export class CanvasIO {
}
}
async clipImageToShape(image, shape) {
return new Promise((resolve, reject) => {
const { canvas, ctx } = createCanvas(image.width, image.height);
if (!ctx) {
reject(new Error("Could not create canvas context for clipping"));
return;
}
// Draw the image first
ctx.drawImage(image, 0, 0);
// Calculate custom shape position accounting for extensions
// Custom shape should maintain its relative position within the original canvas area
const ext = this.canvas.outputAreaExtensionEnabled ? this.canvas.outputAreaExtensions : { top: 0, bottom: 0, left: 0, right: 0 };
const shapeOffsetX = ext.left; // Add left extension to maintain relative position
const shapeOffsetY = ext.top; // Add top extension to maintain relative position
// Create a clipping mask using the shape with extension offset
ctx.globalCompositeOperation = 'destination-in';
ctx.beginPath();
ctx.moveTo(shape.points[0].x + shapeOffsetX, shape.points[0].y + shapeOffsetY);
for (let i = 1; i < shape.points.length; i++) {
ctx.lineTo(shape.points[i].x + shapeOffsetX, shape.points[i].y + shapeOffsetY);
}
ctx.closePath();
ctx.fill();
// Create a new image from the clipped canvas
const clippedImage = new Image();
clippedImage.onload = () => resolve(clippedImage);
clippedImage.onerror = () => reject(new Error("Failed to create clipped image"));
clippedImage.src = canvas.toDataURL();
});
const { canvas, ctx } = createCanvas(image.width, image.height);
if (!ctx) {
throw new Error("Could not create canvas context for clipping");
}
// Draw the image first
ctx.drawImage(image, 0, 0);
// Calculate custom shape position accounting for extensions
// Custom shape should maintain its relative position within the original canvas area
const ext = this.canvas.outputAreaExtensionEnabled ? this.canvas.outputAreaExtensions : { top: 0, bottom: 0, left: 0, right: 0 };
const shapeOffsetX = ext.left; // Add left extension to maintain relative position
const shapeOffsetY = ext.top; // Add top extension to maintain relative position
// Create a clipping mask using the shape with extension offset
ctx.globalCompositeOperation = 'destination-in';
ctx.beginPath();
ctx.moveTo(shape.points[0].x + shapeOffsetX, shape.points[0].y + shapeOffsetY);
for (let i = 1; i < shape.points.length; i++) {
ctx.lineTo(shape.points[i].x + shapeOffsetX, shape.points[i].y + shapeOffsetY);
}
ctx.closePath();
ctx.fill();
// Create a new image from the clipped canvas
return await createImageFromSource(canvas.toDataURL());
}
}

View File

@@ -3,16 +3,33 @@ import { snapToGrid, getSnapAdjustment } from "./utils/CommonUtils.js";
const log = createModuleLogger('CanvasInteractions');
export class CanvasInteractions {
constructor(canvas) {
// Bound event handlers to enable proper removeEventListener and avoid leaks
this.onMouseDown = (e) => this.handleMouseDown(e);
this.onMouseMove = (e) => this.handleMouseMove(e);
this.onMouseUp = (e) => this.handleMouseUp(e);
this.onMouseEnter = (e) => { this.canvas.isMouseOver = true; this.handleMouseEnter(e); };
this.onMouseLeave = (e) => { this.canvas.isMouseOver = false; this.handleMouseLeave(e); };
this.onWheel = (e) => this.handleWheel(e);
this.onKeyDown = (e) => this.handleKeyDown(e);
this.onKeyUp = (e) => this.handleKeyUp(e);
this.onDragOver = (e) => this.handleDragOver(e);
this.onDragEnter = (e) => this.handleDragEnter(e);
this.onDragLeave = (e) => this.handleDragLeave(e);
this.onDrop = (e) => { this.handleDrop(e); };
this.onContextMenu = (e) => this.handleContextMenu(e);
this.onBlur = () => this.handleBlur();
this.onPaste = (e) => this.handlePasteEvent(e);
this.canvas = canvas;
this.interaction = {
mode: 'none',
panStart: { x: 0, y: 0 },
dragStart: { x: 0, y: 0 },
transformOrigin: {},
transformOrigin: null,
resizeHandle: null,
resizeAnchor: { x: 0, y: 0 },
canvasResizeStart: { x: 0, y: 0 },
isCtrlPressed: false,
isMetaPressed: false,
isAltPressed: false,
isShiftPressed: false,
isSPressed: false,
@@ -32,18 +49,29 @@ export class CanvasInteractions {
view: this.canvas.getMouseViewCoordinates(e)
};
}
getModifierState(e) {
return {
ctrl: this.interaction.isCtrlPressed || e?.ctrlKey || false,
shift: this.interaction.isShiftPressed || e?.shiftKey || false,
alt: this.interaction.isAltPressed || e?.altKey || false,
meta: this.interaction.isMetaPressed || e?.metaKey || false,
};
}
preventEventDefaults(e) {
e.preventDefault();
e.stopPropagation();
}
performZoomOperation(worldCoords, zoomFactor) {
const rect = this.canvas.canvas.getBoundingClientRect();
const mouseBufferX = (worldCoords.x - this.canvas.viewport.x) * this.canvas.viewport.zoom;
const mouseBufferY = (worldCoords.y - this.canvas.viewport.y) * this.canvas.viewport.zoom;
const newZoom = Math.max(0.1, Math.min(10, this.canvas.viewport.zoom * zoomFactor));
this.canvas.viewport.zoom = newZoom;
this.canvas.viewport.x = worldCoords.x - (mouseBufferX / this.canvas.viewport.zoom);
this.canvas.viewport.y = worldCoords.y - (mouseBufferY / this.canvas.viewport.zoom);
// Update stroke overlay if mask tool is drawing during zoom
if (this.canvas.maskTool.isDrawing) {
this.canvas.maskTool.handleViewportChange();
}
this.canvas.onViewportChange?.();
}
renderAndSave(shouldSave = false) {
@@ -64,29 +92,39 @@ export class CanvasInteractions {
}
}
setupEventListeners() {
this.canvas.canvas.addEventListener('mousedown', this.handleMouseDown.bind(this));
this.canvas.canvas.addEventListener('mousemove', this.handleMouseMove.bind(this));
this.canvas.canvas.addEventListener('mouseup', this.handleMouseUp.bind(this));
this.canvas.canvas.addEventListener('mouseleave', this.handleMouseLeave.bind(this));
this.canvas.canvas.addEventListener('wheel', this.handleWheel.bind(this), { passive: false });
this.canvas.canvas.addEventListener('keydown', this.handleKeyDown.bind(this));
this.canvas.canvas.addEventListener('keyup', this.handleKeyUp.bind(this));
this.canvas.canvas.addEventListener('mousedown', this.onMouseDown);
this.canvas.canvas.addEventListener('mousemove', this.onMouseMove);
this.canvas.canvas.addEventListener('mouseup', this.onMouseUp);
this.canvas.canvas.addEventListener('wheel', this.onWheel, { passive: false });
this.canvas.canvas.addEventListener('keydown', this.onKeyDown);
this.canvas.canvas.addEventListener('keyup', this.onKeyUp);
// Add a blur event listener to the window to reset key states
window.addEventListener('blur', this.handleBlur.bind(this));
document.addEventListener('paste', this.handlePasteEvent.bind(this));
this.canvas.canvas.addEventListener('mouseenter', (e) => {
this.canvas.isMouseOver = true;
this.handleMouseEnter(e);
});
this.canvas.canvas.addEventListener('mouseleave', (e) => {
this.canvas.isMouseOver = false;
this.handleMouseLeave(e);
});
this.canvas.canvas.addEventListener('dragover', this.handleDragOver.bind(this));
this.canvas.canvas.addEventListener('dragenter', this.handleDragEnter.bind(this));
this.canvas.canvas.addEventListener('dragleave', this.handleDragLeave.bind(this));
this.canvas.canvas.addEventListener('drop', this.handleDrop.bind(this));
this.canvas.canvas.addEventListener('contextmenu', this.handleContextMenu.bind(this));
window.addEventListener('blur', this.onBlur);
document.addEventListener('paste', this.onPaste);
this.canvas.canvas.addEventListener('mouseenter', this.onMouseEnter);
this.canvas.canvas.addEventListener('mouseleave', this.onMouseLeave);
this.canvas.canvas.addEventListener('dragover', this.onDragOver);
this.canvas.canvas.addEventListener('dragenter', this.onDragEnter);
this.canvas.canvas.addEventListener('dragleave', this.onDragLeave);
this.canvas.canvas.addEventListener('drop', this.onDrop);
this.canvas.canvas.addEventListener('contextmenu', this.onContextMenu);
}
teardownEventListeners() {
this.canvas.canvas.removeEventListener('mousedown', this.onMouseDown);
this.canvas.canvas.removeEventListener('mousemove', this.onMouseMove);
this.canvas.canvas.removeEventListener('mouseup', this.onMouseUp);
this.canvas.canvas.removeEventListener('wheel', this.onWheel);
this.canvas.canvas.removeEventListener('keydown', this.onKeyDown);
this.canvas.canvas.removeEventListener('keyup', this.onKeyUp);
window.removeEventListener('blur', this.onBlur);
document.removeEventListener('paste', this.onPaste);
this.canvas.canvas.removeEventListener('mouseenter', this.onMouseEnter);
this.canvas.canvas.removeEventListener('mouseleave', this.onMouseLeave);
this.canvas.canvas.removeEventListener('dragover', this.onDragOver);
this.canvas.canvas.removeEventListener('dragenter', this.onDragEnter);
this.canvas.canvas.removeEventListener('dragleave', this.onDragLeave);
this.canvas.canvas.removeEventListener('drop', this.onDrop);
this.canvas.canvas.removeEventListener('contextmenu', this.onContextMenu);
}
/**
* Sprawdza czy punkt znajduje się w obszarze któregokolwiek z zaznaczonych layerów
@@ -124,9 +162,10 @@ export class CanvasInteractions {
handleMouseDown(e) {
this.canvas.canvas.focus();
const coords = this.getMouseCoordinates(e);
const mods = this.getModifierState(e);
if (this.interaction.mode === 'drawingMask') {
this.canvas.maskTool.handleMouseDown(coords.world, coords.view);
this.canvas.render();
// Don't render here - mask tool will handle its own drawing
return;
}
if (this.canvas.shapeTool.isActive) {
@@ -135,11 +174,11 @@ export class CanvasInteractions {
}
// --- Ostateczna, poprawna kolejność sprawdzania ---
// 1. Akcje globalne z modyfikatorami (mają najwyższy priorytet)
if (e.shiftKey && e.ctrlKey) {
if (mods.shift && mods.ctrl) {
this.startCanvasMove(coords.world);
return;
}
if (e.shiftKey) {
if (mods.shift) {
// Clear custom shape when starting canvas resize
if (this.canvas.outputAreaShape) {
// If auto-apply shape mask is enabled, remove the mask before clearing the shape
@@ -163,7 +202,7 @@ export class CanvasInteractions {
}
return;
}
if (e.button !== 0) { // Środkowy przycisk
if (e.button === 1) { // Środkowy przycisk
this.startPanning(e);
return;
}
@@ -179,7 +218,7 @@ export class CanvasInteractions {
return;
}
// 4. Domyślna akcja na tle (lewy przycisk bez modyfikatorów)
this.startPanningOrClearSelection(e);
this.startPanning(e, true); // clearSelection = true
}
handleMouseMove(e) {
const coords = this.getMouseCoordinates(e);
@@ -199,7 +238,7 @@ export class CanvasInteractions {
switch (this.interaction.mode) {
case 'drawingMask':
this.canvas.maskTool.handleMouseMove(coords.world, coords.view);
this.canvas.render();
// Don't render during mask drawing - it's handled by mask tool internally
break;
case 'panning':
this.panViewport(e);
@@ -221,6 +260,10 @@ export class CanvasInteractions {
break;
default:
this.updateCursor(coords.world);
// Update brush cursor on overlay if mask tool is active
if (this.canvas.maskTool.isActive) {
this.canvas.canvasRenderer.drawMaskBrushCursor(coords.world);
}
break;
}
// --- DYNAMICZNY PODGLĄD LINII CUSTOM SHAPE ---
@@ -232,6 +275,7 @@ export class CanvasInteractions {
const coords = this.getMouseCoordinates(e);
if (this.interaction.mode === 'drawingMask') {
this.canvas.maskTool.handleMouseUp(coords.view);
// Render only once after drawing is complete
this.canvas.render();
return;
}
@@ -315,8 +359,17 @@ export class CanvasInteractions {
this.performZoomOperation(coords.world, zoomFactor);
}
else {
// Layer transformation when layers are selected
this.handleLayerWheelTransformation(e);
// Check if mouse is over any selected layer
const isOverSelectedLayer = this.isPointInSelectedLayers(coords.world.x, coords.world.y);
if (isOverSelectedLayer) {
// Layer transformation when layers are selected and mouse is over selected layer
this.handleLayerWheelTransformation(e);
}
else {
// Zoom operation when mouse is not over selected layers
const zoomFactor = e.deltaY < 0 ? 1.1 : 1 / 1.1;
this.performZoomOperation(coords.world, zoomFactor);
}
}
this.canvas.render();
if (!this.canvas.maskTool.isActive) {
@@ -324,14 +377,15 @@ export class CanvasInteractions {
}
}
handleLayerWheelTransformation(e) {
const mods = this.getModifierState(e);
const rotationStep = 5 * (e.deltaY > 0 ? -1 : 1);
const direction = e.deltaY < 0 ? 1 : -1;
this.canvas.canvasSelection.selectedLayers.forEach((layer) => {
if (e.shiftKey) {
this.handleLayerRotation(layer, e.ctrlKey, direction, rotationStep);
if (mods.shift) {
this.handleLayerRotation(layer, mods.ctrl, direction, rotationStep);
}
else {
this.handleLayerScaling(layer, e.ctrlKey, e.deltaY);
this.handleLayerScaling(layer, mods.ctrl, e.deltaY);
}
});
}
@@ -376,7 +430,7 @@ export class CanvasInteractions {
}
}
calculateGridBasedScaling(oldHeight, deltaY) {
const gridSize = 64;
const gridSize = 64; // Grid size - could be made configurable in the future
const direction = deltaY > 0 ? -1 : 1;
let targetHeight;
if (direction > 0) {
@@ -401,6 +455,8 @@ export class CanvasInteractions {
handleKeyDown(e) {
if (e.key === 'Control')
this.interaction.isCtrlPressed = true;
if (e.key === 'Meta')
this.interaction.isMetaPressed = true;
if (e.key === 'Shift')
this.interaction.isShiftPressed = true;
if (e.key === 'Alt') {
@@ -418,11 +474,12 @@ export class CanvasInteractions {
return;
}
// Globalne skróty (Undo/Redo/Copy/Paste)
if (e.ctrlKey || e.metaKey) {
const mods = this.getModifierState(e);
if (mods.ctrl || mods.meta) {
let handled = true;
switch (e.key.toLowerCase()) {
case 'z':
if (e.shiftKey) {
if (mods.shift) {
this.canvas.redo();
}
else {
@@ -449,7 +506,7 @@ export class CanvasInteractions {
}
// Skróty kontekstowe (zależne od zaznaczenia)
if (this.canvas.canvasSelection.selectedLayers.length > 0) {
const step = e.shiftKey ? 10 : 1;
const step = mods.shift ? 10 : 1;
let needsRender = false;
// Używamy e.code dla spójności i niezależności od układu klawiatury
const movementKeys = ['ArrowLeft', 'ArrowRight', 'ArrowUp', 'ArrowDown', 'BracketLeft', 'BracketRight'];
@@ -485,6 +542,8 @@ export class CanvasInteractions {
handleKeyUp(e) {
if (e.key === 'Control')
this.interaction.isCtrlPressed = false;
if (e.key === 'Meta')
this.interaction.isMetaPressed = false;
if (e.key === 'Shift')
this.interaction.isShiftPressed = false;
if (e.key === 'Alt')
@@ -504,6 +563,7 @@ export class CanvasInteractions {
handleBlur() {
log.debug('Window lost focus, resetting key states.');
this.interaction.isCtrlPressed = false;
this.interaction.isMetaPressed = false;
this.interaction.isAltPressed = false;
this.interaction.isShiftPressed = false;
this.interaction.isSPressed = false;
@@ -525,6 +585,11 @@ export class CanvasInteractions {
}
}
updateCursor(worldCoords) {
// If actively rotating, show grabbing cursor
if (this.interaction.mode === 'rotating') {
this.canvas.canvas.style.cursor = 'grabbing';
return;
}
const transformTarget = this.canvas.canvasLayers.getHandleAtPosition(worldCoords.x, worldCoords.y);
if (transformTarget) {
const handleName = transformTarget.handle;
@@ -572,7 +637,9 @@ export class CanvasInteractions {
}
prepareForDrag(layer, worldCoords) {
// Zaktualizuj zaznaczenie, ale nie zapisuj stanu
if (this.interaction.isCtrlPressed) {
// Support both Ctrl (Windows/Linux) and Cmd (macOS) for multi-selection
const mods = this.getModifierState();
if (mods.ctrl || mods.meta) {
const index = this.canvas.canvasSelection.selectedLayers.indexOf(layer);
if (index === -1) {
this.canvas.canvasSelection.updateSelection([...this.canvas.canvasSelection.selectedLayers, layer]);
@@ -590,10 +657,9 @@ export class CanvasInteractions {
this.interaction.mode = 'potential-drag';
this.interaction.dragStart = { ...worldCoords };
}
startPanningOrClearSelection(e) {
// Ta funkcja jest teraz wywoływana tylko gdy kliknięto na tło bez modyfikatorów.
// Domyślna akcja: wyczyść zaznaczenie i rozpocznij panoramowanie.
if (!this.interaction.isCtrlPressed) {
startPanning(e, clearSelection = true) {
// Unified panning method - can optionally clear selection
if (clearSelection && !this.interaction.isCtrlPressed) {
this.canvas.canvasSelection.updateSelection([]);
}
this.interaction.mode = 'panning';
@@ -642,19 +708,16 @@ export class CanvasInteractions {
this.canvas.render();
this.canvas.saveState();
}
startPanning(e) {
if (!this.interaction.isCtrlPressed) {
this.canvas.canvasSelection.updateSelection([]);
}
this.interaction.mode = 'panning';
this.interaction.panStart = { x: e.clientX, y: e.clientY };
}
panViewport(e) {
const dx = e.clientX - this.interaction.panStart.x;
const dy = e.clientY - this.interaction.panStart.y;
this.canvas.viewport.x -= dx / this.canvas.viewport.zoom;
this.canvas.viewport.y -= dy / this.canvas.viewport.zoom;
this.interaction.panStart = { x: e.clientX, y: e.clientY };
// Update stroke overlay if mask tool is drawing during pan
if (this.canvas.maskTool.isDrawing) {
this.canvas.maskTool.handleViewportChange();
}
this.canvas.render();
this.canvas.onViewportChange?.();
}
@@ -709,7 +772,7 @@ export class CanvasInteractions {
mouseY = Math.abs(mouseY - snapToGrid(mouseY)) < snapThreshold ? snapToGrid(mouseY) : mouseY;
}
const o = this.interaction.transformOrigin;
if (o.rotation === undefined || o.width === undefined || o.height === undefined || o.centerX === undefined || o.centerY === undefined)
if (!o)
return;
const handle = this.interaction.resizeHandle;
const anchor = this.interaction.resizeAnchor;
@@ -856,7 +919,7 @@ export class CanvasInteractions {
if (!layer)
return;
const o = this.interaction.transformOrigin;
if (o.rotation === undefined || o.centerX === undefined || o.centerY === undefined)
if (!o)
return;
const startAngle = Math.atan2(this.interaction.dragStart.y - o.centerY, this.interaction.dragStart.x - o.centerX);
const currentAngle = Math.atan2(worldCoords.y - o.centerY, worldCoords.x - o.centerX);

View File

@@ -7,6 +7,9 @@ export class CanvasRenderer {
this.lastRenderTime = 0;
this.renderInterval = 1000 / 60;
this.isDirty = false;
// Initialize overlay canvases
this.initOverlay();
this.initStrokeOverlay();
}
/**
* Helper function to draw text with background at world coordinates
@@ -102,10 +105,12 @@ export class CanvasRenderer {
if (maskImage && this.canvas.maskTool.isOverlayVisible) {
ctx.save();
if (this.canvas.maskTool.isActive) {
// In draw mask mode, use the previewOpacity value from the slider
ctx.globalCompositeOperation = 'source-over';
ctx.globalAlpha = 0.5;
ctx.globalAlpha = this.canvas.maskTool.previewOpacity;
}
else {
// When not in draw mask mode, show mask at full opacity
ctx.globalCompositeOperation = 'source-over';
ctx.globalAlpha = 1.0;
}
@@ -158,6 +163,11 @@ export class CanvasRenderer {
this.canvas.canvas.height = this.canvas.offscreenCanvas.height;
}
this.canvas.ctx.drawImage(this.canvas.offscreenCanvas, 0, 0);
// Ensure overlay canvases are in DOM and properly sized
this.addOverlayToDOM();
this.updateOverlaySize();
this.addStrokeOverlayToDOM();
this.updateStrokeOverlaySize();
// Update Batch Preview UI positions
if (this.canvas.batchPreviewManagers && this.canvas.batchPreviewManagers.length > 0) {
this.canvas.batchPreviewManagers.forEach((manager) => {
@@ -583,4 +593,243 @@ export class CanvasRenderer {
padding: 8
});
}
/**
* Initialize overlay canvas for lightweight overlays like brush cursor
*/
initOverlay() {
// Setup overlay canvas to match main canvas
this.updateOverlaySize();
// Position overlay canvas on top of main canvas
this.canvas.overlayCanvas.style.position = 'absolute';
this.canvas.overlayCanvas.style.left = '0px';
this.canvas.overlayCanvas.style.top = '0px';
this.canvas.overlayCanvas.style.pointerEvents = 'none';
this.canvas.overlayCanvas.style.zIndex = '20'; // Above other overlays
// Add overlay to DOM when main canvas is added
this.addOverlayToDOM();
log.debug('Overlay canvas initialized');
}
/**
* Add overlay canvas to DOM if main canvas has a parent
*/
addOverlayToDOM() {
if (this.canvas.canvas.parentElement && !this.canvas.overlayCanvas.parentElement) {
this.canvas.canvas.parentElement.appendChild(this.canvas.overlayCanvas);
log.debug('Overlay canvas added to DOM');
}
}
/**
* Update overlay canvas size to match main canvas
*/
updateOverlaySize() {
if (this.canvas.overlayCanvas.width !== this.canvas.canvas.clientWidth ||
this.canvas.overlayCanvas.height !== this.canvas.canvas.clientHeight) {
this.canvas.overlayCanvas.width = Math.max(1, this.canvas.canvas.clientWidth);
this.canvas.overlayCanvas.height = Math.max(1, this.canvas.canvas.clientHeight);
log.debug(`Overlay canvas resized to ${this.canvas.overlayCanvas.width}x${this.canvas.overlayCanvas.height}`);
}
}
/**
* Clear overlay canvas
*/
clearOverlay() {
this.canvas.overlayCtx.clearRect(0, 0, this.canvas.overlayCanvas.width, this.canvas.overlayCanvas.height);
}
/**
* Initialize a dedicated overlay for real-time mask stroke preview
*/
initStrokeOverlay() {
// Create canvas if not created yet
if (!this.strokeOverlayCanvas) {
this.strokeOverlayCanvas = document.createElement('canvas');
const ctx = this.strokeOverlayCanvas.getContext('2d');
if (!ctx) {
throw new Error('Failed to get 2D context for stroke overlay canvas');
}
this.strokeOverlayCtx = ctx;
}
// Size match main canvas
this.updateStrokeOverlaySize();
// Position above main canvas but below cursor overlay
this.strokeOverlayCanvas.style.position = 'absolute';
this.strokeOverlayCanvas.style.left = '0px';
this.strokeOverlayCanvas.style.top = '0px';
this.strokeOverlayCanvas.style.pointerEvents = 'none';
this.strokeOverlayCanvas.style.zIndex = '19'; // Below cursor overlay (20)
// Opacity is now controlled by MaskTool.previewOpacity
this.strokeOverlayCanvas.style.opacity = String(this.canvas.maskTool.previewOpacity || 0.5);
// Add to DOM
this.addStrokeOverlayToDOM();
log.debug('Stroke overlay canvas initialized');
}
/**
* Add stroke overlay canvas to DOM if needed
*/
addStrokeOverlayToDOM() {
if (this.canvas.canvas.parentElement && !this.strokeOverlayCanvas.parentElement) {
this.canvas.canvas.parentElement.appendChild(this.strokeOverlayCanvas);
log.debug('Stroke overlay canvas added to DOM');
}
}
/**
* Ensure stroke overlay size matches main canvas
*/
updateStrokeOverlaySize() {
const w = Math.max(1, this.canvas.canvas.clientWidth);
const h = Math.max(1, this.canvas.canvas.clientHeight);
if (this.strokeOverlayCanvas.width !== w || this.strokeOverlayCanvas.height !== h) {
this.strokeOverlayCanvas.width = w;
this.strokeOverlayCanvas.height = h;
log.debug(`Stroke overlay resized to ${w}x${h}`);
}
}
/**
* Clear the stroke overlay
*/
clearMaskStrokeOverlay() {
if (!this.strokeOverlayCtx)
return;
this.strokeOverlayCtx.clearRect(0, 0, this.strokeOverlayCanvas.width, this.strokeOverlayCanvas.height);
}
/**
 * Draw one preview stroke segment onto the stroke overlay in screen space.
 *
 * Mirrors MaskTool's drawLineOnChunk (same line width, round caps/joins
 * and hardness gradient, scaled by the current zoom) so the live preview
 * matches the stroke that will be committed to the mask.
 *
 * @param startWorld Segment start point in world coordinates
 * @param endWorld   Segment end point in world coordinates
 */
drawMaskStrokeSegment(startWorld, endWorld) {
    // Ensure overlay is present and sized
    this.updateStrokeOverlaySize();
    const zoom = this.canvas.viewport.zoom;
    // World -> screen conversion under the current viewport.
    const toScreen = (p) => ({
        x: (p.x - this.canvas.viewport.x) * zoom,
        y: (p.y - this.canvas.viewport.y) * zoom
    });
    const startScreen = toScreen(startWorld);
    const endScreen = toScreen(endWorld);
    // Brush metrics in screen pixels.
    const brushRadius = (this.canvas.maskTool.brushSize / 2) * zoom;
    const hardness = this.canvas.maskTool.brushHardness;
    const strength = this.canvas.maskTool.brushStrength;
    // If strength is 0, don't draw anything
    if (strength <= 0) {
        return;
    }
    this.strokeOverlayCtx.save();
    // Draw line segment exactly as MaskTool does
    this.strokeOverlayCtx.beginPath();
    this.strokeOverlayCtx.moveTo(startScreen.x, startScreen.y);
    this.strokeOverlayCtx.lineTo(endScreen.x, endScreen.y);
    // Match the gradient setup from MaskTool's drawLineOnChunk
    if (hardness === 1) {
        // Hard brush: uniform alpha, no falloff.
        this.strokeOverlayCtx.strokeStyle = `rgba(255, 255, 255, ${strength})`;
    }
    else {
        // Soft brush: radial falloff from full strength to transparent.
        // NOTE(review): the gradient is anchored at the segment END point
        // only; presumably acceptable for the short segments produced while
        // drawing (and stated to match MaskTool) — confirm for long segments.
        const innerRadius = brushRadius * hardness;
        const gradient = this.strokeOverlayCtx.createRadialGradient(endScreen.x, endScreen.y, innerRadius, endScreen.x, endScreen.y, brushRadius);
        gradient.addColorStop(0, `rgba(255, 255, 255, ${strength})`);
        gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
        this.strokeOverlayCtx.strokeStyle = gradient;
    }
    // Match line properties from MaskTool
    this.strokeOverlayCtx.lineWidth = this.canvas.maskTool.brushSize * zoom;
    this.strokeOverlayCtx.lineCap = 'round';
    this.strokeOverlayCtx.lineJoin = 'round';
    this.strokeOverlayCtx.globalCompositeOperation = 'source-over';
    this.strokeOverlayCtx.stroke();
    this.strokeOverlayCtx.restore();
}
/**
* Redraws the entire stroke overlay from world coordinates
* Used when viewport changes during drawing to maintain visual consistency
*/
redrawMaskStrokeOverlay(strokePoints) {
if (strokePoints.length < 2)
return;
// Clear the overlay first
this.clearMaskStrokeOverlay();
// Redraw all segments with current viewport
for (let i = 1; i < strokePoints.length; i++) {
this.drawMaskStrokeSegment(strokePoints[i - 1], strokePoints[i]);
}
}
/**
 * Draw the mask brush cursor on the overlay canvas, giving visual
 * feedback for all three brush parameters:
 *  - size:     outer circle radius (scaled by zoom)
 *  - strength: fill/stroke opacity (outline only when strength is 0)
 *  - hardness: solid outline for hard brushes, dashed for soft ones,
 *              plus a matching fill gradient
 * Clears the overlay instead when the mask tool is inactive or the
 * pointer is outside the canvas.
 * @param worldPoint World coordinates of the cursor
 */
drawMaskBrushCursor(worldPoint) {
    if (!this.canvas.maskTool.isActive || !this.canvas.isMouseOver) {
        this.clearOverlay();
        return;
    }
    // Update overlay size if needed
    this.updateOverlaySize();
    // Clear previous cursor
    this.clearOverlay();
    // Convert world coordinates to screen coordinates
    const screenX = (worldPoint.x - this.canvas.viewport.x) * this.canvas.viewport.zoom;
    const screenY = (worldPoint.y - this.canvas.viewport.y) * this.canvas.viewport.zoom;
    // Get brush properties (radius in screen pixels)
    const brushRadius = (this.canvas.maskTool.brushSize / 2) * this.canvas.viewport.zoom;
    const brushStrength = this.canvas.maskTool.brushStrength;
    const brushHardness = this.canvas.maskTool.brushHardness;
    // Save context state
    this.canvas.overlayCtx.save();
    // If strength is 0, just draw outline
    if (brushStrength > 0) {
        // Draw inner fill to visualize brush effect - matches actual brush rendering
        const gradient = this.canvas.overlayCtx.createRadialGradient(screenX, screenY, 0, screenX, screenY, brushRadius);
        // Preview alpha - subtle to not obscure content
        const previewAlpha = brushStrength * 0.15; // Very subtle preview (max 15% opacity)
        if (brushHardness === 1) {
            // Hard brush - uniform fill within radius
            gradient.addColorStop(0, `rgba(255, 255, 255, ${previewAlpha})`);
            gradient.addColorStop(1, `rgba(255, 255, 255, ${previewAlpha})`);
        }
        else {
            // Soft brush - gradient fade matching actual brush
            gradient.addColorStop(0, `rgba(255, 255, 255, ${previewAlpha})`);
            if (brushHardness > 0) {
                // Hold full preview alpha out to the hardness fraction of the radius.
                gradient.addColorStop(brushHardness, `rgba(255, 255, 255, ${previewAlpha})`);
            }
            gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
        }
        this.canvas.overlayCtx.beginPath();
        this.canvas.overlayCtx.arc(screenX, screenY, brushRadius, 0, 2 * Math.PI);
        this.canvas.overlayCtx.fillStyle = gradient;
        this.canvas.overlayCtx.fill();
    }
    // Draw outer circle (SIZE indicator)
    this.canvas.overlayCtx.beginPath();
    this.canvas.overlayCtx.arc(screenX, screenY, brushRadius, 0, 2 * Math.PI);
    // Stroke opacity based on strength (dimmer when strength is 0)
    const strokeOpacity = brushStrength > 0 ? (0.4 + brushStrength * 0.4) : 0.3;
    this.canvas.overlayCtx.strokeStyle = `rgba(255, 255, 255, ${strokeOpacity})`;
    this.canvas.overlayCtx.lineWidth = 1.5;
    // Visual feedback for hardness
    if (brushHardness > 0.8) {
        // Hard brush - solid line
        this.canvas.overlayCtx.setLineDash([]);
    }
    else {
        // Soft brush - dashed line (softer brush => longer dashes)
        const dashLength = 2 + (1 - brushHardness) * 4;
        this.canvas.overlayCtx.setLineDash([dashLength, dashLength]);
    }
    this.canvas.overlayCtx.stroke();
    // Center dot for small brushes (keeps the cursor visible under ~10px)
    if (brushRadius < 5) {
        this.canvas.overlayCtx.beginPath();
        this.canvas.overlayCtx.arc(screenX, screenY, 1, 0, 2 * Math.PI);
        this.canvas.overlayCtx.fillStyle = `rgba(255, 255, 255, ${strokeOpacity})`;
        this.canvas.overlayCtx.fill();
    }
    // Restore context state
    this.canvas.overlayCtx.restore();
}
/**
* Update overlay position when viewport changes
*/
updateOverlayPosition() {
// Overlay canvas is positioned absolutely, so it doesn't need repositioning
// Just ensure it's the right size
this.updateOverlaySize();
}
}

View File

@@ -404,12 +404,10 @@ If you see dark images or masks in the output, make sure node_id is set to ${cor
}
if (this.maskUndoStack.length > 0) {
const prevState = this.maskUndoStack[this.maskUndoStack.length - 1];
const maskCanvas = this.canvas.maskTool.getMask();
const maskCtx = maskCanvas.getContext('2d', { willReadFrequently: true });
if (maskCtx) {
maskCtx.clearRect(0, 0, maskCanvas.width, maskCanvas.height);
maskCtx.drawImage(prevState, 0, 0);
}
// Use the new restoreMaskFromSavedState method that properly clears chunks first
this.canvas.maskTool.restoreMaskFromSavedState(prevState);
// Clear stroke overlay to prevent old drawing previews from persisting
this.canvas.canvasRenderer.clearMaskStrokeOverlay();
this.canvas.render();
}
this.canvas.updateHistoryButtons();
@@ -420,12 +418,10 @@ If you see dark images or masks in the output, make sure node_id is set to ${cor
const nextState = this.maskRedoStack.pop();
if (nextState) {
this.maskUndoStack.push(nextState);
const maskCanvas = this.canvas.maskTool.getMask();
const maskCtx = maskCanvas.getContext('2d', { willReadFrequently: true });
if (maskCtx) {
maskCtx.clearRect(0, 0, maskCanvas.width, maskCanvas.height);
maskCtx.drawImage(nextState, 0, 0);
}
// Use the new restoreMaskFromSavedState method that properly clears chunks first
this.canvas.maskTool.restoreMaskFromSavedState(nextState);
// Clear stroke overlay to prevent old drawing previews from persisting
this.canvas.canvasRenderer.clearMaskStrokeOverlay();
this.canvas.render();
}
this.canvas.updateHistoryButtons();

View File

@@ -554,6 +554,25 @@ async function createCanvasWidget(node, widget, app) {
setTimeout(() => canvas.render(), 0);
}
}),
$el("div.painter-slider-container.mask-control", { style: { display: 'none' } }, [
$el("label", { for: "preview-opacity-slider", textContent: "Mask Opacity:" }),
$el("input", {
id: "preview-opacity-slider",
type: "range",
min: "0",
max: "1",
step: "0.05",
value: "0.5",
oninput: (e) => {
const value = e.target.value;
canvas.maskTool.setPreviewOpacity(parseFloat(value));
const valueEl = document.getElementById('preview-opacity-value');
if (valueEl)
valueEl.textContent = `${Math.round(parseFloat(value) * 100)}%`;
}
}),
$el("div.slider-value", { id: "preview-opacity-value" }, ["50%"])
]),
$el("div.painter-slider-container.mask-control", { style: { display: 'none' } }, [
$el("label", { for: "brush-size-slider", textContent: "Size:" }),
$el("input", {
@@ -892,7 +911,9 @@ async function createCanvasWidget(node, widget, app) {
height: "100%"
}
}, [controlPanel, canvasContainer, layersPanelContainer]);
node.addDOMWidget("mainContainer", "widget", mainContainer);
if (node.addDOMWidget) {
node.addDOMWidget("mainContainer", "widget", mainContainer);
}
const openEditorBtn = controlPanel.querySelector(`#open-editor-btn-${node.id}`);
let backdrop = null;
let originalParent = null;
@@ -981,7 +1002,11 @@ async function createCanvasWidget(node, widget, app) {
if (!window.canvasExecutionStates) {
window.canvasExecutionStates = new Map();
}
node.canvasWidget = canvas;
// Store the entire widget object, not just the canvas
node.canvasWidget = {
canvas: canvas,
panel: controlPanel
};
setTimeout(() => {
canvas.loadInitialState();
if (canvas.canvasLayersPanel) {
@@ -998,7 +1023,7 @@ async function createCanvasWidget(node, widget, app) {
if (canvas && canvas.setPreviewVisibility) {
canvas.setPreviewVisibility(value);
}
if (node.graph && node.graph.canvas) {
if (node.graph && node.graph.canvas && node.setDirtyCanvas) {
node.setDirtyCanvas(true, true);
}
};
@@ -1077,9 +1102,144 @@ app.registerExtension({
const canvasWidget = await createCanvasWidget(this, null, app);
canvasNodeInstances.set(this.id, canvasWidget);
log.info(`Registered CanvasNode instance for ID: ${this.id}`);
// Store the canvas widget on the node
this.canvasWidget = canvasWidget;
// Check if there are already connected inputs
setTimeout(() => {
this.setDirtyCanvas(true, true);
}, 100);
if (this.inputs && this.inputs.length > 0) {
// Check if input_image (index 0) is connected
if (this.inputs[0] && this.inputs[0].link) {
log.info("Input image already connected on node creation, checking for data...");
if (canvasWidget.canvas && canvasWidget.canvas.canvasIO) {
canvasWidget.canvas.inputDataLoaded = false;
// Only allow images on init; mask should load only on mask connect or execution
canvasWidget.canvas.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: "init_image_connected" });
}
}
}
if (this.setDirtyCanvas) {
this.setDirtyCanvas(true, true);
}
}, 500);
};
// Add onConnectionsChange handler to detect when inputs are connected
nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) {
log.info(`onConnectionsChange called: type=${type}, index=${index}, connected=${connected}`, link_info);
// Check if this is an input connection (type 1 = INPUT)
if (type === 1) {
// Get the canvas widget - it might be in different places
const canvasWidget = this.canvasWidget;
const canvas = canvasWidget?.canvas || canvasWidget;
if (!canvas || !canvas.canvasIO) {
log.warn("Canvas not ready in onConnectionsChange, scheduling retry...");
// Retry multiple times with increasing delays
const retryDelays = [500, 1000, 2000];
let retryCount = 0;
const tryAgain = () => {
const retryCanvas = this.canvasWidget?.canvas || this.canvasWidget;
if (retryCanvas && retryCanvas.canvasIO) {
log.info("Canvas now ready, checking for input data...");
if (connected) {
retryCanvas.inputDataLoaded = false;
// Respect which input triggered the connection:
const opts = (index === 1)
? { allowImage: false, allowMask: true, reason: "mask_connect" }
: { allowImage: true, allowMask: false, reason: "image_connect" };
retryCanvas.canvasIO.checkForInputData(opts);
}
}
else if (retryCount < retryDelays.length) {
log.warn(`Canvas still not ready, retry ${retryCount + 1}/${retryDelays.length}...`);
setTimeout(tryAgain, retryDelays[retryCount++]);
}
else {
log.error("Canvas failed to initialize after multiple retries");
}
};
setTimeout(tryAgain, retryDelays[retryCount++]);
return;
}
// Handle input_image connection (index 0)
if (index === 0) {
if (connected && link_info) {
log.info("Input image connected, marking for data check...");
// Reset the input data loaded flag to allow loading the new connection
canvas.inputDataLoaded = false;
// Also reset the last loaded image source and link ID to allow the new image
canvas.lastLoadedImageSrc = undefined;
canvas.lastLoadedLinkId = undefined;
// Mark that we have a pending input connection
canvas.hasPendingInputConnection = true;
// If mask input is not connected and a mask was auto-applied from input_mask before, clear it now
if (!(this.inputs && this.inputs[1] && this.inputs[1].link)) {
if (canvas.maskAppliedFromInput && canvas.maskTool) {
canvas.maskTool.clear();
canvas.render();
canvas.maskAppliedFromInput = false;
canvas.lastLoadedMaskLinkId = undefined;
log.info("Cleared auto-applied mask because input_image connected without input_mask");
}
}
// Check for data immediately when connected
setTimeout(() => {
log.info("Checking for input data after connection...");
// Only load images here; masks should not auto-load on image connect
canvas.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: "image_connect" });
}, 500);
}
else {
log.info("Input image disconnected");
canvas.hasPendingInputConnection = false;
// Reset when disconnected so a new connection can load
canvas.inputDataLoaded = false;
canvas.lastLoadedImageSrc = undefined;
canvas.lastLoadedLinkId = undefined;
}
}
// Handle input_mask connection (index 1)
if (index === 1) {
if (connected && link_info) {
log.info("Input mask connected");
// DON'T clear existing mask when connecting a new input
// Reset the loaded mask link ID to allow loading from the new connection
canvas.lastLoadedMaskLinkId = undefined;
// Mark that we have a pending mask connection
canvas.hasPendingMaskConnection = true;
// Check for data immediately when connected
setTimeout(() => {
log.info("Checking for input data after mask connection...");
// Only load mask here if it's immediately available from the connected node
// Don't load stale masks from backend storage
canvas.canvasIO.checkForInputData({ allowImage: false, allowMask: true, reason: "mask_connect" });
}, 500);
}
else {
log.info("Input mask disconnected");
canvas.hasPendingMaskConnection = false;
// If the current mask came from input_mask, clear it to avoid affecting images when mask is not connected
if (canvas.maskAppliedFromInput && canvas.maskTool) {
canvas.maskAppliedFromInput = false;
canvas.lastLoadedMaskLinkId = undefined;
log.info("Cleared auto-applied mask due to mask input disconnection");
}
}
}
}
};
// Wrap onExecuted so the canvas re-checks its inputs after every workflow run.
const previousOnExecuted = nodeType.prototype.onExecuted;
nodeType.prototype.onExecuted = function (message) {
    log.info("Node executed, checking for input data...");
    // canvasWidget may be the widget wrapper ({ canvas, panel }) or the Canvas itself.
    const widget = this.canvasWidget;
    const canvas = widget?.canvas || widget;
    // inputDataLoaded is deliberately NOT reset here; execution is allowed to
    // refresh both the image and the mask.
    if (canvas && canvas.canvasIO) {
        canvas.canvasIO.checkForInputData({ allowImage: true, allowMask: true, reason: "execution" });
    }
    // Chain to any previously-installed handler.
    if (previousOnExecuted) {
        previousOnExecuted.apply(this, arguments);
    }
};
const onRemoved = nodeType.prototype.onRemoved;
nodeType.prototype.onRemoved = function () {

View File

@@ -424,7 +424,6 @@ export class MaskEditorIntegration {
boundsPos: { x: bounds.x, y: bounds.y },
maskSize: { width: bounds.width, height: bounds.height }
});
// Use the chunk system instead of direct canvas manipulation
this.maskTool.setMask(maskAsImage);
// Update node preview using PreviewUtils
await updateNodePreview(this.canvas, this.node, true);

View File

@@ -3,11 +3,15 @@ import { createCanvas } from "./utils/CommonUtils.js";
const log = createModuleLogger('Mask_tool');
export class MaskTool {
constructor(canvasInstance, callbacks = {}) {
// Track strokes during drawing for efficient overlay updates
this.currentStrokePoints = [];
this.ACTIVE_MASK_UPDATE_DELAY = 16; // ~60fps throttling
this.SHAPE_PREVIEW_THROTTLE_DELAY = 16; // ~60fps throttling for preview
this.canvasInstance = canvasInstance;
this.mainCanvas = canvasInstance.canvas;
this.onStateChange = callbacks.onStateChange || null;
// Initialize stroke tracking for overlay drawing
this.currentStrokePoints = [];
// Initialize chunked mask system
this.maskChunks = new Map();
this.chunkSize = 512;
@@ -28,8 +32,9 @@ export class MaskTool {
this.isOverlayVisible = true;
this.isActive = false;
this.brushSize = 20;
this.brushStrength = 0.5;
this.brushHardness = 0.5;
this._brushStrength = 0.5;
this._brushHardness = 0.5;
this._previewOpacity = 0.5; // Default 50% opacity for preview
this.isDrawing = false;
this.lastPosition = null;
const { canvas: previewCanvas, ctx: previewCtx } = createCanvas(1, 1, '2d', { willReadFrequently: true });
@@ -79,8 +84,27 @@ export class MaskTool {
this.canvasInstance.canvas.parentElement.appendChild(this.previewCanvas);
}
}
// Getters for brush properties.
// Read-only views over the private backing fields; writes must go through
// setBrushStrength / setBrushHardness / setPreviewOpacity, which clamp to [0, 1].
// Current brush paint strength (0..1), used as stroke alpha when drawing on chunks.
get brushStrength() {
    return this._brushStrength;
}
// Current brush edge hardness (0..1); 1 paints a solid stroke, lower values
// feather the edge with a radial gradient.
get brushHardness() {
    return this._brushHardness;
}
// Opacity (0..1) applied to the mask preview / stroke overlay rendering.
get previewOpacity() {
    return this._previewOpacity;
}
setBrushHardness(hardness) {
this.brushHardness = Math.max(0, Math.min(1, hardness));
this._brushHardness = Math.max(0, Math.min(1, hardness));
}
setPreviewOpacity(opacity) {
this._previewOpacity = Math.max(0, Math.min(1, opacity));
// Update the stroke overlay canvas opacity when preview opacity changes
if (this.canvasInstance.canvasRenderer && this.canvasInstance.canvasRenderer.strokeOverlayCanvas) {
this.canvasInstance.canvasRenderer.strokeOverlayCanvas.style.opacity = String(this._previewOpacity);
}
// Trigger canvas render to update mask display opacity
this.canvasInstance.render();
}
initMaskCanvas() {
// Initialize chunked system
@@ -671,16 +695,17 @@ export class MaskTool {
this.brushSize = Math.max(1, size);
}
setBrushStrength(strength) {
this.brushStrength = Math.max(0, Math.min(1, strength));
this._brushStrength = Math.max(0, Math.min(1, strength));
}
handleMouseDown(worldCoords, viewCoords) {
if (!this.isActive)
return;
this.isDrawing = true;
this.lastPosition = worldCoords;
// Activate chunks around the drawing position for performance
this.updateActiveChunksForDrawing(worldCoords);
this.draw(worldCoords);
// Initialize stroke tracking for live preview
this.currentStrokePoints = [worldCoords];
// Clear any previous stroke overlay
this.canvasInstance.canvasRenderer.clearMaskStrokeOverlay();
this.clearPreview();
}
handleMouseMove(worldCoords, viewCoords) {
@@ -689,14 +714,69 @@ export class MaskTool {
}
if (!this.isActive || !this.isDrawing)
return;
// Dynamically update active chunks as user moves while drawing
this.updateActiveChunksForDrawing(worldCoords);
this.draw(worldCoords);
// Add point to stroke tracking
this.currentStrokePoints.push(worldCoords);
// Draw interpolated segments for smooth strokes without gaps
if (this.lastPosition) {
// Calculate distance between last and current position
const dx = worldCoords.x - this.lastPosition.x;
const dy = worldCoords.y - this.lastPosition.y;
const distance = Math.sqrt(dx * dx + dy * dy);
// If distance is small, just draw a single segment
if (distance < this.brushSize / 4) {
this.canvasInstance.canvasRenderer.drawMaskStrokeSegment(this.lastPosition, worldCoords);
}
else {
// Interpolate points for smooth drawing without gaps
const interpolatedPoints = this.interpolatePoints(this.lastPosition, worldCoords, distance);
// Draw all interpolated segments
for (let i = 0; i < interpolatedPoints.length - 1; i++) {
this.canvasInstance.canvasRenderer.drawMaskStrokeSegment(interpolatedPoints[i], interpolatedPoints[i + 1]);
}
}
}
this.lastPosition = worldCoords;
}
/**
* Interpolates points between two positions to create smooth strokes without gaps
* Based on the BrushTool's approach for eliminating dotted lines during fast drawing
*/
interpolatePoints(start, end, distance) {
const points = [];
// Calculate number of interpolated points based on brush size
// More points = smoother line
const stepSize = Math.max(1, this.brushSize / 6); // Adjust divisor for smoothness
const numSteps = Math.ceil(distance / stepSize);
// Always include start point
points.push(start);
// Interpolate intermediate points
for (let i = 1; i < numSteps; i++) {
const t = i / numSteps;
points.push({
x: start.x + (end.x - start.x) * t,
y: start.y + (end.y - start.y) * t
});
}
// Always include end point
points.push(end);
return points;
}
/**
* Called when viewport changes during drawing to update stroke overlay
* This ensures the stroke preview scales correctly with zoom changes
*/
handleViewportChange() {
if (this.isDrawing && this.currentStrokePoints.length > 1) {
// Redraw the entire stroke overlay with new viewport settings
this.canvasInstance.canvasRenderer.redrawMaskStrokeOverlay(this.currentStrokePoints);
}
}
handleMouseLeave() {
this.previewVisible = false;
this.clearPreview();
// Clear overlay canvases when mouse leaves
this.canvasInstance.canvasRenderer.clearOverlay();
this.canvasInstance.canvasRenderer.clearMaskStrokeOverlay();
}
handleMouseEnter() {
this.previewVisible = true;
@@ -706,10 +786,15 @@ export class MaskTool {
return;
if (this.isDrawing) {
this.isDrawing = false;
// Commit the stroke from overlay to actual mask chunks
this.commitStrokeToChunks();
// Clear stroke overlay and reset state
this.canvasInstance.canvasRenderer.clearMaskStrokeOverlay();
this.currentStrokePoints = [];
this.lastPosition = null;
this.currentDrawingChunk = null;
// After drawing is complete, update active canvas to show all chunks
this.updateActiveMaskCanvas(true); // forceShowAll = true
this.updateActiveMaskCanvas(true); // Force full update
this.completeMaskOperation();
this.drawBrushPreview(viewCoords);
}
@@ -724,6 +809,38 @@ export class MaskTool {
// This prevents unnecessary recomposition during drawing
this.updateActiveCanvasIfNeeded(this.lastPosition, worldCoords);
}
/**
* Commits the current stroke from overlay to actual mask chunks
* This replays the entire stroke path with interpolation to ensure pixel-perfect accuracy
*/
commitStrokeToChunks() {
if (this.currentStrokePoints.length < 2) {
return; // Need at least 2 points for a stroke
}
log.debug(`Committing stroke with ${this.currentStrokePoints.length} points to chunks`);
// Replay the entire stroke path with interpolation for smooth, accurate lines
for (let i = 1; i < this.currentStrokePoints.length; i++) {
const startPoint = this.currentStrokePoints[i - 1];
const endPoint = this.currentStrokePoints[i];
// Calculate distance between points
const dx = endPoint.x - startPoint.x;
const dy = endPoint.y - startPoint.y;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance < this.brushSize / 4) {
// Small distance - draw single segment
this.drawOnChunks(startPoint, endPoint);
}
else {
// Large distance - interpolate for smooth line without gaps
const interpolatedPoints = this.interpolatePoints(startPoint, endPoint, distance);
// Draw all interpolated segments
for (let j = 0; j < interpolatedPoints.length - 1; j++) {
this.drawOnChunks(interpolatedPoints[j], interpolatedPoints[j + 1]);
}
}
}
log.debug("Stroke committed to chunks successfully with interpolation");
}
/**
* Draws a line between two world coordinates on the appropriate chunks
*/
@@ -767,13 +884,13 @@ export class MaskTool {
chunk.ctx.moveTo(startLocal.x, startLocal.y);
chunk.ctx.lineTo(endLocal.x, endLocal.y);
const gradientRadius = this.brushSize / 2;
if (this.brushHardness === 1) {
chunk.ctx.strokeStyle = `rgba(255, 255, 255, ${this.brushStrength})`;
if (this._brushHardness === 1) {
chunk.ctx.strokeStyle = `rgba(255, 255, 255, ${this._brushStrength})`;
}
else {
const innerRadius = gradientRadius * this.brushHardness;
const innerRadius = gradientRadius * this._brushHardness;
const gradient = chunk.ctx.createRadialGradient(endLocal.x, endLocal.y, innerRadius, endLocal.x, endLocal.y, gradientRadius);
gradient.addColorStop(0, `rgba(255, 255, 255, ${this.brushStrength})`);
gradient.addColorStop(0, `rgba(255, 255, 255, ${this._brushStrength})`);
gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
chunk.ctx.strokeStyle = gradient;
}
@@ -805,28 +922,17 @@ export class MaskTool {
return true; // For now, always draw - more precise intersection can be added later
}
/**
* Updates active canvas when drawing affects chunks with throttling to prevent lag
* During drawing, only updates the affected active chunks for performance
* Updates active canvas when drawing affects chunks
* Since we now use overlay during drawing, this is only called after drawing is complete
*/
updateActiveCanvasIfNeeded(startWorld, endWorld) {
// Calculate which chunks were affected by this drawing operation
const minX = Math.min(startWorld.x, endWorld.x) - this.brushSize;
const maxX = Math.max(startWorld.x, endWorld.x) + this.brushSize;
const minY = Math.min(startWorld.y, endWorld.y) - this.brushSize;
const maxY = Math.max(startWorld.y, endWorld.y) + this.brushSize;
const affectedChunkMinX = Math.floor(minX / this.chunkSize);
const affectedChunkMinY = Math.floor(minY / this.chunkSize);
const affectedChunkMaxX = Math.floor(maxX / this.chunkSize);
const affectedChunkMaxY = Math.floor(maxY / this.chunkSize);
// During drawing, only update affected chunks that are active for performance
if (this.isDrawing) {
// Use throttled partial update for active chunks only
this.scheduleThrottledActiveMaskUpdate(affectedChunkMinX, affectedChunkMinY, affectedChunkMaxX, affectedChunkMaxY);
}
else {
// This method is now simplified - we only update after drawing is complete
// The overlay handles all live preview, so we don't need complex chunk activation
if (!this.isDrawing) {
// Not drawing - do full update to show all chunks
this.updateActiveMaskCanvas(true);
}
// During drawing, we don't update chunks at all - overlay handles preview
}
/**
* Schedules a throttled update of the active mask canvas to prevent excessive redraws
@@ -903,18 +1009,12 @@ export class MaskTool {
}
drawBrushPreview(viewCoords) {
if (!this.previewVisible || this.isDrawing) {
this.clearPreview();
this.canvasInstance.canvasRenderer.clearOverlay();
return;
}
this.clearPreview();
const zoom = this.canvasInstance.viewport.zoom;
const radius = (this.brushSize / 2) * zoom;
this.previewCtx.beginPath();
this.previewCtx.arc(viewCoords.x, viewCoords.y, radius, 0, 2 * Math.PI);
this.previewCtx.strokeStyle = 'rgba(255, 255, 255, 0.8)';
this.previewCtx.lineWidth = 1;
this.previewCtx.setLineDash([2, 4]);
this.previewCtx.stroke();
// Use overlay canvas instead of preview canvas for brush cursor
const worldCoords = this.canvasInstance.lastMousePosition;
this.canvasInstance.canvasRenderer.drawMaskBrushCursor(worldCoords);
}
clearPreview() {
this.previewCtx.clearRect(0, 0, this.previewCanvas.width, this.previewCanvas.height);
@@ -1252,6 +1352,23 @@ export class MaskTool {
this.canvasInstance.render();
log.info("Cleared all mask data from all chunks");
}
/**
* Clears all chunks and restores mask from saved state
* This is used during undo/redo operations to ensure clean state restoration
*/
restoreMaskFromSavedState(savedMaskCanvas) {
// First, clear ALL chunks to ensure no leftover data
this.clearAllMaskChunks();
// Now apply the saved mask state to chunks
if (savedMaskCanvas.width > 0 && savedMaskCanvas.height > 0) {
// Apply the saved mask to the chunk system at the correct position
const bounds = this.canvasInstance.outputAreaBounds;
this.applyMaskCanvasToChunks(savedMaskCanvas, this.x, this.y);
}
// Update the active mask canvas to show the restored state
this.updateActiveMaskCanvas(true);
log.debug("Restored mask from saved state with clean chunk system");
}
getMask() {
// Return the current active mask canvas which shows all chunks
// Only update if there are pending changes to avoid unnecessary redraws
@@ -1345,13 +1462,44 @@ export class MaskTool {
this.isOverlayVisible = !this.isOverlayVisible;
log.info(`Mask overlay visibility toggled to: ${this.isOverlayVisible}`);
}
setMask(image) {
// Clear existing mask chunks in the output area first
setMask(image, isFromInputMask = false) {
const bounds = this.canvasInstance.outputAreaBounds;
this.clearMaskInArea(bounds.x, bounds.y, image.width, image.height);
// Add the new mask using the chunk system
this.addMask(image);
log.info(`MaskTool set new mask using chunk system at bounds (${bounds.x}, ${bounds.y})`);
if (isFromInputMask) {
// For INPUT MASK - process black background to transparent using luminance
// Center like input images
const centerX = bounds.x + (bounds.width - image.width) / 2;
const centerY = bounds.y + (bounds.height - image.height) / 2;
// Prepare mask where alpha = luminance (white = applied, black = transparent)
const { canvas: maskCanvas, ctx } = createCanvas(image.width, image.height, '2d', { willReadFrequently: true });
if (!ctx)
throw new Error("Could not create mask processing context");
ctx.drawImage(image, 0, 0);
const imgData = ctx.getImageData(0, 0, image.width, image.height);
const data = imgData.data;
for (let i = 0; i < data.length; i += 4) {
const r = data[i], g = data[i + 1], b = data[i + 2];
const lum = Math.round(0.299 * r + 0.587 * g + 0.114 * b);
data[i] = 255; // force white color (color channels ignored downstream)
data[i + 1] = 255;
data[i + 2] = 255;
data[i + 3] = lum; // alpha encodes mask strength: white -> strong, black -> 0
}
ctx.putImageData(imgData, 0, 0);
// Clear target area and apply to chunked system at centered position
this.clearMaskInArea(centerX, centerY, image.width, image.height);
this.applyMaskCanvasToChunks(maskCanvas, centerX, centerY);
// Refresh state and UI
this.updateActiveMaskCanvas(true);
this.canvasInstance.canvasState.saveMaskState();
this.canvasInstance.render();
log.info(`MaskTool set INPUT MASK at centered position (${centerX}, ${centerY}) using luminance as alpha`);
}
else {
// For SAM Detector and other sources - just clear and add without processing
this.clearMaskInArea(bounds.x, bounds.y, bounds.width, bounds.height);
this.addMask(image);
log.info(`MaskTool set mask using chunk system at bounds (${bounds.x}, ${bounds.y})`);
}
}
/**
* Clears mask data in a specific area by clearing affected chunks

View File

@@ -6,6 +6,7 @@ import { uploadCanvasAsImage, uploadImageBlob } from "./utils/ImageUploadUtils.j
import { processImageToMask } from "./utils/MaskProcessingUtils.js";
import { convertToImage } from "./utils/ImageUtils.js";
import { updateNodePreview } from "./utils/PreviewUtils.js";
import { validateAndFixClipspace } from "./utils/ClipspaceUtils.js";
const log = createModuleLogger('SAMDetectorIntegration');
/**
* SAM Detector Integration for LayerForge
@@ -241,35 +242,61 @@ async function handleSAMDetectorResult(node, resultImage) {
// Try to reload the image with a fresh request
log.debug("Attempting to reload SAM result image");
const originalSrc = resultImage.src;
// Add cache-busting parameter to force fresh load
const url = new URL(originalSrc);
url.searchParams.set('_t', Date.now().toString());
await new Promise((resolve, reject) => {
const img = new Image();
img.crossOrigin = "anonymous";
img.onload = () => {
// Copy the loaded image data to the original image
resultImage.src = img.src;
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("SAM result image reloaded successfully", {
width: img.width,
height: img.height,
originalSrc: originalSrc,
newSrc: img.src
// Check if it's a data URL (base64) - don't add parameters to data URLs
if (originalSrc.startsWith('data:')) {
log.debug("Image is a data URL, skipping reload with parameters");
// For data URLs, just ensure the image is loaded
if (!resultImage.complete || resultImage.naturalWidth === 0) {
await new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => {
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("Data URL image loaded successfully", {
width: img.width,
height: img.height
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to load data URL image", error);
reject(error);
};
img.src = originalSrc; // Use original src without modifications
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to reload SAM result image", {
originalSrc: originalSrc,
newSrc: url.toString(),
error: error
});
reject(error);
};
img.src = url.toString();
});
}
}
else {
// For regular URLs, add cache-busting parameter
const url = new URL(originalSrc);
url.searchParams.set('_t', Date.now().toString());
await new Promise((resolve, reject) => {
const img = new Image();
img.crossOrigin = "anonymous";
img.onload = () => {
// Copy the loaded image data to the original image
resultImage.src = img.src;
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("SAM result image reloaded successfully", {
width: img.width,
height: img.height,
originalSrc: originalSrc,
newSrc: img.src
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to reload SAM result image", {
originalSrc: originalSrc,
newSrc: url.toString(),
error: error
});
reject(error);
};
img.src = url.toString();
});
}
}
}
catch (error) {
@@ -289,27 +316,37 @@ async function handleSAMDetectorResult(node, resultImage) {
// Apply mask to LayerForge canvas using MaskTool.setMask method
log.debug("Checking canvas and maskTool availability", {
hasCanvas: !!canvas,
hasCanvasProperty: !!canvas.canvas,
canvasCanvasKeys: canvas.canvas ? Object.keys(canvas.canvas) : [],
hasMaskTool: !!canvas.maskTool,
hasCanvasMaskTool: !!(canvas.canvas && canvas.canvas.maskTool),
maskToolType: typeof canvas.maskTool,
canvasMaskToolType: canvas.canvas ? typeof canvas.canvas.maskTool : 'undefined',
canvasKeys: Object.keys(canvas)
});
if (!canvas.maskTool) {
// Get the actual Canvas object and its maskTool
const actualCanvas = canvas.canvas || canvas;
const maskTool = actualCanvas.maskTool;
if (!maskTool) {
log.error("MaskTool is not available. Canvas state:", {
hasCanvas: !!canvas,
hasActualCanvas: !!actualCanvas,
canvasConstructor: canvas.constructor.name,
actualCanvasConstructor: actualCanvas ? actualCanvas.constructor.name : 'undefined',
canvasKeys: Object.keys(canvas),
maskToolValue: canvas.maskTool
actualCanvasKeys: actualCanvas ? Object.keys(actualCanvas) : [],
maskToolValue: maskTool
});
throw new Error("Mask tool not available or not initialized");
}
log.debug("Applying SAM mask to canvas using addMask method");
// Use the addMask method which overlays on existing mask without clearing it
canvas.maskTool.addMask(maskAsImage);
log.debug("Applying SAM mask to canvas using setMask method");
// Use the setMask method which clears existing mask and sets new one
maskTool.setMask(maskAsImage);
// Update canvas and save state (same as MaskEditorIntegration)
canvas.render();
canvas.saveState();
actualCanvas.render();
actualCanvas.saveState();
// Update node preview using PreviewUtils
await updateNodePreview(canvas, node, true);
await updateNodePreview(actualCanvas, node, true);
log.info("SAM Detector mask applied successfully to LayerForge canvas");
// Show success notification
showSuccessNotification("SAM Detector mask applied to LayerForge!");
@@ -324,6 +361,8 @@ async function handleSAMDetectorResult(node, resultImage) {
node.samOriginalImgSrc = null;
}
}
// Store original onClipspaceEditorSave function to restore later
let originalOnClipspaceEditorSave = null;
// Function to setup SAM Detector hook in menu options
export function setupSAMDetectorHook(node, options) {
// Hook into "Open in SAM Detector" with delay since Impact Pack adds it asynchronously
@@ -337,18 +376,56 @@ export function setupSAMDetectorHook(node, options) {
try {
log.info("Intercepted 'Open in SAM Detector' - automatically sending to clipspace and starting monitoring");
// Automatically send canvas to clipspace and start monitoring
if (node.canvasWidget && node.canvasWidget.canvas) {
const canvas = node.canvasWidget; // canvasWidget IS the Canvas object
// Use ImageUploadUtils to upload canvas
if (node.canvasWidget) {
const canvasWidget = node.canvasWidget;
const canvas = canvasWidget.canvas || canvasWidget; // Get actual Canvas object
// Use ImageUploadUtils to upload canvas and get server URL (Impact Pack compatibility)
const uploadResult = await uploadCanvasAsImage(canvas, {
filenamePrefix: 'layerforge-sam',
nodeId: node.id
});
log.debug("Uploaded canvas for SAM Detector", {
filename: uploadResult.filename,
imageUrl: uploadResult.imageUrl,
width: uploadResult.imageElement.width,
height: uploadResult.imageElement.height
});
// Set the image to the node for clipspace
node.imgs = [uploadResult.imageElement];
node.clipspaceImg = uploadResult.imageElement;
// Ensure proper clipspace structure for updated ComfyUI
if (!ComfyApp.clipspace) {
ComfyApp.clipspace = {};
}
// Set up clipspace with proper indices
ComfyApp.clipspace.imgs = [uploadResult.imageElement];
ComfyApp.clipspace.selectedIndex = 0;
ComfyApp.clipspace.combinedIndex = 0;
ComfyApp.clipspace.img_paste_mode = 'selected';
// Copy to ComfyUI clipspace
ComfyApp.copyToClipspace(node);
// Override onClipspaceEditorSave to fix clipspace structure before pasteFromClipspace
if (!originalOnClipspaceEditorSave) {
originalOnClipspaceEditorSave = ComfyApp.onClipspaceEditorSave;
ComfyApp.onClipspaceEditorSave = function () {
log.debug("SAM Detector onClipspaceEditorSave called, using unified clipspace validation");
// Use the unified clipspace validation function
const isValid = validateAndFixClipspace();
if (!isValid) {
log.error("Clipspace validation failed, cannot proceed with paste");
return;
}
// Call the original function
if (originalOnClipspaceEditorSave) {
originalOnClipspaceEditorSave.call(ComfyApp);
}
// Restore the original function after use
if (originalOnClipspaceEditorSave) {
ComfyApp.onClipspaceEditorSave = originalOnClipspaceEditorSave;
originalOnClipspaceEditorSave = null;
}
};
}
// Start monitoring for SAM Detector results
startSAMDetectorMonitoring(node);
log.info("Canvas automatically sent to clipspace and monitoring started");

View File

@@ -1,10 +1,9 @@
import { createModuleLogger } from "./LoggerUtils.js";
import { showNotification, showInfoNotification } from "./NotificationUtils.js";
import { withErrorHandling, createValidationError, createNetworkError, createFileError } from "../ErrorHandler.js";
import { safeClipspacePaste } from "./ClipspaceUtils.js";
// @ts-ignore
import { api } from "../../../scripts/api.js";
// @ts-ignore
import { ComfyApp } from "../../../scripts/app.js";
const log = createModuleLogger('ClipboardManager');
export class ClipboardManager {
constructor(canvas) {
@@ -39,7 +38,12 @@ export class ClipboardManager {
*/
this.tryClipspacePaste = withErrorHandling(async (addMode) => {
log.info("Attempting to paste from ComfyUI Clipspace");
ComfyApp.pasteFromClipspace(this.canvas.node);
// Use the unified clipspace validation and paste function
const pasteSuccess = safeClipspacePaste(this.canvas.node);
if (!pasteSuccess) {
log.debug("Safe clipspace paste failed");
return false;
}
if (this.canvas.node.imgs && this.canvas.node.imgs.length > 0) {
const clipspaceImage = this.canvas.node.imgs[0];
if (clipspaceImage && clipspaceImage.src) {

View File

@@ -0,0 +1,99 @@
import { createModuleLogger } from "./LoggerUtils.js";
// @ts-ignore
import { ComfyApp } from "../../../scripts/app.js";
const log = createModuleLogger('ClipspaceUtils');
/**
 * Validates and fixes ComfyUI clipspace structure to prevent
 * 'Cannot read properties of undefined' errors before pasting.
 * @returns {boolean} - True if clipspace is valid and ready to use, false otherwise
 */
export function validateAndFixClipspace() {
    log.debug("Validating and fixing clipspace structure");
    const cs = ComfyApp.clipspace;
    // Bail out early when clipspace is absent or holds no images at all.
    if (!cs) {
        log.debug("ComfyUI clipspace is not available");
        return false;
    }
    if (!cs.imgs || cs.imgs.length === 0) {
        log.debug("ComfyUI clipspace has no images");
        return false;
    }
    log.debug("Current clipspace state:", {
        hasImgs: !!cs.imgs,
        imgsLength: cs.imgs?.length,
        selectedIndex: cs.selectedIndex,
        combinedIndex: cs.combinedIndex,
        img_paste_mode: cs.img_paste_mode
    });
    // Backfill the indices/mode ComfyUI's paste path expects ('== null'
    // deliberately matches both null and undefined).
    if (cs.selectedIndex == null) {
        cs.selectedIndex = 0;
        log.debug("Fixed clipspace selectedIndex to 0");
    }
    if (cs.combinedIndex == null) {
        cs.combinedIndex = 0;
        log.debug("Fixed clipspace combinedIndex to 0");
    }
    if (!cs.img_paste_mode) {
        cs.img_paste_mode = 'selected';
        log.debug("Fixed clipspace img_paste_mode to 'selected'");
    }
    // Clamp indices into the range of available images.
    const maxIndex = cs.imgs.length - 1;
    if (cs.selectedIndex > maxIndex) {
        cs.selectedIndex = maxIndex;
        log.debug(`Fixed clipspace selectedIndex to ${maxIndex} (max available)`);
    }
    if (cs.combinedIndex > maxIndex) {
        cs.combinedIndex = maxIndex;
        log.debug(`Fixed clipspace combinedIndex to ${maxIndex} (max available)`);
    }
    // The image at combinedIndex must exist and carry a src; otherwise retarget
    // to the first usable image, and fail if none exists.
    const combinedImg = cs.imgs[cs.combinedIndex];
    if (!combinedImg || !combinedImg.src) {
        log.debug("Image at combinedIndex is missing or has no src, trying to find valid image");
        const firstValid = cs.imgs.findIndex((img) => img && img.src);
        if (firstValid !== -1) {
            cs.combinedIndex = firstValid;
            log.debug(`Fixed combinedIndex to ${firstValid} (first valid image)`);
        }
        const finalImg = cs.imgs[cs.combinedIndex];
        if (!finalImg || !finalImg.src) {
            log.error("No valid images found in clipspace after attempting fixes");
            return false;
        }
    }
    log.debug("Final clipspace structure:", {
        selectedIndex: cs.selectedIndex,
        combinedIndex: cs.combinedIndex,
        img_paste_mode: cs.img_paste_mode,
        imgsLength: cs.imgs?.length,
        combinedImgSrc: cs.imgs[cs.combinedIndex]?.src?.substring(0, 50) + '...'
    });
    return true;
}
/**
* Safely calls ComfyApp.pasteFromClipspace after validating clipspace structure
* @param {any} node - The ComfyUI node to paste to
* @returns {boolean} - True if paste was successful, false otherwise
*/
export function safeClipspacePaste(node) {
log.debug("Attempting safe clipspace paste");
if (!validateAndFixClipspace()) {
log.debug("Clipspace validation failed, cannot paste");
return false;
}
try {
ComfyApp.pasteFromClipspace(node);
log.debug("Successfully called pasteFromClipspace");
return true;
}
catch (error) {
log.error("Error calling pasteFromClipspace:", error);
return false;
}
}

View File

@@ -314,3 +314,102 @@ export function canvasToMaskImage(canvas) {
img.src = canvas.toDataURL();
});
}
/**
* Scales an image to fit within specified bounds while maintaining aspect ratio
* @param image - Image to scale
* @param targetWidth - Target width to fit within
* @param targetHeight - Target height to fit within
* @returns Promise with scaled Image element
*/
export async function scaleImageToFit(image, targetWidth, targetHeight) {
const scale = Math.min(targetWidth / image.width, targetHeight / image.height);
const scaledWidth = Math.max(1, Math.round(image.width * scale));
const scaledHeight = Math.max(1, Math.round(image.height * scale));
const { canvas, ctx } = createCanvas(scaledWidth, scaledHeight, '2d', { willReadFrequently: true });
if (!ctx)
throw new Error("Could not create scaled image context");
ctx.drawImage(image, 0, 0, scaledWidth, scaledHeight);
return new Promise((resolve, reject) => {
const scaledImg = new Image();
scaledImg.onload = () => resolve(scaledImg);
scaledImg.onerror = reject;
scaledImg.src = canvas.toDataURL();
});
}
/**
* Unified tensor to image data conversion
* Handles both RGB images and grayscale masks
* @param tensor - Input tensor data
* @param mode - 'rgb' for images or 'grayscale' for masks
* @returns ImageData object
*/
export function tensorToImageData(tensor, mode = 'rgb') {
try {
const shape = tensor.shape;
const height = shape[1];
const width = shape[2];
const channels = shape[3] || 1; // Default to 1 for masks
log.debug("Converting tensor:", { shape, channels, mode });
const imageData = new ImageData(width, height);
const data = new Uint8ClampedArray(width * height * 4);
const flatData = tensor.data;
const pixelCount = width * height;
const min = tensor.min_val ?? 0;
const max = tensor.max_val ?? 1;
const denom = (max - min) || 1;
for (let i = 0; i < pixelCount; i++) {
const pixelIndex = i * 4;
const tensorIndex = i * channels;
let lum;
if (mode === 'grayscale' || channels === 1) {
lum = flatData[tensorIndex];
}
else {
// Compute luminance for RGB
const r = flatData[tensorIndex + 0] ?? 0;
const g = flatData[tensorIndex + 1] ?? 0;
const b = flatData[tensorIndex + 2] ?? 0;
lum = 0.299 * r + 0.587 * g + 0.114 * b;
}
let norm = (lum - min) / denom;
if (!isFinite(norm))
norm = 0;
norm = Math.max(0, Math.min(1, norm));
const value = Math.round(norm * 255);
if (mode === 'grayscale') {
// For masks: RGB = value, A = 255 (MaskTool reads luminance)
data[pixelIndex] = value;
data[pixelIndex + 1] = value;
data[pixelIndex + 2] = value;
data[pixelIndex + 3] = 255;
}
else {
// For images: RGB from channels, A = 255
for (let c = 0; c < Math.min(3, channels); c++) {
const channelValue = flatData[tensorIndex + c];
const channelNorm = (channelValue - min) / denom;
data[pixelIndex + c] = Math.round(channelNorm * 255);
}
data[pixelIndex + 3] = 255;
}
}
imageData.data.set(data);
return imageData;
}
catch (error) {
log.error("Error converting tensor:", error);
return null;
}
}
/**
* Creates an HTMLImageElement from ImageData
* @param imageData - Input ImageData
* @returns Promise with HTMLImageElement
*/
export async function createImageFromImageData(imageData) {
const { canvas, ctx } = createCanvas(imageData.width, imageData.height, '2d', { willReadFrequently: true });
if (!ctx)
throw new Error("Could not create canvas context");
ctx.putImageData(imageData, 0, 0);
return await createImageFromSource(canvas.toDataURL());
}

View File

@@ -1,7 +1,7 @@
[project]
name = "layerforge"
description = "Photoshop-like layered canvas editor for your ComfyUI workflow. This node is perfect for complex compositing, inpainting, and outpainting, featuring multi-layer support, masking, blend modes, and precise transformations. Includes optional AI-powered background removal for streamlined image editing."
version = "1.5.3"
version = "1.5.6"
license = { text = "MIT License" }
dependencies = ["torch", "torchvision", "transformers", "aiohttp", "numpy", "tqdm", "Pillow"]

View File

@@ -84,11 +84,18 @@ export class Canvas {
node: ComfyNode;
offscreenCanvas: HTMLCanvasElement;
offscreenCtx: CanvasRenderingContext2D | null;
overlayCanvas: HTMLCanvasElement;
overlayCtx: CanvasRenderingContext2D;
onHistoryChange: ((historyInfo: { canUndo: boolean; canRedo: boolean; }) => void) | undefined;
onViewportChange: (() => void) | null;
onStateChange: (() => void) | undefined;
pendingBatchContext: any;
pendingDataCheck: number | null;
pendingInputDataCheck: number | null;
inputDataLoaded: boolean;
lastLoadedImageSrc?: string;
lastLoadedLinkId?: number;
lastLoadedMaskLinkId?: number;
previewVisible: boolean;
requestSaveState: () => void;
viewport: Viewport;
@@ -122,10 +129,22 @@ export class Canvas {
});
this.offscreenCanvas = offscreenCanvas;
this.offscreenCtx = offscreenCtx;
// Create overlay canvas for brush cursor and other lightweight overlays
const { canvas: overlayCanvas, ctx: overlayCtx } = createCanvas(0, 0, '2d', {
alpha: true,
willReadFrequently: false
});
if (!overlayCtx) throw new Error("Could not create overlay canvas context");
this.overlayCanvas = overlayCanvas;
this.overlayCtx = overlayCtx;
this.canvasContainer = null;
this.dataInitialized = false;
this.pendingDataCheck = null;
this.pendingInputDataCheck = null;
this.inputDataLoaded = false;
this.imageCache = new Map();
this.requestSaveState = () => {};
@@ -471,6 +490,11 @@ export class Canvas {
};
const handleExecutionStart = () => {
// Check for input data when execution starts, but don't reset the flag
log.debug('Execution started, checking for input data...');
// On start, only allow images; mask should load on mask-connect or after execution completes
this.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: 'execution_start' });
if (getAutoRefreshValue()) {
lastExecutionStartTime = Date.now();
// Store a snapshot of the context for the upcoming batch
@@ -494,6 +518,10 @@ export class Canvas {
};
const handleExecutionSuccess = async () => {
// Always check for input data after execution completes
log.debug('Execution success, checking for input data...');
await this.canvasIO.checkForInputData({ allowImage: true, allowMask: true, reason: 'execution_success' });
if (getAutoRefreshValue()) {
log.info('Auto-refresh triggered, importing latest images.');

View File

@@ -2,6 +2,7 @@ import { createCanvas } from "./utils/CommonUtils.js";
import { createModuleLogger } from "./utils/LoggerUtils.js";
import { showErrorNotification } from "./utils/NotificationUtils.js";
import { webSocketManager } from "./utils/WebSocketManager.js";
import { scaleImageToFit, createImageFromSource, tensorToImageData, createImageFromImageData } from "./utils/ImageUtils.js";
import type { Canvas } from './Canvas';
import type { Layer, Shape } from './types';
@@ -282,22 +283,12 @@ export class CanvasIO {
try {
log.debug("Adding input to canvas:", { inputImage });
const { canvas: tempCanvas, ctx: tempCtx } = createCanvas(inputImage.width, inputImage.height);
if (!tempCtx) throw new Error("Could not create temp context");
// Use unified tensorToImageData for RGB image
const imageData = tensorToImageData(inputImage, 'rgb');
if (!imageData) throw new Error("Failed to convert input image tensor");
const imgData = new ImageData(
new Uint8ClampedArray(inputImage.data),
inputImage.width,
inputImage.height
);
tempCtx.putImageData(imgData, 0, 0);
const image = new Image();
await new Promise((resolve, reject) => {
image.onload = resolve;
image.onerror = reject;
image.src = tempCanvas.toDataURL();
});
// Create HTMLImageElement from ImageData
const image = await createImageFromImageData(imageData);
const bounds = this.canvas.outputAreaBounds;
const scale = Math.min(
@@ -333,23 +324,10 @@ export class CanvasIO {
throw new Error("Invalid tensor data");
}
const { canvas, ctx } = createCanvas(tensor.width, tensor.height, '2d', { willReadFrequently: true });
if (!ctx) throw new Error("Could not create canvas context");
const imageData = tensorToImageData(tensor, 'rgb');
if (!imageData) throw new Error("Failed to convert tensor to image data");
const imageData = new ImageData(
new Uint8ClampedArray(tensor.data),
tensor.width,
tensor.height
);
ctx.putImageData(imageData, 0, 0);
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => resolve(img);
img.onerror = (e) => reject(new Error("Failed to load image: " + e));
img.src = canvas.toDataURL();
});
return await createImageFromImageData(imageData);
} catch (error) {
log.error("Error converting tensor to image:", error);
throw error;
@@ -372,6 +350,16 @@ export class CanvasIO {
try {
log.info("Starting node data initialization...");
// First check for input data from the backend (new feature)
await this.checkForInputData();
// If we've already loaded input data, don't continue with old initialization
if (this.canvas.inputDataLoaded) {
log.debug("Input data already loaded, skipping old initialization");
this.canvas.dataInitialized = true;
return;
}
if (!this.canvas.node || !(this.canvas.node as any).inputs) {
log.debug("Node or inputs not ready");
return this.scheduleDataCheck();
@@ -379,6 +367,14 @@ export class CanvasIO {
if ((this.canvas.node as any).inputs[0] && (this.canvas.node as any).inputs[0].link) {
const imageLinkId = (this.canvas.node as any).inputs[0].link;
// Check if we already loaded this link
if (this.canvas.lastLoadedLinkId === imageLinkId) {
log.debug(`Link ${imageLinkId} already loaded via new system, marking as initialized`);
this.canvas.dataInitialized = true;
return;
}
const imageData = (window as any).app.nodeOutputs[imageLinkId];
if (imageData) {
@@ -389,6 +385,9 @@ export class CanvasIO {
log.debug("Image data not available yet");
return this.scheduleDataCheck();
}
} else {
// No input connected, mark as initialized to stop repeated checks
this.canvas.dataInitialized = true;
}
if ((this.canvas.node as any).inputs[1] && (this.canvas.node as any).inputs[1].link) {
@@ -407,6 +406,439 @@ export class CanvasIO {
}
}
async checkForInputData(options?: { allowImage?: boolean; allowMask?: boolean; reason?: string }): Promise<void> {
try {
const nodeId = this.canvas.node.id;
const allowImage = options?.allowImage ?? true;
const allowMask = options?.allowMask ?? true;
const reason = options?.reason ?? 'unspecified';
log.info(`Checking for input data for node ${nodeId}... opts: image=${allowImage}, mask=${allowMask}, reason=${reason}`);
// Track loaded links separately for image and mask
let imageLoaded = false;
let maskLoaded = false;
let imageChanged = false;
// First, try to get data from connected node's output if available (IMAGES)
if (allowImage && this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link) {
const linkId = this.canvas.node.inputs[0].link;
const graph = (this.canvas.node as any).graph;
// Always check if images have changed first
if (graph) {
const link = graph.links[linkId];
if (link) {
const sourceNode = graph.getNodeById(link.origin_id);
if (sourceNode && sourceNode.imgs && sourceNode.imgs.length > 0) {
// Create current batch identifier (all image sources combined)
const currentBatchImageSrcs = sourceNode.imgs.map((img: HTMLImageElement) => img.src).join('|');
// Check if this is the same link we loaded before
if (this.canvas.lastLoadedLinkId === linkId) {
// Same link, check if images actually changed
if (this.canvas.lastLoadedImageSrc !== currentBatchImageSrcs) {
log.info(`Batch images changed for link ${linkId} (${sourceNode.imgs.length} images), will reload...`);
log.debug(`Previous batch hash: ${this.canvas.lastLoadedImageSrc?.substring(0, 100)}...`);
log.debug(`Current batch hash: ${currentBatchImageSrcs.substring(0, 100)}...`);
imageChanged = true;
// Clear the inputDataLoaded flag to force reload from backend
this.canvas.inputDataLoaded = false;
// Clear the lastLoadedImageSrc to force reload
this.canvas.lastLoadedImageSrc = undefined;
// Clear backend data to force fresh load
fetch(`/layerforge/clear_input_data/${nodeId}`, { method: 'POST' })
.then(() => log.debug("Backend input data cleared due to image change"))
.catch(err => log.error("Failed to clear backend data:", err));
} else {
log.debug(`Batch images for link ${linkId} unchanged (${sourceNode.imgs.length} images)`);
imageLoaded = true;
}
} else {
// Different link or first load
log.info(`New link ${linkId} detected, will load ${sourceNode.imgs.length} images`);
imageChanged = false; // It's not a change, it's a new link
imageLoaded = false; // Need to load
// Reset the inputDataLoaded flag for new link
this.canvas.inputDataLoaded = false;
}
}
}
}
if (!imageLoaded || imageChanged) {
// Reset the inputDataLoaded flag when images change
if (imageChanged) {
this.canvas.inputDataLoaded = false;
log.info("Resetting inputDataLoaded flag due to image change");
}
if ((this.canvas.node as any).graph) {
const graph2 = (this.canvas.node as any).graph;
const link2 = graph2.links[linkId];
if (link2) {
const sourceNode = graph2.getNodeById(link2.origin_id);
if (sourceNode && sourceNode.imgs && sourceNode.imgs.length > 0) {
// The connected node has images in its output - handle multiple images (batch)
log.info(`Found ${sourceNode.imgs.length} image(s) in connected node's output, loading all`);
// Create a combined source identifier for batch detection
const batchImageSrcs = sourceNode.imgs.map((img: HTMLImageElement) => img.src).join('|');
// Mark this link and batch sources as loaded
this.canvas.lastLoadedLinkId = linkId;
this.canvas.lastLoadedImageSrc = batchImageSrcs;
// Don't clear layers - just add new ones
if (imageChanged) {
log.info("Image change detected, will add new layers");
}
// Determine add mode
const fitOnAddWidget = this.canvas.node.widgets.find((w) => w.name === "fit_on_add");
const addMode = (fitOnAddWidget && fitOnAddWidget.value) ? 'fit' : 'center';
// Add all images from the batch as separate layers
for (let i = 0; i < sourceNode.imgs.length; i++) {
const img = sourceNode.imgs[i];
await this.canvas.canvasLayers.addLayerWithImage(
img,
{ name: `Batch Image ${i + 1}` }, // Give each layer a unique name
addMode,
this.canvas.outputAreaBounds
);
log.debug(`Added batch image ${i + 1}/${sourceNode.imgs.length} to canvas`);
}
this.canvas.inputDataLoaded = true;
imageLoaded = true;
log.info(`All ${sourceNode.imgs.length} input images from batch added as separate layers`);
this.canvas.render();
this.canvas.saveState();
}
}
}
}
}
// Check for mask input separately (from nodeOutputs) ONLY when allowed
if (allowMask && this.canvas.node.inputs && this.canvas.node.inputs[1] && this.canvas.node.inputs[1].link) {
const maskLinkId = this.canvas.node.inputs[1].link;
// Check if we already loaded this mask link
if (this.canvas.lastLoadedMaskLinkId === maskLinkId) {
log.debug(`Mask link ${maskLinkId} already loaded`);
maskLoaded = true;
} else {
// Try to get mask tensor from nodeOutputs using origin_id (not link id)
const graph = (this.canvas.node as any).graph;
let maskOutput = null;
if (graph) {
const link = graph.links[maskLinkId];
if (link && link.origin_id) {
// Use origin_id to get the actual node output
const nodeOutput = (window as any).app?.nodeOutputs?.[link.origin_id];
log.debug(`Looking for mask output from origin node ${link.origin_id}, found:`, !!nodeOutput);
if (nodeOutput) {
log.debug(`Node ${link.origin_id} output structure:`, {
hasData: !!nodeOutput.data,
hasShape: !!nodeOutput.shape,
dataType: typeof nodeOutput.data,
shapeType: typeof nodeOutput.shape,
keys: Object.keys(nodeOutput)
});
// Only use if it has actual tensor data
if (nodeOutput.data && nodeOutput.shape) {
maskOutput = nodeOutput;
}
}
}
}
if (maskOutput && maskOutput.data && maskOutput.shape) {
try {
// Derive dimensions from shape or explicit width/height
let width = (maskOutput.width as number) || 0;
let height = (maskOutput.height as number) || 0;
const shape = maskOutput.shape as number[]; // e.g. [1,H,W] or [1,H,W,1]
if ((!width || !height) && Array.isArray(shape)) {
if (shape.length >= 3) {
height = shape[1];
width = shape[2];
} else if (shape.length === 2) {
height = shape[0];
width = shape[1];
}
}
if (!width || !height) {
throw new Error("Cannot determine mask dimensions from nodeOutputs");
}
// Determine channels count
let channels = 1;
if (Array.isArray(shape) && shape.length >= 4) {
channels = shape[3];
} else if ((maskOutput as any).channels) {
channels = (maskOutput as any).channels;
} else {
const len = (maskOutput.data as any).length;
channels = Math.max(1, Math.floor(len / (width * height)));
}
// Use unified tensorToImageData for masks
const maskImageData = tensorToImageData(maskOutput, 'grayscale');
if (!maskImageData) throw new Error("Failed to convert mask tensor to image data");
// Create canvas and put image data
const { canvas: maskCanvas, ctx } = createCanvas(width, height, '2d', { willReadFrequently: true });
if (!ctx) throw new Error("Could not create mask context");
ctx.putImageData(maskImageData, 0, 0);
// Convert to HTMLImageElement
const maskImg = await createImageFromSource(maskCanvas.toDataURL());
// Respect fit_on_add (scale to output area)
const widgets = this.canvas.node.widgets;
const fitOnAddWidget = widgets ? widgets.find((w: any) => w.name === "fit_on_add") : null;
const shouldFit = fitOnAddWidget && fitOnAddWidget.value;
let finalMaskImg: HTMLImageElement = maskImg;
if (shouldFit) {
const bounds = this.canvas.outputAreaBounds;
finalMaskImg = await scaleImageToFit(maskImg, bounds.width, bounds.height);
}
// Apply to MaskTool (centers internally)
if (this.canvas.maskTool) {
this.canvas.maskTool.setMask(finalMaskImg, true);
(this.canvas as any).maskAppliedFromInput = true;
this.canvas.canvasState.saveMaskState();
this.canvas.render();
// Mark this mask link as loaded to avoid re-applying
this.canvas.lastLoadedMaskLinkId = maskLinkId;
maskLoaded = true;
log.info("Applied input mask from nodeOutputs immediately on connection" + (shouldFit ? " (fitted to output area)" : ""));
}
} catch (err) {
log.warn("Failed to apply mask from nodeOutputs immediately; will wait for backend input_mask after execution", err);
}
} else {
// nodeOutputs exist but don't have tensor data yet (need workflow execution)
log.info(`Mask node ${(this.canvas.node as any).graph?.links[maskLinkId]?.origin_id} found but has no tensor data yet. Mask will be applied automatically after workflow execution.`);
// Don't retry - data won't be available until workflow runs
}
}
}
// Only check backend if we have actual inputs connected
const hasImageInput = this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link;
const hasMaskInput = this.canvas.node.inputs && this.canvas.node.inputs[1] && this.canvas.node.inputs[1].link;
// If mask input is disconnected, clear any currently applied mask to ensure full separation
if (!hasMaskInput) {
(this.canvas as any).maskAppliedFromInput = false;
this.canvas.lastLoadedMaskLinkId = undefined;
log.info("Mask input disconnected - cleared mask to enforce separation from input_image");
}
if (!hasImageInput && !hasMaskInput) {
log.debug("No inputs connected, skipping backend check");
this.canvas.inputDataLoaded = true;
return;
}
// Skip backend check during mask connection if we didn't get immediate data
if (reason === "mask_connect" && !maskLoaded) {
log.info("No immediate mask data available during connection, skipping backend check to avoid stale data. Will check after execution.");
return;
}
// Check backend for input data only if we have connected inputs
const response = await fetch(`/layerforge/get_input_data/${nodeId}`);
const result = await response.json();
if (result.success && result.has_input) {
// Dedupe: skip only if backend payload matches last loaded batch hash
let backendBatchHash: string | undefined;
if (result.data?.input_images_batch && Array.isArray(result.data.input_images_batch)) {
backendBatchHash = result.data.input_images_batch.map((i: any) => i.data).join('|');
} else if (result.data?.input_image) {
backendBatchHash = result.data.input_image;
}
// Check mask separately - don't skip if only images are unchanged AND mask is actually connected AND allowed
const shouldCheckMask = hasMaskInput && allowMask;
if (backendBatchHash && this.canvas.lastLoadedImageSrc === backendBatchHash && !shouldCheckMask) {
log.debug("Backend input data unchanged and no mask to check, skipping reload");
this.canvas.inputDataLoaded = true;
return;
} else if (backendBatchHash && this.canvas.lastLoadedImageSrc === backendBatchHash && shouldCheckMask) {
log.debug("Images unchanged but need to check mask, continuing...");
imageLoaded = true; // Mark images as already loaded to skip reloading them
}
// Check if we already loaded image data (by checking the current link)
if (allowImage && !imageLoaded && this.canvas.node.inputs && this.canvas.node.inputs[0] && this.canvas.node.inputs[0].link) {
const currentLinkId = this.canvas.node.inputs[0].link;
if (this.canvas.lastLoadedLinkId !== currentLinkId) {
// Mark this link as loaded
this.canvas.lastLoadedLinkId = currentLinkId;
imageLoaded = false; // Will load from backend
}
}
// Check for mask data from backend ONLY when mask input is actually connected AND allowed
// Only reset if the mask link actually changed
if (allowMask && hasMaskInput && this.canvas.node.inputs && this.canvas.node.inputs[1]) {
const currentMaskLinkId = this.canvas.node.inputs[1].link;
// Only reset if this is a different mask link than what we loaded before
if (this.canvas.lastLoadedMaskLinkId !== currentMaskLinkId) {
maskLoaded = false;
log.debug(`New mask input detected (${currentMaskLinkId}), will check backend for mask data`);
} else {
log.debug(`Same mask input (${currentMaskLinkId}), mask already loaded`);
maskLoaded = true;
}
} else {
// No mask input connected, or mask loading not allowed right now
maskLoaded = true; // Mark as loaded to skip mask processing
if (!allowMask) {
log.debug("Mask loading is currently disabled by caller, skipping mask check");
} else {
log.debug("No mask input connected, skipping mask check");
}
}
log.info("Input data found from backend, adding to canvas");
const inputData = result.data;
// Compute backend batch hash for dedupe and state
let backendHashNow: string | undefined;
if (inputData?.input_images_batch && Array.isArray(inputData.input_images_batch)) {
backendHashNow = inputData.input_images_batch.map((i: any) => i.data).join('|');
} else if (inputData?.input_image) {
backendHashNow = inputData.input_image;
}
// Just update the hash without removing any layers
if (backendHashNow) {
log.info("New backend input data detected, adding new layers");
this.canvas.lastLoadedImageSrc = backendHashNow;
}
// Mark that we've loaded input data for this execution
this.canvas.inputDataLoaded = true;
// Determine add mode based on fit_on_add setting
const widgets = this.canvas.node.widgets;
const fitOnAddWidget = widgets ? widgets.find((w: any) => w.name === "fit_on_add") : null;
const addMode = (fitOnAddWidget && fitOnAddWidget.value) ? 'fit' : 'center';
// Load input image(s) only if image input is actually connected, not already loaded, and allowed
if (allowImage && !imageLoaded && hasImageInput) {
if (inputData.input_images_batch) {
// Handle batch of images
const batch = inputData.input_images_batch;
log.info(`Processing batch of ${batch.length} images from backend`);
for (let i = 0; i < batch.length; i++) {
const imgData = batch[i];
const img = await createImageFromSource(imgData.data);
// Add image to canvas with unique name
await this.canvas.canvasLayers.addLayerWithImage(
img,
{ name: `Batch Image ${i + 1}` },
addMode,
this.canvas.outputAreaBounds
);
log.debug(`Added batch image ${i + 1}/${batch.length} from backend`);
}
log.info(`All ${batch.length} batch images added from backend`);
this.canvas.render();
this.canvas.saveState();
} else if (inputData.input_image) {
// Handle single image (backward compatibility)
const img = await createImageFromSource(inputData.input_image);
// Add image to canvas at output area position
await this.canvas.canvasLayers.addLayerWithImage(
img,
{},
addMode,
this.canvas.outputAreaBounds
);
log.info("Single input image added as new layer to canvas");
this.canvas.render();
this.canvas.saveState();
} else {
log.debug("No input image data from backend");
}
} else if (!hasImageInput && (inputData.input_images_batch || inputData.input_image)) {
log.debug("Backend has image data but no image input connected, skipping image load");
}
// Handle mask separately only if mask input is actually connected, allowed, and not already loaded
if (allowMask && !maskLoaded && hasMaskInput && inputData.input_mask) {
log.info("Processing input mask");
// Load mask image
const maskImg = await createImageFromSource(inputData.input_mask);
// Determine if we should fit the mask or use it at original size
const fitOnAddWidget2 = this.canvas.node.widgets.find((w) => w.name === "fit_on_add");
const shouldFit = fitOnAddWidget2 && fitOnAddWidget2.value;
let finalMaskImg: HTMLImageElement = maskImg;
if (shouldFit && this.canvas.maskTool) {
const bounds = this.canvas.outputAreaBounds;
finalMaskImg = await scaleImageToFit(maskImg, bounds.width, bounds.height);
}
// Apply to MaskTool (centers internally)
if (this.canvas.maskTool) {
this.canvas.maskTool.setMask(finalMaskImg, true);
}
(this.canvas as any).maskAppliedFromInput = true;
// Save the mask state
this.canvas.canvasState.saveMaskState()
log.info("Applied input mask to mask tool" + (shouldFit ? " (fitted to output area)" : " (original size)"));
} else if (!hasMaskInput && inputData.input_mask) {
log.debug("Backend has mask data but no mask input connected, skipping mask load");
} else if (!allowMask && inputData.input_mask) {
log.debug("Mask input data present in backend but mask loading is disabled by caller; skipping");
}
} else {
log.debug("No input data from backend");
// Don't schedule another check - we'll only check when explicitly triggered
}
} catch (error) {
log.error("Error checking for input data:", error);
// Don't schedule another check on error
}
}
scheduleInputDataCheck(): void {
// Schedule a retry for mask data check when nodeOutputs are not ready yet
if (this.canvas.pendingInputDataCheck) {
clearTimeout(this.canvas.pendingInputDataCheck);
}
this.canvas.pendingInputDataCheck = window.setTimeout(() => {
this.canvas.pendingInputDataCheck = null;
log.debug("Retrying input data check for mask...");
}, 500); // Shorter delay for mask data retry
}
scheduleDataCheck(): void {
if (this.canvas.pendingDataCheck) {
clearTimeout(this.canvas.pendingDataCheck);
@@ -499,59 +931,11 @@ export class CanvasIO {
}
convertTensorToImageData(tensor: any): ImageData | null {
try {
const shape = tensor.shape;
const height = shape[1];
const width = shape[2];
const channels = shape[3];
log.debug("Converting tensor:", {
shape: shape,
dataRange: {
min: tensor.min_val,
max: tensor.max_val
}
});
const imageData = new ImageData(width, height);
const data = new Uint8ClampedArray(width * height * 4);
const flatData = tensor.data;
const pixelCount = width * height;
for (let i = 0; i < pixelCount; i++) {
const pixelIndex = i * 4;
const tensorIndex = i * channels;
for (let c = 0; c < channels; c++) {
const value = flatData[tensorIndex + c];
const normalizedValue = (value - tensor.min_val) / (tensor.max_val - tensor.min_val);
data[pixelIndex + c] = Math.round(normalizedValue * 255);
}
data[pixelIndex + 3] = 255;
}
imageData.data.set(data);
return imageData;
} catch (error) {
log.error("Error converting tensor:", error);
return null;
}
return tensorToImageData(tensor, 'rgb');
}
async createImageFromData(imageData: ImageData): Promise<HTMLImageElement> {
return new Promise((resolve, reject) => {
const { canvas, ctx } = createCanvas(imageData.width, imageData.height, '2d', { willReadFrequently: true });
if (!ctx) throw new Error("Could not create canvas context");
ctx.putImageData(imageData, 0, 0);
const img = new Image();
img.onload = () => resolve(img);
img.onerror = reject;
img.src = canvas.toDataURL();
});
return createImageFromImageData(imageData);
}
async processMaskData(maskData: any): Promise<void> {
@@ -618,12 +1002,7 @@ export class CanvasIO {
const newLayers: (Layer | null)[] = [];
for (const imageData of result.images) {
const img = new Image();
await new Promise((resolve, reject) => {
img.onload = resolve;
img.onerror = reject;
img.src = imageData;
});
const img = await createImageFromSource(imageData);
let processedImage = img;
@@ -652,37 +1031,31 @@ export class CanvasIO {
}
async clipImageToShape(image: HTMLImageElement, shape: Shape): Promise<HTMLImageElement> {
return new Promise((resolve, reject) => {
const { canvas, ctx } = createCanvas(image.width, image.height);
if (!ctx) {
reject(new Error("Could not create canvas context for clipping"));
return;
}
const { canvas, ctx } = createCanvas(image.width, image.height);
if (!ctx) {
throw new Error("Could not create canvas context for clipping");
}
// Draw the image first
ctx.drawImage(image, 0, 0);
// Draw the image first
ctx.drawImage(image, 0, 0);
// Calculate custom shape position accounting for extensions
// Custom shape should maintain its relative position within the original canvas area
const ext = this.canvas.outputAreaExtensionEnabled ? this.canvas.outputAreaExtensions : { top: 0, bottom: 0, left: 0, right: 0 };
const shapeOffsetX = ext.left; // Add left extension to maintain relative position
const shapeOffsetY = ext.top; // Add top extension to maintain relative position
// Calculate custom shape position accounting for extensions
// Custom shape should maintain its relative position within the original canvas area
const ext = this.canvas.outputAreaExtensionEnabled ? this.canvas.outputAreaExtensions : { top: 0, bottom: 0, left: 0, right: 0 };
const shapeOffsetX = ext.left; // Add left extension to maintain relative position
const shapeOffsetY = ext.top; // Add top extension to maintain relative position
// Create a clipping mask using the shape with extension offset
ctx.globalCompositeOperation = 'destination-in';
ctx.beginPath();
ctx.moveTo(shape.points[0].x + shapeOffsetX, shape.points[0].y + shapeOffsetY);
for (let i = 1; i < shape.points.length; i++) {
ctx.lineTo(shape.points[i].x + shapeOffsetX, shape.points[i].y + shapeOffsetY);
}
ctx.closePath();
ctx.fill();
// Create a clipping mask using the shape with extension offset
ctx.globalCompositeOperation = 'destination-in';
ctx.beginPath();
ctx.moveTo(shape.points[0].x + shapeOffsetX, shape.points[0].y + shapeOffsetY);
for (let i = 1; i < shape.points.length; i++) {
ctx.lineTo(shape.points[i].x + shapeOffsetX, shape.points[i].y + shapeOffsetY);
}
ctx.closePath();
ctx.fill();
// Create a new image from the clipped canvas
const clippedImage = new Image();
clippedImage.onload = () => resolve(clippedImage);
clippedImage.onerror = () => reject(new Error("Failed to create clipped image"));
clippedImage.src = canvas.toDataURL();
});
// Create a new image from the clipped canvas
return await createImageFromSource(canvas.toDataURL());
}
}

View File

@@ -10,15 +10,36 @@ interface MouseCoordinates {
view: Point;
}
interface ModifierState {
ctrl: boolean;
shift: boolean;
alt: boolean;
meta: boolean;
}
interface TransformOrigin {
x: number;
y: number;
width: number;
height: number;
rotation: number;
centerX: number;
centerY: number;
originalWidth?: number;
originalHeight?: number;
cropBounds?: { x: number; y: number; width: number; height: number };
}
interface InteractionState {
mode: 'none' | 'panning' | 'dragging' | 'resizing' | 'rotating' | 'drawingMask' | 'resizingCanvas' | 'movingCanvas' | 'potential-drag' | 'drawingShape';
panStart: Point;
dragStart: Point;
transformOrigin: Partial<Layer> & { centerX?: number, centerY?: number };
transformOrigin: TransformOrigin | null;
resizeHandle: string | null;
resizeAnchor: Point;
canvasResizeStart: Point;
isCtrlPressed: boolean;
isMetaPressed: boolean;
isAltPressed: boolean;
isShiftPressed: boolean;
isSPressed: boolean;
@@ -35,17 +56,35 @@ export class CanvasInteractions {
public interaction: InteractionState;
private originalLayerPositions: Map<Layer, Point>;
// Bound event handlers to enable proper removeEventListener and avoid leaks
private onMouseDown = (e: MouseEvent) => this.handleMouseDown(e);
private onMouseMove = (e: MouseEvent) => this.handleMouseMove(e);
private onMouseUp = (e: MouseEvent) => this.handleMouseUp(e);
private onMouseEnter = (e: MouseEvent) => { this.canvas.isMouseOver = true; this.handleMouseEnter(e); };
private onMouseLeave = (e: MouseEvent) => { this.canvas.isMouseOver = false; this.handleMouseLeave(e); };
private onWheel = (e: WheelEvent) => this.handleWheel(e);
private onKeyDown = (e: KeyboardEvent) => this.handleKeyDown(e);
private onKeyUp = (e: KeyboardEvent) => this.handleKeyUp(e);
private onDragOver = (e: DragEvent) => this.handleDragOver(e);
private onDragEnter = (e: DragEvent) => this.handleDragEnter(e);
private onDragLeave = (e: DragEvent) => this.handleDragLeave(e);
private onDrop = (e: DragEvent) => { this.handleDrop(e); };
private onContextMenu = (e: MouseEvent) => this.handleContextMenu(e);
private onBlur = () => this.handleBlur();
private onPaste = (e: ClipboardEvent) => this.handlePasteEvent(e);
constructor(canvas: Canvas) {
this.canvas = canvas;
this.interaction = {
mode: 'none',
panStart: { x: 0, y: 0 },
dragStart: { x: 0, y: 0 },
transformOrigin: {},
transformOrigin: null,
resizeHandle: null,
resizeAnchor: { x: 0, y: 0 },
canvasResizeStart: { x: 0, y: 0 },
isCtrlPressed: false,
isMetaPressed: false,
isAltPressed: false,
isShiftPressed: false,
isSPressed: false,
@@ -68,13 +107,21 @@ export class CanvasInteractions {
};
}
private getModifierState(e?: MouseEvent | WheelEvent | KeyboardEvent): ModifierState {
return {
ctrl: this.interaction.isCtrlPressed || (e as any)?.ctrlKey || false,
shift: this.interaction.isShiftPressed || (e as any)?.shiftKey || false,
alt: this.interaction.isAltPressed || (e as any)?.altKey || false,
meta: this.interaction.isMetaPressed || (e as any)?.metaKey || false,
};
}
private preventEventDefaults(e: Event): void {
e.preventDefault();
e.stopPropagation();
}
private performZoomOperation(worldCoords: Point, zoomFactor: number): void {
const rect = this.canvas.canvas.getBoundingClientRect();
const mouseBufferX = (worldCoords.x - this.canvas.viewport.x) * this.canvas.viewport.zoom;
const mouseBufferY = (worldCoords.y - this.canvas.viewport.y) * this.canvas.viewport.zoom;
@@ -84,6 +131,11 @@ export class CanvasInteractions {
this.canvas.viewport.x = worldCoords.x - (mouseBufferX / this.canvas.viewport.zoom);
this.canvas.viewport.y = worldCoords.y - (mouseBufferY / this.canvas.viewport.zoom);
// Update stroke overlay if mask tool is drawing during zoom
if (this.canvas.maskTool.isDrawing) {
this.canvas.maskTool.handleViewportChange();
}
this.canvas.onViewportChange?.();
}
@@ -106,34 +158,49 @@ export class CanvasInteractions {
}
setupEventListeners(): void {
this.canvas.canvas.addEventListener('mousedown', this.handleMouseDown.bind(this) as EventListener);
this.canvas.canvas.addEventListener('mousemove', this.handleMouseMove.bind(this) as EventListener);
this.canvas.canvas.addEventListener('mouseup', this.handleMouseUp.bind(this) as EventListener);
this.canvas.canvas.addEventListener('mouseleave', this.handleMouseLeave.bind(this) as EventListener);
this.canvas.canvas.addEventListener('wheel', this.handleWheel.bind(this) as EventListener, { passive: false });
this.canvas.canvas.addEventListener('keydown', this.handleKeyDown.bind(this) as EventListener);
this.canvas.canvas.addEventListener('keyup', this.handleKeyUp.bind(this) as EventListener);
this.canvas.canvas.addEventListener('mousedown', this.onMouseDown as EventListener);
this.canvas.canvas.addEventListener('mousemove', this.onMouseMove as EventListener);
this.canvas.canvas.addEventListener('mouseup', this.onMouseUp as EventListener);
this.canvas.canvas.addEventListener('wheel', this.onWheel as EventListener, { passive: false });
this.canvas.canvas.addEventListener('keydown', this.onKeyDown as EventListener);
this.canvas.canvas.addEventListener('keyup', this.onKeyUp as EventListener);
// Add a blur event listener to the window to reset key states
window.addEventListener('blur', this.handleBlur.bind(this));
window.addEventListener('blur', this.onBlur);
document.addEventListener('paste', this.handlePasteEvent.bind(this));
document.addEventListener('paste', this.onPaste as unknown as EventListener);
this.canvas.canvas.addEventListener('mouseenter', (e: MouseEvent) => {
this.canvas.isMouseOver = true;
this.handleMouseEnter(e);
});
this.canvas.canvas.addEventListener('mouseleave', (e: MouseEvent) => {
this.canvas.isMouseOver = false;
this.handleMouseLeave(e);
});
this.canvas.canvas.addEventListener('mouseenter', this.onMouseEnter as EventListener);
this.canvas.canvas.addEventListener('mouseleave', this.onMouseLeave as EventListener);
this.canvas.canvas.addEventListener('dragover', this.handleDragOver.bind(this) as EventListener);
this.canvas.canvas.addEventListener('dragenter', this.handleDragEnter.bind(this) as EventListener);
this.canvas.canvas.addEventListener('dragleave', this.handleDragLeave.bind(this) as EventListener);
this.canvas.canvas.addEventListener('drop', this.handleDrop.bind(this) as unknown as EventListener);
this.canvas.canvas.addEventListener('dragover', this.onDragOver as EventListener);
this.canvas.canvas.addEventListener('dragenter', this.onDragEnter as EventListener);
this.canvas.canvas.addEventListener('dragleave', this.onDragLeave as EventListener);
this.canvas.canvas.addEventListener('drop', this.onDrop as unknown as EventListener);
this.canvas.canvas.addEventListener('contextmenu', this.handleContextMenu.bind(this) as EventListener);
this.canvas.canvas.addEventListener('contextmenu', this.onContextMenu as EventListener);
}
teardownEventListeners(): void {
this.canvas.canvas.removeEventListener('mousedown', this.onMouseDown as EventListener);
this.canvas.canvas.removeEventListener('mousemove', this.onMouseMove as EventListener);
this.canvas.canvas.removeEventListener('mouseup', this.onMouseUp as EventListener);
this.canvas.canvas.removeEventListener('wheel', this.onWheel as EventListener);
this.canvas.canvas.removeEventListener('keydown', this.onKeyDown as EventListener);
this.canvas.canvas.removeEventListener('keyup', this.onKeyUp as EventListener);
window.removeEventListener('blur', this.onBlur);
document.removeEventListener('paste', this.onPaste as unknown as EventListener);
this.canvas.canvas.removeEventListener('mouseenter', this.onMouseEnter as EventListener);
this.canvas.canvas.removeEventListener('mouseleave', this.onMouseLeave as EventListener);
this.canvas.canvas.removeEventListener('dragover', this.onDragOver as EventListener);
this.canvas.canvas.removeEventListener('dragenter', this.onDragEnter as EventListener);
this.canvas.canvas.removeEventListener('dragleave', this.onDragLeave as EventListener);
this.canvas.canvas.removeEventListener('drop', this.onDrop as unknown as EventListener);
this.canvas.canvas.removeEventListener('contextmenu', this.onContextMenu as EventListener);
}
/**
@@ -177,10 +244,11 @@ export class CanvasInteractions {
handleMouseDown(e: MouseEvent): void {
this.canvas.canvas.focus();
const coords = this.getMouseCoordinates(e);
const mods = this.getModifierState(e);
if (this.interaction.mode === 'drawingMask') {
this.canvas.maskTool.handleMouseDown(coords.world, coords.view);
this.canvas.render();
// Don't render here - mask tool will handle its own drawing
return;
}
@@ -192,11 +260,11 @@ export class CanvasInteractions {
// --- Ostateczna, poprawna kolejność sprawdzania ---
// 1. Akcje globalne z modyfikatorami (mają najwyższy priorytet)
if (e.shiftKey && e.ctrlKey) {
if (mods.shift && mods.ctrl) {
this.startCanvasMove(coords.world);
return;
}
if (e.shiftKey) {
if (mods.shift) {
// Clear custom shape when starting canvas resize
if (this.canvas.outputAreaShape) {
// If auto-apply shape mask is enabled, remove the mask before clearing the shape
@@ -222,7 +290,7 @@ export class CanvasInteractions {
}
return;
}
if (e.button !== 0) { // Środkowy przycisk
if (e.button === 1) { // Środkowy przycisk
this.startPanning(e);
return;
}
@@ -241,7 +309,7 @@ export class CanvasInteractions {
}
// 4. Domyślna akcja na tle (lewy przycisk bez modyfikatorów)
this.startPanningOrClearSelection(e);
this.startPanning(e, true); // clearSelection = true
}
handleMouseMove(e: MouseEvent): void {
@@ -264,7 +332,7 @@ export class CanvasInteractions {
switch (this.interaction.mode) {
case 'drawingMask':
this.canvas.maskTool.handleMouseMove(coords.world, coords.view);
this.canvas.render();
// Don't render during mask drawing - it's handled by mask tool internally
break;
case 'panning':
this.panViewport(e);
@@ -286,6 +354,10 @@ export class CanvasInteractions {
break;
default:
this.updateCursor(coords.world);
// Update brush cursor on overlay if mask tool is active
if (this.canvas.maskTool.isActive) {
this.canvas.canvasRenderer.drawMaskBrushCursor(coords.world);
}
break;
}
@@ -300,6 +372,7 @@ export class CanvasInteractions {
if (this.interaction.mode === 'drawingMask') {
this.canvas.maskTool.handleMouseUp(coords.view);
// Render only once after drawing is complete
this.canvas.render();
return;
}
@@ -397,8 +470,17 @@ export class CanvasInteractions {
const zoomFactor = e.deltaY < 0 ? 1.1 : 1 / 1.1;
this.performZoomOperation(coords.world, zoomFactor);
} else {
// Layer transformation when layers are selected
this.handleLayerWheelTransformation(e);
// Check if mouse is over any selected layer
const isOverSelectedLayer = this.isPointInSelectedLayers(coords.world.x, coords.world.y);
if (isOverSelectedLayer) {
// Layer transformation when layers are selected and mouse is over selected layer
this.handleLayerWheelTransformation(e);
} else {
// Zoom operation when mouse is not over selected layers
const zoomFactor = e.deltaY < 0 ? 1.1 : 1 / 1.1;
this.performZoomOperation(coords.world, zoomFactor);
}
}
this.canvas.render();
@@ -408,14 +490,15 @@ export class CanvasInteractions {
}
private handleLayerWheelTransformation(e: WheelEvent): void {
const mods = this.getModifierState(e);
const rotationStep = 5 * (e.deltaY > 0 ? -1 : 1);
const direction = e.deltaY < 0 ? 1 : -1;
this.canvas.canvasSelection.selectedLayers.forEach((layer: Layer) => {
if (e.shiftKey) {
this.handleLayerRotation(layer, e.ctrlKey, direction, rotationStep);
if (mods.shift) {
this.handleLayerRotation(layer, mods.ctrl, direction, rotationStep);
} else {
this.handleLayerScaling(layer, e.ctrlKey, e.deltaY);
this.handleLayerScaling(layer, mods.ctrl, e.deltaY);
}
});
}
@@ -462,7 +545,7 @@ export class CanvasInteractions {
}
private calculateGridBasedScaling(oldHeight: number, deltaY: number): number {
const gridSize = 64;
const gridSize = 64; // Grid size - could be made configurable in the future
const direction = deltaY > 0 ? -1 : 1;
let targetHeight;
@@ -487,6 +570,7 @@ export class CanvasInteractions {
handleKeyDown(e: KeyboardEvent): void {
if (e.key === 'Control') this.interaction.isCtrlPressed = true;
if (e.key === 'Meta') this.interaction.isMetaPressed = true;
if (e.key === 'Shift') this.interaction.isShiftPressed = true;
if (e.key === 'Alt') {
this.interaction.isAltPressed = true;
@@ -505,11 +589,12 @@ export class CanvasInteractions {
}
// Globalne skróty (Undo/Redo/Copy/Paste)
if (e.ctrlKey || e.metaKey) {
const mods = this.getModifierState(e);
if (mods.ctrl || mods.meta) {
let handled = true;
switch (e.key.toLowerCase()) {
case 'z':
if (e.shiftKey) {
if (mods.shift) {
this.canvas.redo();
} else {
this.canvas.undo();
@@ -536,7 +621,7 @@ export class CanvasInteractions {
// Skróty kontekstowe (zależne od zaznaczenia)
if (this.canvas.canvasSelection.selectedLayers.length > 0) {
const step = e.shiftKey ? 10 : 1;
const step = mods.shift ? 10 : 1;
let needsRender = false;
// Używamy e.code dla spójności i niezależności od układu klawiatury
@@ -571,6 +656,7 @@ export class CanvasInteractions {
handleKeyUp(e: KeyboardEvent): void {
if (e.key === 'Control') this.interaction.isCtrlPressed = false;
if (e.key === 'Meta') this.interaction.isMetaPressed = false;
if (e.key === 'Shift') this.interaction.isShiftPressed = false;
if (e.key === 'Alt') this.interaction.isAltPressed = false;
if (e.key.toLowerCase() === 's') this.interaction.isSPressed = false;
@@ -590,6 +676,7 @@ export class CanvasInteractions {
handleBlur(): void {
log.debug('Window lost focus, resetting key states.');
this.interaction.isCtrlPressed = false;
this.interaction.isMetaPressed = false;
this.interaction.isAltPressed = false;
this.interaction.isShiftPressed = false;
this.interaction.isSPressed = false;
@@ -615,6 +702,12 @@ export class CanvasInteractions {
}
updateCursor(worldCoords: Point): void {
// If actively rotating, show grabbing cursor
if (this.interaction.mode === 'rotating') {
this.canvas.canvas.style.cursor = 'grabbing';
return;
}
const transformTarget = this.canvas.canvasLayers.getHandleAtPosition(worldCoords.x, worldCoords.y);
if (transformTarget) {
@@ -663,7 +756,9 @@ export class CanvasInteractions {
prepareForDrag(layer: Layer, worldCoords: Point): void {
// Zaktualizuj zaznaczenie, ale nie zapisuj stanu
if (this.interaction.isCtrlPressed) {
// Support both Ctrl (Windows/Linux) and Cmd (macOS) for multi-selection
const mods = this.getModifierState();
if (mods.ctrl || mods.meta) {
const index = this.canvas.canvasSelection.selectedLayers.indexOf(layer);
if (index === -1) {
this.canvas.canvasSelection.updateSelection([...this.canvas.canvasSelection.selectedLayers, layer]);
@@ -681,14 +776,13 @@ export class CanvasInteractions {
this.interaction.dragStart = {...worldCoords};
}
startPanningOrClearSelection(e: MouseEvent): void {
// Ta funkcja jest teraz wywoływana tylko gdy kliknięto na tło bez modyfikatorów.
// Domyślna akcja: wyczyść zaznaczenie i rozpocznij panoramowanie.
if (!this.interaction.isCtrlPressed) {
startPanning(e: MouseEvent, clearSelection: boolean = true): void {
// Unified panning method - can optionally clear selection
if (clearSelection && !this.interaction.isCtrlPressed) {
this.canvas.canvasSelection.updateSelection([]);
}
this.interaction.mode = 'panning';
this.interaction.panStart = {x: e.clientX, y: e.clientY};
this.interaction.panStart = { x: e.clientX, y: e.clientY };
}
startCanvasResize(worldCoords: Point): void {
@@ -743,20 +837,18 @@ export class CanvasInteractions {
this.canvas.saveState();
}
startPanning(e: MouseEvent): void {
if (!this.interaction.isCtrlPressed) {
this.canvas.canvasSelection.updateSelection([]);
}
this.interaction.mode = 'panning';
this.interaction.panStart = { x: e.clientX, y: e.clientY };
}
panViewport(e: MouseEvent): void {
const dx = e.clientX - this.interaction.panStart.x;
const dy = e.clientY - this.interaction.panStart.y;
this.canvas.viewport.x -= dx / this.canvas.viewport.zoom;
this.canvas.viewport.y -= dy / this.canvas.viewport.zoom;
this.interaction.panStart = {x: e.clientX, y: e.clientY};
// Update stroke overlay if mask tool is drawing during pan
if (this.canvas.maskTool.isDrawing) {
this.canvas.maskTool.handleViewportChange();
}
this.canvas.render();
this.canvas.onViewportChange?.();
}
@@ -818,7 +910,7 @@ export class CanvasInteractions {
}
const o = this.interaction.transformOrigin;
if (o.rotation === undefined || o.width === undefined || o.height === undefined || o.centerX === undefined || o.centerY === undefined) return;
if (!o) return;
const handle = this.interaction.resizeHandle;
const anchor = this.interaction.resizeAnchor;
@@ -974,7 +1066,7 @@ export class CanvasInteractions {
if (!layer) return;
const o = this.interaction.transformOrigin;
if (o.rotation === undefined || o.centerX === undefined || o.centerY === undefined) return;
if (!o) return;
const startAngle = Math.atan2(this.interaction.dragStart.y - o.centerY, this.interaction.dragStart.x - o.centerX);
const currentAngle = Math.atan2(worldCoords.y - o.centerY, worldCoords.x - o.centerX);
let angleDiff = (currentAngle - startAngle) * 180 / Math.PI;

View File

@@ -8,12 +8,19 @@ export class CanvasRenderer {
lastRenderTime: any;
renderAnimationFrame: any;
renderInterval: any;
// Overlay used to preview in-progress mask strokes (separate from cursor overlay)
strokeOverlayCanvas!: HTMLCanvasElement;
strokeOverlayCtx!: CanvasRenderingContext2D;
constructor(canvas: any) {
this.canvas = canvas;
this.renderAnimationFrame = null;
this.lastRenderTime = 0;
this.renderInterval = 1000 / 60;
this.isDirty = false;
// Initialize overlay canvases
this.initOverlay();
this.initStrokeOverlay();
}
/**
@@ -141,9 +148,11 @@ export class CanvasRenderer {
ctx.save();
if (this.canvas.maskTool.isActive) {
// In draw mask mode, use the previewOpacity value from the slider
ctx.globalCompositeOperation = 'source-over';
ctx.globalAlpha = 0.5;
ctx.globalAlpha = this.canvas.maskTool.previewOpacity;
} else {
// When not in draw mask mode, show mask at full opacity
ctx.globalCompositeOperation = 'source-over';
ctx.globalAlpha = 1.0;
}
@@ -205,6 +214,12 @@ export class CanvasRenderer {
}
this.canvas.ctx.drawImage(this.canvas.offscreenCanvas, 0, 0);
// Ensure overlay canvases are in DOM and properly sized
this.addOverlayToDOM();
this.updateOverlaySize();
this.addStrokeOverlayToDOM();
this.updateStrokeOverlaySize();
// Update Batch Preview UI positions
if (this.canvas.batchPreviewManagers && this.canvas.batchPreviewManagers.length > 0) {
this.canvas.batchPreviewManagers.forEach((manager: any) => {
@@ -710,4 +725,290 @@ export class CanvasRenderer {
padding: 8
});
}
/**
 * Initialize the lightweight overlay canvas used for transient visuals
 * such as the brush cursor. Sizes it to the main canvas, stacks it
 * absolutely on top, and attaches it to the DOM once the main canvas
 * has a parent.
 */
initOverlay(): void {
    // Match the main canvas dimensions first.
    this.updateOverlaySize();
    const style = this.canvas.overlayCanvas.style;
    style.position = 'absolute';
    style.left = '0px';
    style.top = '0px';
    style.pointerEvents = 'none';
    style.zIndex = '20'; // keep above every other overlay layer
    // Attach immediately if the main canvas is already in the DOM.
    this.addOverlayToDOM();
    log.debug('Overlay canvas initialized');
}
/**
* Add overlay canvas to DOM if main canvas has a parent
*/
addOverlayToDOM(): void {
if (this.canvas.canvas.parentElement && !this.canvas.overlayCanvas.parentElement) {
this.canvas.canvas.parentElement.appendChild(this.canvas.overlayCanvas);
log.debug('Overlay canvas added to DOM');
}
}
/**
* Update overlay canvas size to match main canvas
*/
updateOverlaySize(): void {
if (this.canvas.overlayCanvas.width !== this.canvas.canvas.clientWidth ||
this.canvas.overlayCanvas.height !== this.canvas.canvas.clientHeight) {
this.canvas.overlayCanvas.width = Math.max(1, this.canvas.canvas.clientWidth);
this.canvas.overlayCanvas.height = Math.max(1, this.canvas.canvas.clientHeight);
log.debug(`Overlay canvas resized to ${this.canvas.overlayCanvas.width}x${this.canvas.overlayCanvas.height}`);
}
}
/**
 * Erase everything currently drawn on the cursor overlay canvas.
 */
clearOverlay(): void {
    const { width, height } = this.canvas.overlayCanvas;
    this.canvas.overlayCtx.clearRect(0, 0, width, height);
}
/**
 * Initialize the dedicated overlay used to preview in-progress mask
 * strokes in real time (separate from the brush-cursor overlay).
 *
 * Lazily creates the backing <canvas> and its 2D context, sizes it to
 * the main canvas, stacks it just below the cursor overlay (z-index 19
 * vs 20), and attaches it to the DOM.
 *
 * @throws Error if a 2D rendering context cannot be obtained.
 */
initStrokeOverlay(): void {
    // Create the canvas lazily on first call.
    if (!this.strokeOverlayCanvas) {
        this.strokeOverlayCanvas = document.createElement('canvas');
        const ctx = this.strokeOverlayCanvas.getContext('2d');
        if (!ctx) {
            throw new Error('Failed to get 2D context for stroke overlay canvas');
        }
        this.strokeOverlayCtx = ctx;
    }
    // Size match main canvas.
    this.updateStrokeOverlaySize();
    const style = this.strokeOverlayCanvas.style;
    style.position = 'absolute';
    style.left = '0px';
    style.top = '0px';
    style.pointerEvents = 'none';
    style.zIndex = '19'; // below cursor overlay (20)
    // Opacity is controlled by MaskTool.previewOpacity. Use ?? (not ||)
    // so an intentional preview opacity of 0 — the opacity slider's
    // minimum — is respected instead of being clobbered by the 0.5
    // fallback; the fallback only covers an uninitialized value.
    style.opacity = String(this.canvas.maskTool.previewOpacity ?? 0.5);
    // Add to DOM.
    this.addStrokeOverlayToDOM();
    log.debug('Stroke overlay canvas initialized');
}
/**
* Add stroke overlay canvas to DOM if needed
*/
addStrokeOverlayToDOM(): void {
if (this.canvas.canvas.parentElement && !this.strokeOverlayCanvas.parentElement) {
this.canvas.canvas.parentElement.appendChild(this.strokeOverlayCanvas);
log.debug('Stroke overlay canvas added to DOM');
}
}
/**
 * Keep the stroke overlay bitmap the same size as the main canvas,
 * clamped to at least 1x1. Width/height are only reassigned when the
 * size actually changed, because assigning them clears the canvas.
 */
updateStrokeOverlaySize(): void {
    const width = Math.max(1, this.canvas.canvas.clientWidth);
    const height = Math.max(1, this.canvas.canvas.clientHeight);
    if (this.strokeOverlayCanvas.width === width && this.strokeOverlayCanvas.height === height) {
        return;
    }
    this.strokeOverlayCanvas.width = width;
    this.strokeOverlayCanvas.height = height;
    log.debug(`Stroke overlay resized to ${width}x${height}`);
}
/**
* Clear the stroke overlay
*/
clearMaskStrokeOverlay(): void {
if (!this.strokeOverlayCtx) return;
this.strokeOverlayCtx.clearRect(0, 0, this.strokeOverlayCanvas.width, this.strokeOverlayCanvas.height);
}
/**
 * Paint one preview segment of an in-progress mask stroke onto the
 * stroke overlay, in screen space.
 *
 * The stroke styling (radial falloff anchored at the segment end, line
 * width, round caps/joins) deliberately mirrors MaskTool's
 * drawLineOnChunk so the preview matches the committed mask pixels.
 *
 * @param startWorld Segment start point, world coordinates.
 * @param endWorld   Segment end point, world coordinates.
 */
drawMaskStrokeSegment(startWorld: { x: number; y: number }, endWorld: { x: number; y: number }): void {
    // Ensure the overlay exists at the correct size before drawing.
    this.updateStrokeOverlaySize();
    const strength = this.canvas.maskTool.brushStrength;
    // A zero-strength brush paints nothing at all.
    if (strength <= 0) {
        return;
    }
    const zoom = this.canvas.viewport.zoom;
    const project = (p: { x: number; y: number }) => ({
        x: (p.x - this.canvas.viewport.x) * zoom,
        y: (p.y - this.canvas.viewport.y) * zoom
    });
    const from = project(startWorld);
    const to = project(endWorld);
    const radius = (this.canvas.maskTool.brushSize / 2) * zoom;
    const hardness = this.canvas.maskTool.brushHardness;
    const ctx = this.strokeOverlayCtx;
    ctx.save();
    // Line segment drawn exactly as MaskTool does.
    ctx.beginPath();
    ctx.moveTo(from.x, from.y);
    ctx.lineTo(to.x, to.y);
    if (hardness === 1) {
        // Hard brush: flat alpha, no falloff.
        ctx.strokeStyle = `rgba(255, 255, 255, ${strength})`;
    } else {
        // Soft brush: radial falloff from the segment end, matching the
        // gradient parameters of MaskTool's drawLineOnChunk.
        const gradient = ctx.createRadialGradient(
            to.x, to.y, radius * hardness,
            to.x, to.y, radius
        );
        gradient.addColorStop(0, `rgba(255, 255, 255, ${strength})`);
        gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
        ctx.strokeStyle = gradient;
    }
    // Line properties matching MaskTool.
    ctx.lineWidth = this.canvas.maskTool.brushSize * zoom;
    ctx.lineCap = 'round';
    ctx.lineJoin = 'round';
    ctx.globalCompositeOperation = 'source-over';
    ctx.stroke();
    ctx.restore();
}
/**
 * Rebuild the whole stroke preview from the recorded world-space points.
 * Used when the viewport changes mid-stroke so the preview stays aligned
 * with world coordinates.
 *
 * @param strokePoints Ordered points of the current stroke (world space).
 */
redrawMaskStrokeOverlay(strokePoints: { x: number; y: number }[]): void {
    if (strokePoints.length < 2) return;
    // Start from a blank overlay, then replay every segment under the
    // current viewport transform.
    this.clearMaskStrokeOverlay();
    strokePoints.slice(1).forEach((point, i) => {
        this.drawMaskStrokeSegment(strokePoints[i], point);
    });
}
/**
 * Render the mask-brush cursor on the cursor overlay: an outline ring
 * showing brush size, a translucent fill hinting at strength, and
 * dashing that reflects hardness (solid = hard, dashed = soft).
 *
 * Clears the overlay and bails out when the mask tool is inactive or
 * the pointer has left the canvas.
 *
 * @param worldPoint Cursor position in world coordinates.
 */
drawMaskBrushCursor(worldPoint: { x: number, y: number }): void {
    if (!this.canvas.maskTool.isActive || !this.canvas.isMouseOver) {
        this.clearOverlay();
        return;
    }
    // Resize if needed, then wipe the previous cursor frame.
    this.updateOverlaySize();
    this.clearOverlay();
    // World -> screen projection.
    const zoom = this.canvas.viewport.zoom;
    const screenX = (worldPoint.x - this.canvas.viewport.x) * zoom;
    const screenY = (worldPoint.y - this.canvas.viewport.y) * zoom;
    // Current brush parameters.
    const brushRadius = (this.canvas.maskTool.brushSize / 2) * zoom;
    const brushStrength = this.canvas.maskTool.brushStrength;
    const brushHardness = this.canvas.maskTool.brushHardness;
    const ctx = this.canvas.overlayCtx;
    ctx.save();
    if (brushStrength > 0) {
        // Translucent preview fill that mimics the real brush falloff.
        const gradient = ctx.createRadialGradient(
            screenX, screenY, 0,
            screenX, screenY, brushRadius
        );
        // Keep the preview subtle (max 15% opacity) so content stays visible.
        const previewAlpha = brushStrength * 0.15;
        if (brushHardness === 1) {
            // Hard brush: uniform fill across the whole radius.
            gradient.addColorStop(0, `rgba(255, 255, 255, ${previewAlpha})`);
            gradient.addColorStop(1, `rgba(255, 255, 255, ${previewAlpha})`);
        } else {
            // Soft brush: hold full alpha out to the hardness fraction,
            // then fade to transparent at the edge.
            gradient.addColorStop(0, `rgba(255, 255, 255, ${previewAlpha})`);
            if (brushHardness > 0) {
                gradient.addColorStop(brushHardness, `rgba(255, 255, 255, ${previewAlpha})`);
            }
            gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
        }
        ctx.beginPath();
        ctx.arc(screenX, screenY, brushRadius, 0, 2 * Math.PI);
        ctx.fillStyle = gradient;
        ctx.fill();
    }
    // Outline ring: the SIZE indicator.
    ctx.beginPath();
    ctx.arc(screenX, screenY, brushRadius, 0, 2 * Math.PI);
    // Ring is dimmer at zero strength, brighter as strength rises.
    const strokeOpacity = brushStrength > 0 ? (0.4 + brushStrength * 0.4) : 0.3;
    ctx.strokeStyle = `rgba(255, 255, 255, ${strokeOpacity})`;
    ctx.lineWidth = 1.5;
    if (brushHardness > 0.8) {
        // Hard brush: solid outline.
        ctx.setLineDash([]);
    } else {
        // Soft brush: dashes grow longer as hardness decreases.
        const dashLength = 2 + (1 - brushHardness) * 4;
        ctx.setLineDash([dashLength, dashLength]);
    }
    ctx.stroke();
    // For tiny brushes, add a center dot so the cursor stays visible.
    if (brushRadius < 5) {
        ctx.beginPath();
        ctx.arc(screenX, screenY, 1, 0, 2 * Math.PI);
        ctx.fillStyle = `rgba(255, 255, 255, ${strokeOpacity})`;
        ctx.fill();
    }
    ctx.restore();
}
/**
 * Hook for viewport changes. The overlay is absolutely positioned, so
 * there is nothing to reposition — only its size needs to stay in sync
 * with the main canvas.
 */
updateOverlayPosition(): void {
    this.updateOverlaySize();
}
}

View File

@@ -456,12 +456,13 @@ If you see dark images or masks in the output, make sure node_id is set to ${cor
if (this.maskUndoStack.length > 0) {
const prevState = this.maskUndoStack[this.maskUndoStack.length - 1];
const maskCanvas = this.canvas.maskTool.getMask();
const maskCtx = maskCanvas.getContext('2d', { willReadFrequently: true });
if (maskCtx) {
maskCtx.clearRect(0, 0, maskCanvas.width, maskCanvas.height);
maskCtx.drawImage(prevState, 0, 0);
}
// Use the new restoreMaskFromSavedState method that properly clears chunks first
this.canvas.maskTool.restoreMaskFromSavedState(prevState);
// Clear stroke overlay to prevent old drawing previews from persisting
this.canvas.canvasRenderer.clearMaskStrokeOverlay();
this.canvas.render();
}
@@ -474,12 +475,13 @@ If you see dark images or masks in the output, make sure node_id is set to ${cor
const nextState = this.maskRedoStack.pop();
if (nextState) {
this.maskUndoStack.push(nextState);
const maskCanvas = this.canvas.maskTool.getMask();
const maskCtx = maskCanvas.getContext('2d', { willReadFrequently: true });
if (maskCtx) {
maskCtx.clearRect(0, 0, maskCanvas.width, maskCanvas.height);
maskCtx.drawImage(nextState, 0, 0);
}
// Use the new restoreMaskFromSavedState method that properly clears chunks first
this.canvas.maskTool.restoreMaskFromSavedState(nextState);
// Clear stroke overlay to prevent old drawing previews from persisting
this.canvas.canvasRenderer.clearMaskStrokeOverlay();
this.canvas.render();
}
this.canvas.updateHistoryButtons();

View File

@@ -640,6 +640,24 @@ $el("label.clipboard-switch.mask-switch", {
setTimeout(() => canvas.render(), 0);
}
}),
$el("div.painter-slider-container.mask-control", {style: {display: 'none'}}, [
$el("label", {for: "preview-opacity-slider", textContent: "Mask Opacity:"}),
$el("input", {
id: "preview-opacity-slider",
type: "range",
min: "0",
max: "1",
step: "0.05",
value: "0.5",
oninput: (e: Event) => {
const value = (e.target as HTMLInputElement).value;
canvas.maskTool.setPreviewOpacity(parseFloat(value));
const valueEl = document.getElementById('preview-opacity-value');
if (valueEl) valueEl.textContent = `${Math.round(parseFloat(value) * 100)}%`;
}
}),
$el("div.slider-value", {id: "preview-opacity-value"}, ["50%"])
]),
$el("div.painter-slider-container.mask-control", {style: {display: 'none'}}, [
$el("label", {for: "brush-size-slider", textContent: "Size:"}),
$el("input", {
@@ -1011,7 +1029,9 @@ $el("label.clipboard-switch.mask-switch", {
}
}, [controlPanel, canvasContainer, layersPanelContainer]) as HTMLDivElement;
node.addDOMWidget("mainContainer", "widget", mainContainer);
if (node.addDOMWidget) {
node.addDOMWidget("mainContainer", "widget", mainContainer);
}
const openEditorBtn = controlPanel.querySelector(`#open-editor-btn-${node.id}`) as HTMLButtonElement;
let backdrop: HTMLDivElement | null = null;
@@ -1123,7 +1143,12 @@ $el("label.clipboard-switch.mask-switch", {
if (!(window as any).canvasExecutionStates) {
(window as any).canvasExecutionStates = new Map<string, any>();
}
(node as any).canvasWidget = canvas;
// Store the entire widget object, not just the canvas
(node as any).canvasWidget = {
canvas: canvas,
panel: controlPanel
};
setTimeout(() => {
canvas.loadInitialState();
@@ -1145,7 +1170,7 @@ $el("label.clipboard-switch.mask-switch", {
canvas.setPreviewVisibility(value);
}
if ((node as any).graph && (node as any).graph.canvas) {
if ((node as any).graph && (node as any).graph.canvas && node.setDirtyCanvas) {
node.setDirtyCanvas(true, true);
}
};
@@ -1237,10 +1262,156 @@ app.registerExtension({
const canvasWidget = await createCanvasWidget(this, null, app);
canvasNodeInstances.set(this.id, canvasWidget);
log.info(`Registered CanvasNode instance for ID: ${this.id}`);
// Store the canvas widget on the node
(this as any).canvasWidget = canvasWidget;
// Check if there are already connected inputs
setTimeout(() => {
this.setDirtyCanvas(true, true);
}, 100);
if (this.inputs && this.inputs.length > 0) {
// Check if input_image (index 0) is connected
if (this.inputs[0] && this.inputs[0].link) {
log.info("Input image already connected on node creation, checking for data...");
if (canvasWidget.canvas && canvasWidget.canvas.canvasIO) {
canvasWidget.canvas.inputDataLoaded = false;
// Only allow images on init; mask should load only on mask connect or execution
canvasWidget.canvas.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: "init_image_connected" });
}
}
}
if (this.setDirtyCanvas) {
this.setDirtyCanvas(true, true);
}
}, 500);
};
// Add onConnectionsChange handler to detect when inputs are connected.
// LiteGraph fires this on every link change; we only react to INPUT-side
// changes (type === 1). Slot layout: index 0 = input_image, index 1 = input_mask.
nodeType.prototype.onConnectionsChange = function (this: ComfyNode, type: number, index: number, connected: boolean, link_info: any) {
log.info(`onConnectionsChange called: type=${type}, index=${index}, connected=${connected}`, link_info);
// Check if this is an input connection (type 1 = INPUT)
if (type === 1) {
// Get the canvas widget - it might be in different places
// (either the { canvas, panel } wrapper object or the Canvas instance itself)
const canvasWidget = (this as any).canvasWidget;
const canvas = canvasWidget?.canvas || canvasWidget;
if (!canvas || !canvas.canvasIO) {
log.warn("Canvas not ready in onConnectionsChange, scheduling retry...");
// Retry multiple times with increasing delays.
// The setTimeout after tryAgain's definition consumes retryDelays[0];
// tryAgain itself consumes the remaining entries, so at most three
// attempts are scheduled in total before giving up.
const retryDelays = [500, 1000, 2000];
let retryCount = 0;
const tryAgain = () => {
const retryCanvas = (this as any).canvasWidget?.canvas || (this as any).canvasWidget;
if (retryCanvas && retryCanvas.canvasIO) {
log.info("Canvas now ready, checking for input data...");
if (connected) {
retryCanvas.inputDataLoaded = false;
// Respect which input triggered the connection:
// index 1 (input_mask) loads only the mask; anything else loads only the image.
const opts = (index === 1)
? { allowImage: false, allowMask: true, reason: "mask_connect" }
: { allowImage: true, allowMask: false, reason: "image_connect" };
retryCanvas.canvasIO.checkForInputData(opts);
}
} else if (retryCount < retryDelays.length) {
log.warn(`Canvas still not ready, retry ${retryCount + 1}/${retryDelays.length}...`);
setTimeout(tryAgain, retryDelays[retryCount++]);
} else {
log.error("Canvas failed to initialize after multiple retries");
}
};
setTimeout(tryAgain, retryDelays[retryCount++]);
// The retry path takes over from here; skip the immediate handling below.
return;
}
// Handle input_image connection (index 0)
if (index === 0) {
if (connected && link_info) {
log.info("Input image connected, marking for data check...");
// Reset the input data loaded flag to allow loading the new connection
canvas.inputDataLoaded = false;
// Also reset the last loaded image source and link ID to allow the new image
canvas.lastLoadedImageSrc = undefined;
canvas.lastLoadedLinkId = undefined;
// Mark that we have a pending input connection
canvas.hasPendingInputConnection = true;
// If mask input is not connected and a mask was auto-applied from input_mask before, clear it now
if (!(this.inputs && this.inputs[1] && this.inputs[1].link)) {
if ((canvas as any).maskAppliedFromInput && canvas.maskTool) {
canvas.maskTool.clear();
canvas.render();
(canvas as any).maskAppliedFromInput = false;
canvas.lastLoadedMaskLinkId = undefined;
log.info("Cleared auto-applied mask because input_image connected without input_mask");
}
}
// Check for data immediately when connected
// (small delay lets the upstream node finish producing its output)
setTimeout(() => {
log.info("Checking for input data after connection...");
// Only load images here; masks should not auto-load on image connect
canvas.canvasIO.checkForInputData({ allowImage: true, allowMask: false, reason: "image_connect" });
}, 500);
} else {
log.info("Input image disconnected");
canvas.hasPendingInputConnection = false;
// Reset when disconnected so a new connection can load
canvas.inputDataLoaded = false;
canvas.lastLoadedImageSrc = undefined;
canvas.lastLoadedLinkId = undefined;
}
}
// Handle input_mask connection (index 1)
if (index === 1) {
if (connected && link_info) {
log.info("Input mask connected");
// DON'T clear existing mask when connecting a new input
// Reset the loaded mask link ID to allow loading from the new connection
canvas.lastLoadedMaskLinkId = undefined;
// Mark that we have a pending mask connection
canvas.hasPendingMaskConnection = true;
// Check for data immediately when connected
setTimeout(() => {
log.info("Checking for input data after mask connection...");
// Only load mask here if it's immediately available from the connected node
// Don't load stale masks from backend storage
canvas.canvasIO.checkForInputData({ allowImage: false, allowMask: true, reason: "mask_connect" });
}, 500);
} else {
log.info("Input mask disconnected");
canvas.hasPendingMaskConnection = false;
// If the current mask came from input_mask, clear it to avoid affecting images when mask is not connected
// NOTE(review): only the bookkeeping flags are reset here; unlike the
// image-connect path above, maskTool.clear() is NOT invoked, so the mask
// pixels stay on canvas even though the log below says "Cleared".
// Confirm whether leaving the pixels is intentional.
if ((canvas as any).maskAppliedFromInput && canvas.maskTool) {
(canvas as any).maskAppliedFromInput = false;
canvas.lastLoadedMaskLinkId = undefined;
log.info("Cleared auto-applied mask due to mask input disconnection");
}
}
}
}
};
// Wrap onExecuted so LayerForge pulls fresh input data after each workflow run,
// while still delegating to any handler another extension installed before us.
const originalOnExecuted = nodeType.prototype.onExecuted;
nodeType.prototype.onExecuted = function (this: ComfyNode, message: any) {
    log.info("Node executed, checking for input data...");
    // canvasWidget may be the { canvas, panel } wrapper or the Canvas itself
    const widget = (this as any).canvasWidget;
    const canvasRef = widget?.canvas || widget;
    if (canvasRef && canvasRef.canvasIO) {
        // inputDataLoaded is deliberately left untouched: execution only
        // checks for new data. Both image and mask may load after a run.
        canvasRef.canvasIO.checkForInputData({ allowImage: true, allowMask: true, reason: "execution" });
    }
    // Chain to the pre-existing handler, if any
    if (originalOnExecuted) {
        originalOnExecuted.apply(this, arguments as any);
    }
};
const onRemoved = nodeType.prototype.onRemoved;

View File

@@ -507,7 +507,6 @@ export class MaskEditorIntegration {
maskSize: {width: bounds.width, height: bounds.height}
});
// Use the chunk system instead of direct canvas manipulation
this.maskTool.setMask(maskAsImage);
// Update node preview using PreviewUtils

View File

@@ -21,9 +21,10 @@ interface MaskChunk {
}
export class MaskTool {
private brushHardness: number;
private brushSize: number;
private brushStrength: number;
private _brushHardness: number;
public brushSize: number;
private _brushStrength: number;
private _previewOpacity: number;
private canvasInstance: Canvas & { canvasState: CanvasState, width: number, height: number };
public isActive: boolean;
public isDrawing: boolean;
@@ -31,6 +32,9 @@ export class MaskTool {
private lastPosition: Point | null;
private mainCanvas: HTMLCanvasElement;
// Track strokes during drawing for efficient overlay updates
private currentStrokePoints: Point[] = [];
// Chunked mask system
private maskChunks: Map<string, MaskChunk>; // Key: "x,y" (chunk coordinates)
private chunkSize: number;
@@ -72,6 +76,9 @@ export class MaskTool {
this.mainCanvas = canvasInstance.canvas;
this.onStateChange = callbacks.onStateChange || null;
// Initialize stroke tracking for overlay drawing
this.currentStrokePoints = [];
// Initialize chunked mask system
this.maskChunks = new Map();
this.chunkSize = 512;
@@ -96,8 +103,9 @@ export class MaskTool {
this.isOverlayVisible = true;
this.isActive = false;
this.brushSize = 20;
this.brushStrength = 0.5;
this.brushHardness = 0.5;
this._brushStrength = 0.5;
this._brushHardness = 0.5;
this._previewOpacity = 0.5; // Default 50% opacity for preview
this.isDrawing = false;
this.lastPosition = null;
@@ -156,8 +164,31 @@ export class MaskTool {
}
}
// Getters for brush properties.
// Read-only views over the private backing fields; values are written via the
// corresponding setBrushStrength/setBrushHardness/setPreviewOpacity methods,
// which clamp their inputs to the [0, 1] range.
get brushStrength(): number {
return this._brushStrength;
}
get brushHardness(): number {
return this._brushHardness;
}
get previewOpacity(): number {
return this._previewOpacity;
}
setBrushHardness(hardness: number): void {
this.brushHardness = Math.max(0, Math.min(1, hardness));
this._brushHardness = Math.max(0, Math.min(1, hardness));
}
/**
 * Sets the mask preview opacity (clamped to [0, 1]) and propagates it to the
 * live stroke overlay and the main canvas render.
 */
setPreviewOpacity(opacity: number): void {
    // Clamp to the valid range before storing
    this._previewOpacity = Math.min(1, Math.max(0, opacity));
    // Keep the stroke overlay element in sync with the new opacity
    const renderer = this.canvasInstance.canvasRenderer;
    if (renderer && renderer.strokeOverlayCanvas) {
        renderer.strokeOverlayCanvas.style.opacity = String(this._previewOpacity);
    }
    // Re-render so the committed mask is also drawn at the new opacity
    this.canvasInstance.render();
}
initMaskCanvas(): void {
@@ -867,7 +898,7 @@ export class MaskTool {
}
setBrushStrength(strength: number): void {
this.brushStrength = Math.max(0, Math.min(1, strength));
this._brushStrength = Math.max(0, Math.min(1, strength));
}
handleMouseDown(worldCoords: Point, viewCoords: Point): void {
@@ -875,10 +906,12 @@ export class MaskTool {
this.isDrawing = true;
this.lastPosition = worldCoords;
// Activate chunks around the drawing position for performance
this.updateActiveChunksForDrawing(worldCoords);
// Initialize stroke tracking for live preview
this.currentStrokePoints = [worldCoords];
// Clear any previous stroke overlay
this.canvasInstance.canvasRenderer.clearMaskStrokeOverlay();
this.draw(worldCoords);
this.clearPreview();
}
@@ -888,16 +921,83 @@ export class MaskTool {
}
if (!this.isActive || !this.isDrawing) return;
// Dynamically update active chunks as user moves while drawing
this.updateActiveChunksForDrawing(worldCoords);
// Add point to stroke tracking
this.currentStrokePoints.push(worldCoords);
// Draw interpolated segments for smooth strokes without gaps
if (this.lastPosition) {
// Calculate distance between last and current position
const dx = worldCoords.x - this.lastPosition.x;
const dy = worldCoords.y - this.lastPosition.y;
const distance = Math.sqrt(dx * dx + dy * dy);
// If distance is small, just draw a single segment
if (distance < this.brushSize / 4) {
this.canvasInstance.canvasRenderer.drawMaskStrokeSegment(this.lastPosition, worldCoords);
} else {
// Interpolate points for smooth drawing without gaps
const interpolatedPoints = this.interpolatePoints(this.lastPosition, worldCoords, distance);
// Draw all interpolated segments
for (let i = 0; i < interpolatedPoints.length - 1; i++) {
this.canvasInstance.canvasRenderer.drawMaskStrokeSegment(
interpolatedPoints[i],
interpolatedPoints[i + 1]
);
}
}
}
this.draw(worldCoords);
this.lastPosition = worldCoords;
}
/**
 * Builds a list of evenly spaced points along the segment from `start` to
 * `end`, so fast brush strokes render as a continuous line instead of
 * separated dots. Mirrors the interpolation approach used by BrushTool.
 *
 * @param start    First point (always included as element 0)
 * @param end      Last point (always included as the final element)
 * @param distance Precomputed euclidean distance between start and end
 * @returns Ordered points from start to end inclusive
 */
private interpolatePoints(start: Point, end: Point, distance: number): Point[] {
    // Sample spacing scales with brush size; never below 1px
    const spacing = Math.max(1, this.brushSize / 6);
    const segments = Math.ceil(distance / spacing);
    const dx = end.x - start.x;
    const dy = end.y - start.y;
    const result: Point[] = [start];
    // Intermediate samples at fractions 1/segments .. (segments-1)/segments
    for (let step = 1; step < segments; step++) {
        const fraction = step / segments;
        result.push({
            x: start.x + dx * fraction,
            y: start.y + dy * fraction
        });
    }
    result.push(end);
    return result;
}
/**
* Called when viewport changes during drawing to update stroke overlay
* This ensures the stroke preview scales correctly with zoom changes
*/
handleViewportChange(): void {
if (this.isDrawing && this.currentStrokePoints.length > 1) {
// Redraw the entire stroke overlay with new viewport settings
this.canvasInstance.canvasRenderer.redrawMaskStrokeOverlay(this.currentStrokePoints);
}
}
/**
 * Resets all transient brush visuals when the pointer leaves the canvas:
 * the brush-size preview circle plus the cursor/stroke overlay canvases.
 */
handleMouseLeave(): void {
    // Hide and erase the brush-size preview circle
    this.previewVisible = false;
    this.clearPreview();
    // Drop transient overlay content (cursor + live stroke preview)
    const renderer = this.canvasInstance.canvasRenderer;
    renderer.clearOverlay();
    renderer.clearMaskStrokeOverlay();
}
handleMouseEnter(): void {
@@ -908,11 +1008,18 @@ export class MaskTool {
if (!this.isActive) return;
if (this.isDrawing) {
this.isDrawing = false;
// Commit the stroke from overlay to actual mask chunks
this.commitStrokeToChunks();
// Clear stroke overlay and reset state
this.canvasInstance.canvasRenderer.clearMaskStrokeOverlay();
this.currentStrokePoints = [];
this.lastPosition = null;
this.currentDrawingChunk = null;
// After drawing is complete, update active canvas to show all chunks
this.updateActiveMaskCanvas(true); // forceShowAll = true
this.updateActiveMaskCanvas(true); // Force full update
this.completeMaskOperation();
this.drawBrushPreview(viewCoords);
@@ -932,6 +1039,44 @@ export class MaskTool {
this.updateActiveCanvasIfNeeded(this.lastPosition, worldCoords);
}
/**
* Commits the current stroke from overlay to actual mask chunks
* This replays the entire stroke path with interpolation to ensure pixel-perfect accuracy
*/
private commitStrokeToChunks(): void {
if (this.currentStrokePoints.length < 2) {
return; // Need at least 2 points for a stroke
}
log.debug(`Committing stroke with ${this.currentStrokePoints.length} points to chunks`);
// Replay the entire stroke path with interpolation for smooth, accurate lines
for (let i = 1; i < this.currentStrokePoints.length; i++) {
const startPoint = this.currentStrokePoints[i - 1];
const endPoint = this.currentStrokePoints[i];
// Calculate distance between points
const dx = endPoint.x - startPoint.x;
const dy = endPoint.y - startPoint.y;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance < this.brushSize / 4) {
// Small distance - draw single segment
this.drawOnChunks(startPoint, endPoint);
} else {
// Large distance - interpolate for smooth line without gaps
const interpolatedPoints = this.interpolatePoints(startPoint, endPoint, distance);
// Draw all interpolated segments
for (let j = 0; j < interpolatedPoints.length - 1; j++) {
this.drawOnChunks(interpolatedPoints[j], interpolatedPoints[j + 1]);
}
}
}
log.debug("Stroke committed to chunks successfully with interpolation");
}
/**
* Draws a line between two world coordinates on the appropriate chunks
*/
@@ -982,15 +1127,15 @@ export class MaskTool {
const gradientRadius = this.brushSize / 2;
if (this.brushHardness === 1) {
chunk.ctx.strokeStyle = `rgba(255, 255, 255, ${this.brushStrength})`;
if (this._brushHardness === 1) {
chunk.ctx.strokeStyle = `rgba(255, 255, 255, ${this._brushStrength})`;
} else {
const innerRadius = gradientRadius * this.brushHardness;
const innerRadius = gradientRadius * this._brushHardness;
const gradient = chunk.ctx.createRadialGradient(
endLocal.x, endLocal.y, innerRadius,
endLocal.x, endLocal.y, gradientRadius
);
gradient.addColorStop(0, `rgba(255, 255, 255, ${this.brushStrength})`);
gradient.addColorStop(0, `rgba(255, 255, 255, ${this._brushStrength})`);
gradient.addColorStop(1, `rgba(255, 255, 255, 0)`);
chunk.ctx.strokeStyle = gradient;
}
@@ -1029,29 +1174,17 @@ export class MaskTool {
}
/**
* Updates active canvas when drawing affects chunks with throttling to prevent lag
* During drawing, only updates the affected active chunks for performance
* Updates active canvas when drawing affects chunks
* Since we now use overlay during drawing, this is only called after drawing is complete
*/
private updateActiveCanvasIfNeeded(startWorld: Point, endWorld: Point): void {
// Calculate which chunks were affected by this drawing operation
const minX = Math.min(startWorld.x, endWorld.x) - this.brushSize;
const maxX = Math.max(startWorld.x, endWorld.x) + this.brushSize;
const minY = Math.min(startWorld.y, endWorld.y) - this.brushSize;
const maxY = Math.max(startWorld.y, endWorld.y) + this.brushSize;
const affectedChunkMinX = Math.floor(minX / this.chunkSize);
const affectedChunkMinY = Math.floor(minY / this.chunkSize);
const affectedChunkMaxX = Math.floor(maxX / this.chunkSize);
const affectedChunkMaxY = Math.floor(maxY / this.chunkSize);
// During drawing, only update affected chunks that are active for performance
if (this.isDrawing) {
// Use throttled partial update for active chunks only
this.scheduleThrottledActiveMaskUpdate(affectedChunkMinX, affectedChunkMinY, affectedChunkMaxX, affectedChunkMaxY);
} else {
// This method is now simplified - we only update after drawing is complete
// The overlay handles all live preview, so we don't need complex chunk activation
if (!this.isDrawing) {
// Not drawing - do full update to show all chunks
this.updateActiveMaskCanvas(true);
}
// During drawing, we don't update chunks at all - overlay handles preview
}
/**
@@ -1142,20 +1275,13 @@ export class MaskTool {
drawBrushPreview(viewCoords: Point): void {
if (!this.previewVisible || this.isDrawing) {
this.clearPreview();
this.canvasInstance.canvasRenderer.clearOverlay();
return;
}
this.clearPreview();
const zoom = this.canvasInstance.viewport.zoom;
const radius = (this.brushSize / 2) * zoom;
this.previewCtx.beginPath();
this.previewCtx.arc(viewCoords.x, viewCoords.y, radius, 0, 2 * Math.PI);
this.previewCtx.strokeStyle = 'rgba(255, 255, 255, 0.8)';
this.previewCtx.lineWidth = 1;
this.previewCtx.setLineDash([2, 4]);
this.previewCtx.stroke();
// Use overlay canvas instead of preview canvas for brush cursor
const worldCoords = this.canvasInstance.lastMousePosition;
this.canvasInstance.canvasRenderer.drawMaskBrushCursor(worldCoords);
}
clearPreview(): void {
@@ -1548,6 +1674,27 @@ export class MaskTool {
log.info("Cleared all mask data from all chunks");
}
/**
 * Clears every chunk and restores the mask from a previously saved canvas.
 * Used by undo/redo so that no stale chunk data survives the restoration.
 *
 * @param savedMaskCanvas Mask snapshot captured by the state system; drawn
 *                        back at the output-area origin.
 */
restoreMaskFromSavedState(savedMaskCanvas: HTMLCanvasElement): void {
    // First, clear ALL chunks to ensure no leftover data
    this.clearAllMaskChunks();
    // Now apply the saved mask state to chunks (skip empty snapshots)
    if (savedMaskCanvas.width > 0 && savedMaskCanvas.height > 0) {
        // BUGFIX: place the snapshot at the output-area origin. The previous
        // code computed `bounds` but then passed `this.x`/`this.y`, which are
        // not MaskTool fields and resolved to undefined.
        const bounds = this.canvasInstance.outputAreaBounds;
        this.applyMaskCanvasToChunks(savedMaskCanvas, bounds.x, bounds.y);
    }
    // Update the active mask canvas to show the restored state
    this.updateActiveMaskCanvas(true);
    log.debug("Restored mask from saved state with clean chunk system");
}
getMask(): HTMLCanvasElement {
// Return the current active mask canvas which shows all chunks
// Only update if there are pending changes to avoid unnecessary redraws
@@ -1667,15 +1814,47 @@ export class MaskTool {
log.info(`Mask overlay visibility toggled to: ${this.isOverlayVisible}`);
}
setMask(image: HTMLImageElement): void {
// Clear existing mask chunks in the output area first
setMask(image: HTMLImageElement, isFromInputMask: boolean = false): void {
const bounds = this.canvasInstance.outputAreaBounds;
this.clearMaskInArea(bounds.x, bounds.y, image.width, image.height);
// Add the new mask using the chunk system
this.addMask(image);
log.info(`MaskTool set new mask using chunk system at bounds (${bounds.x}, ${bounds.y})`);
if (isFromInputMask) {
// For INPUT MASK - process black background to transparent using luminance
// Center like input images
const centerX = bounds.x + (bounds.width - image.width) / 2;
const centerY = bounds.y + (bounds.height - image.height) / 2;
// Prepare mask where alpha = luminance (white = applied, black = transparent)
const { canvas: maskCanvas, ctx } = createCanvas(image.width, image.height, '2d', { willReadFrequently: true });
if (!ctx) throw new Error("Could not create mask processing context");
ctx.drawImage(image, 0, 0);
const imgData = ctx.getImageData(0, 0, image.width, image.height);
const data = imgData.data;
for (let i = 0; i < data.length; i += 4) {
const r = data[i], g = data[i + 1], b = data[i + 2];
const lum = Math.round(0.299 * r + 0.587 * g + 0.114 * b);
data[i] = 255; // force white color (color channels ignored downstream)
data[i + 1] = 255;
data[i + 2] = 255;
data[i + 3] = lum; // alpha encodes mask strength: white -> strong, black -> 0
}
ctx.putImageData(imgData, 0, 0);
// Clear target area and apply to chunked system at centered position
this.clearMaskInArea(centerX, centerY, image.width, image.height);
this.applyMaskCanvasToChunks(maskCanvas, centerX, centerY);
// Refresh state and UI
this.updateActiveMaskCanvas(true);
this.canvasInstance.canvasState.saveMaskState();
this.canvasInstance.render();
log.info(`MaskTool set INPUT MASK at centered position (${centerX}, ${centerY}) using luminance as alpha`);
} else {
// For SAM Detector and other sources - just clear and add without processing
this.clearMaskInArea(bounds.x, bounds.y, bounds.width, bounds.height);
this.addMask(image);
log.info(`MaskTool set mask using chunk system at bounds (${bounds.x}, ${bounds.y})`);
}
}
/**

View File

@@ -7,6 +7,7 @@ import { uploadCanvasAsImage, uploadImageBlob } from "./utils/ImageUploadUtils.j
import { processImageToMask } from "./utils/MaskProcessingUtils.js";
import { convertToImage } from "./utils/ImageUtils.js";
import { updateNodePreview } from "./utils/PreviewUtils.js";
import { validateAndFixClipspace } from "./utils/ClipspaceUtils.js";
import type { ComfyNode } from './types';
const log = createModuleLogger('SAMDetectorIntegration');
@@ -281,36 +282,61 @@ async function handleSAMDetectorResult(node: ComfyNode, resultImage: HTMLImageEl
log.debug("Attempting to reload SAM result image");
const originalSrc = resultImage.src;
// Add cache-busting parameter to force fresh load
const url = new URL(originalSrc);
url.searchParams.set('_t', Date.now().toString());
await new Promise((resolve, reject) => {
const img = new Image();
img.crossOrigin = "anonymous";
img.onload = () => {
// Copy the loaded image data to the original image
resultImage.src = img.src;
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("SAM result image reloaded successfully", {
width: img.width,
height: img.height,
originalSrc: originalSrc,
newSrc: img.src
// Check if it's a data URL (base64) - don't add parameters to data URLs
if (originalSrc.startsWith('data:')) {
log.debug("Image is a data URL, skipping reload with parameters");
// For data URLs, just ensure the image is loaded
if (!resultImage.complete || resultImage.naturalWidth === 0) {
await new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => {
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("Data URL image loaded successfully", {
width: img.width,
height: img.height
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to load data URL image", error);
reject(error);
};
img.src = originalSrc; // Use original src without modifications
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to reload SAM result image", {
originalSrc: originalSrc,
newSrc: url.toString(),
error: error
});
reject(error);
};
img.src = url.toString();
});
}
} else {
// For regular URLs, add cache-busting parameter
const url = new URL(originalSrc);
url.searchParams.set('_t', Date.now().toString());
await new Promise((resolve, reject) => {
const img = new Image();
img.crossOrigin = "anonymous";
img.onload = () => {
// Copy the loaded image data to the original image
resultImage.src = img.src;
resultImage.width = img.width;
resultImage.height = img.height;
log.debug("SAM result image reloaded successfully", {
width: img.width,
height: img.height,
originalSrc: originalSrc,
newSrc: img.src
});
resolve(img);
};
img.onerror = (error) => {
log.error("Failed to reload SAM result image", {
originalSrc: originalSrc,
newSrc: url.toString(),
error: error
});
reject(error);
};
img.src = url.toString();
});
}
}
} catch (error) {
log.error("Failed to load image from SAM Detector.", error);
@@ -332,32 +358,43 @@ async function handleSAMDetectorResult(node: ComfyNode, resultImage: HTMLImageEl
// Apply mask to LayerForge canvas using MaskTool.setMask method
log.debug("Checking canvas and maskTool availability", {
hasCanvas: !!canvas,
hasCanvasProperty: !!canvas.canvas,
canvasCanvasKeys: canvas.canvas ? Object.keys(canvas.canvas) : [],
hasMaskTool: !!canvas.maskTool,
hasCanvasMaskTool: !!(canvas.canvas && canvas.canvas.maskTool),
maskToolType: typeof canvas.maskTool,
canvasMaskToolType: canvas.canvas ? typeof canvas.canvas.maskTool : 'undefined',
canvasKeys: Object.keys(canvas)
});
if (!canvas.maskTool) {
// Get the actual Canvas object and its maskTool
const actualCanvas = canvas.canvas || canvas;
const maskTool = actualCanvas.maskTool;
if (!maskTool) {
log.error("MaskTool is not available. Canvas state:", {
hasCanvas: !!canvas,
hasActualCanvas: !!actualCanvas,
canvasConstructor: canvas.constructor.name,
actualCanvasConstructor: actualCanvas ? actualCanvas.constructor.name : 'undefined',
canvasKeys: Object.keys(canvas),
maskToolValue: canvas.maskTool
actualCanvasKeys: actualCanvas ? Object.keys(actualCanvas) : [],
maskToolValue: maskTool
});
throw new Error("Mask tool not available or not initialized");
}
log.debug("Applying SAM mask to canvas using addMask method");
log.debug("Applying SAM mask to canvas using setMask method");
// Use the addMask method which overlays on existing mask without clearing it
canvas.maskTool.addMask(maskAsImage);
// Use the setMask method which clears existing mask and sets new one
maskTool.setMask(maskAsImage);
// Update canvas and save state (same as MaskEditorIntegration)
canvas.render();
canvas.saveState();
actualCanvas.render();
actualCanvas.saveState();
// Update node preview using PreviewUtils
await updateNodePreview(canvas, node, true);
await updateNodePreview(actualCanvas, node, true);
log.info("SAM Detector mask applied successfully to LayerForge canvas");
@@ -376,6 +413,9 @@ async function handleSAMDetectorResult(node: ComfyNode, resultImage: HTMLImageEl
}
// Store original onClipspaceEditorSave function to restore later
let originalOnClipspaceEditorSave: (() => void) | null = null;
// Function to setup SAM Detector hook in menu options
export function setupSAMDetectorHook(node: ComfyNode, options: any[]) {
// Hook into "Open in SAM Detector" with delay since Impact Pack adds it asynchronously
@@ -395,22 +435,67 @@ export function setupSAMDetectorHook(node: ComfyNode, options: any[]) {
log.info("Intercepted 'Open in SAM Detector' - automatically sending to clipspace and starting monitoring");
// Automatically send canvas to clipspace and start monitoring
if ((node as any).canvasWidget && (node as any).canvasWidget.canvas) {
const canvas = (node as any).canvasWidget; // canvasWidget IS the Canvas object
if ((node as any).canvasWidget) {
const canvasWidget = (node as any).canvasWidget;
const canvas = canvasWidget.canvas || canvasWidget; // Get actual Canvas object
// Use ImageUploadUtils to upload canvas
// Use ImageUploadUtils to upload canvas and get server URL (Impact Pack compatibility)
const uploadResult = await uploadCanvasAsImage(canvas, {
filenamePrefix: 'layerforge-sam',
nodeId: node.id
});
log.debug("Uploaded canvas for SAM Detector", {
filename: uploadResult.filename,
imageUrl: uploadResult.imageUrl,
width: uploadResult.imageElement.width,
height: uploadResult.imageElement.height
});
// Set the image to the node for clipspace
node.imgs = [uploadResult.imageElement];
(node as any).clipspaceImg = uploadResult.imageElement;
// Ensure proper clipspace structure for updated ComfyUI
if (!ComfyApp.clipspace) {
ComfyApp.clipspace = {};
}
// Set up clipspace with proper indices
ComfyApp.clipspace.imgs = [uploadResult.imageElement];
ComfyApp.clipspace.selectedIndex = 0;
ComfyApp.clipspace.combinedIndex = 0;
ComfyApp.clipspace.img_paste_mode = 'selected';
// Copy to ComfyUI clipspace
ComfyApp.copyToClipspace(node);
// Override onClipspaceEditorSave to fix clipspace structure before pasteFromClipspace
if (!originalOnClipspaceEditorSave) {
originalOnClipspaceEditorSave = ComfyApp.onClipspaceEditorSave;
ComfyApp.onClipspaceEditorSave = function() {
log.debug("SAM Detector onClipspaceEditorSave called, using unified clipspace validation");
// Use the unified clipspace validation function
const isValid = validateAndFixClipspace();
if (!isValid) {
log.error("Clipspace validation failed, cannot proceed with paste");
return;
}
// Call the original function
if (originalOnClipspaceEditorSave) {
originalOnClipspaceEditorSave.call(ComfyApp);
}
// Restore the original function after use
if (originalOnClipspaceEditorSave) {
ComfyApp.onClipspaceEditorSave = originalOnClipspaceEditorSave;
originalOnClipspaceEditorSave = null;
}
};
}
// Start monitoring for SAM Detector results
startSAMDetectorMonitoring(node);

View File

@@ -1,6 +1,14 @@
import type { Canvas as CanvasClass } from './Canvas';
import type { CanvasLayers } from './CanvasLayers';
export interface ComfyWidget {
name: string;
type: string;
value: any;
callback?: (value: any) => void;
options?: any;
}
export interface Layer {
id: string;
image: HTMLImageElement;
@@ -32,15 +40,16 @@ export interface Layer {
export interface ComfyNode {
id: number;
type: string;
widgets: ComfyWidget[];
imgs?: HTMLImageElement[];
widgets: any[];
size: [number, number];
graph: any;
canvasWidget?: any;
size?: [number, number];
onResize?: () => void;
addDOMWidget: (name: string, type: string, element: HTMLElement, options?: any) => any;
addWidget: (type: string, name: string, value: any, callback?: (value: any) => void, options?: any) => any;
setDirtyCanvas: (force: boolean, dirty: boolean) => void;
setDirtyCanvas?: (dirty: boolean, propagate: boolean) => void;
graph?: any;
onRemoved?: () => void;
addDOMWidget?: (name: string, type: string, element: HTMLElement) => void;
inputs?: Array<{ link: any }>;
}
declare global {
@@ -79,8 +88,14 @@ export interface Canvas {
imageCache: any;
dataInitialized: boolean;
pendingDataCheck: number | null;
pendingInputDataCheck: number | null;
pendingBatchContext: any;
canvasLayers: any;
inputDataLoaded: boolean;
lastLoadedLinkId: any;
lastLoadedMaskLinkId: any;
lastLoadedImageSrc?: string;
outputAreaBounds: OutputAreaBounds;
saveState: () => void;
render: () => void;
updateSelection: (layers: Layer[]) => void;

View File

@@ -1,6 +1,7 @@
import {createModuleLogger} from "./LoggerUtils.js";
import { showNotification, showInfoNotification } from "./NotificationUtils.js";
import { withErrorHandling, createValidationError, createNetworkError, createFileError } from "../ErrorHandler.js";
import { safeClipspacePaste } from "./ClipspaceUtils.js";
// @ts-ignore
import {api} from "../../../scripts/api.js";
@@ -56,7 +57,13 @@ export class ClipboardManager {
*/
tryClipspacePaste = withErrorHandling(async (addMode: AddMode): Promise<boolean> => {
log.info("Attempting to paste from ComfyUI Clipspace");
ComfyApp.pasteFromClipspace(this.canvas.node);
// Use the unified clipspace validation and paste function
const pasteSuccess = safeClipspacePaste(this.canvas.node);
if (!pasteSuccess) {
log.debug("Safe clipspace paste failed");
return false;
}
if (this.canvas.node.imgs && this.canvas.node.imgs.length > 0) {
const clipspaceImage = this.canvas.node.imgs[0];

114
src/utils/ClipspaceUtils.ts Normal file
View File

@@ -0,0 +1,114 @@
import { createModuleLogger } from "./LoggerUtils.js";
// @ts-ignore
import { ComfyApp } from "../../../scripts/app.js";
const log = createModuleLogger('ClipspaceUtils');
/**
 * Validates and repairs the ComfyUI clipspace structure so that subsequent
 * paste operations cannot hit 'Cannot read properties of undefined' errors.
 * All repairs are applied in place on ComfyApp.clipspace.
 * @returns {boolean} - True if clipspace is valid and ready to use, false otherwise
 */
export function validateAndFixClipspace(): boolean {
    log.debug("Validating and fixing clipspace structure");
    const cs = ComfyApp.clipspace;

    // Bail out early when there is nothing usable to paste.
    if (!cs) {
        log.debug("ComfyUI clipspace is not available");
        return false;
    }
    if (!cs.imgs || cs.imgs.length === 0) {
        log.debug("ComfyUI clipspace has no images");
        return false;
    }

    log.debug("Current clipspace state:", {
        hasImgs: !!cs.imgs,
        imgsLength: cs.imgs?.length,
        selectedIndex: cs.selectedIndex,
        combinedIndex: cs.combinedIndex,
        img_paste_mode: cs.img_paste_mode
    });

    // Default any missing indices / paste mode (== null catches undefined too).
    if (cs.selectedIndex == null) {
        cs.selectedIndex = 0;
        log.debug("Fixed clipspace selectedIndex to 0");
    }
    if (cs.combinedIndex == null) {
        cs.combinedIndex = 0;
        log.debug("Fixed clipspace combinedIndex to 0");
    }
    if (!cs.img_paste_mode) {
        cs.img_paste_mode = 'selected';
        log.debug("Fixed clipspace img_paste_mode to 'selected'");
    }

    // Clamp both indices into the range of actually available images.
    const maxIndex = cs.imgs.length - 1;
    if (cs.selectedIndex > maxIndex) {
        cs.selectedIndex = maxIndex;
        log.debug(`Fixed clipspace selectedIndex to ${maxIndex} (max available)`);
    }
    if (cs.combinedIndex > maxIndex) {
        cs.combinedIndex = maxIndex;
        log.debug(`Fixed clipspace combinedIndex to ${maxIndex} (max available)`);
    }

    // Make sure combinedIndex actually points at a loadable image; otherwise
    // retarget it to the first entry that has a src.
    const combinedImg = cs.imgs[cs.combinedIndex];
    if (!combinedImg || !combinedImg.src) {
        log.debug("Image at combinedIndex is missing or has no src, trying to find valid image");
        const i = cs.imgs.findIndex((img: any) => img && img.src);
        if (i !== -1) {
            cs.combinedIndex = i;
            log.debug(`Fixed combinedIndex to ${i} (first valid image)`);
        }
        // Final check - if still no valid image found, give up.
        const finalImg = cs.imgs[cs.combinedIndex];
        if (!finalImg || !finalImg.src) {
            log.error("No valid images found in clipspace after attempting fixes");
            return false;
        }
    }

    log.debug("Final clipspace structure:", {
        selectedIndex: cs.selectedIndex,
        combinedIndex: cs.combinedIndex,
        img_paste_mode: cs.img_paste_mode,
        imgsLength: cs.imgs?.length,
        combinedImgSrc: cs.imgs[cs.combinedIndex]?.src?.substring(0, 50) + '...'
    });
    return true;
}
/**
 * Safely calls ComfyApp.pasteFromClipspace after validating clipspace structure
 * @param {any} node - The ComfyUI node to paste to
 * @returns {boolean} - True if paste was successful, false otherwise
 */
export function safeClipspacePaste(node: any): boolean {
    log.debug("Attempting safe clipspace paste");

    // Refuse to paste from a clipspace that failed validation/repair.
    const clipspaceReady = validateAndFixClipspace();
    if (!clipspaceReady) {
        log.debug("Clipspace validation failed, cannot paste");
        return false;
    }

    let pasted = false;
    try {
        ComfyApp.pasteFromClipspace(node);
        log.debug("Successfully called pasteFromClipspace");
        pasted = true;
    } catch (error) {
        log.error("Error calling pasteFromClipspace:", error);
    }
    return pasted;
}

View File

@@ -386,3 +386,111 @@ export function canvasToMaskImage(canvas: HTMLCanvasElement): Promise<HTMLImageE
img.src = canvas.toDataURL();
});
}
/**
 * Scales an image to fit within the given bounds while maintaining aspect ratio.
 * The result is rasterized onto an offscreen canvas and returned as a new image.
 * @param image - Image to scale
 * @param targetWidth - Width of the bounding box to fit within
 * @param targetHeight - Height of the bounding box to fit within
 * @returns Promise resolving to the scaled HTMLImageElement
 */
export async function scaleImageToFit(image: HTMLImageElement, targetWidth: number, targetHeight: number): Promise<HTMLImageElement> {
    // Uniform scale factor that fits both dimensions inside the target box.
    const ratio = Math.min(targetWidth / image.width, targetHeight / image.height);
    // Never collapse to zero pixels, even for extreme downscales.
    const width = Math.max(1, Math.round(image.width * ratio));
    const height = Math.max(1, Math.round(image.height * ratio));

    const { canvas, ctx } = createCanvas(width, height, '2d', { willReadFrequently: true });
    if (!ctx) throw new Error("Could not create scaled image context");
    ctx.drawImage(image, 0, 0, width, height);

    const dataUrl = canvas.toDataURL();
    return new Promise((resolve, reject) => {
        const result = new Image();
        result.onload = () => resolve(result);
        result.onerror = reject;
        result.src = dataUrl;
    });
}
/**
 * Unified tensor to image data conversion.
 * Handles both RGB images and grayscale masks. Values are normalized from
 * [min_val, max_val] (defaulting to [0, 1]) into the 0-255 byte range.
 * @param tensor - Input tensor ({ shape, data, min_val?, max_val? });
 *                 shape is assumed to be [batch, height, width, channels?] — TODO confirm with callers
 * @param mode - 'rgb' for images or 'grayscale' for masks
 * @returns ImageData object, or null if conversion fails
 */
export function tensorToImageData(tensor: any, mode: 'rgb' | 'grayscale' = 'rgb'): ImageData | null {
    try {
        const shape = tensor.shape;
        const height = shape[1];
        const width = shape[2];
        const channels = shape[3] || 1; // Default to 1 for masks

        log.debug("Converting tensor:", { shape, channels, mode });

        const imageData = new ImageData(width, height);
        const data = imageData.data; // Write pixels directly; no intermediate buffer/copy needed
        const flatData = tensor.data;
        const pixelCount = width * height;

        const min = tensor.min_val ?? 0;
        const max = tensor.max_val ?? 1;
        const denom = (max - min) || 1; // Guard against division by zero on constant tensors

        // Single-channel data must render as grayscale regardless of mode:
        // previously 'rgb' mode copied the lone channel only into red,
        // leaving G/B at 0 and tinting the image red.
        const asGrayscale = mode === 'grayscale' || channels === 1;

        for (let i = 0; i < pixelCount; i++) {
            const pixelIndex = i * 4;
            const tensorIndex = i * channels;

            if (asGrayscale) {
                let lum: number;
                if (channels === 1) {
                    lum = flatData[tensorIndex];
                } else {
                    // Rec. 601 luma weights for RGB -> grayscale
                    const r = flatData[tensorIndex + 0] ?? 0;
                    const g = flatData[tensorIndex + 1] ?? 0;
                    const b = flatData[tensorIndex + 2] ?? 0;
                    lum = 0.299 * r + 0.587 * g + 0.114 * b;
                }
                let norm = (lum - min) / denom;
                if (!isFinite(norm)) norm = 0;
                norm = Math.max(0, Math.min(1, norm));
                const value = Math.round(norm * 255);
                // RGB = value, A = 255 (MaskTool reads luminance)
                data[pixelIndex] = value;
                data[pixelIndex + 1] = value;
                data[pixelIndex + 2] = value;
                data[pixelIndex + 3] = 255;
            } else {
                // Copy up to three channels; Uint8ClampedArray clamps out-of-range values
                for (let c = 0; c < Math.min(3, channels); c++) {
                    const channelNorm = (flatData[tensorIndex + c] - min) / denom;
                    data[pixelIndex + c] = Math.round(channelNorm * 255);
                }
                data[pixelIndex + 3] = 255;
            }
        }

        return imageData;
    } catch (error) {
        log.error("Error converting tensor:", error);
        return null;
    }
}
/**
 * Renders ImageData onto an offscreen canvas and loads it as an image element.
 * @param imageData - Input ImageData
 * @returns Promise with HTMLImageElement
 */
export async function createImageFromImageData(imageData: ImageData): Promise<HTMLImageElement> {
    const { width, height } = imageData;
    const { canvas, ctx } = createCanvas(width, height, '2d', { willReadFrequently: true });
    if (!ctx) throw new Error("Could not create canvas context");
    ctx.putImageData(imageData, 0, 0);
    return await createImageFromSource(canvas.toDataURL());
}