10 changes: 6 additions & 4 deletions src/BuiltinExtensions/ComfyUIBackend/WorkflowGenerator.cs
@@ -438,7 +438,7 @@ public string CreateLoadImageNode(ImageFile img, string param, bool resize, stri
return result;
}

-/// <summary>For <see cref="CreateImageMaskCrop(JArray, JArray, int, JArray, T2IModel, double, double)"/>.</summary>
+/// <summary>For <see cref="CreateImageMaskCrop(JArray, JArray, int, JArray, T2IModel, double, double, int, int, bool)"/>.</summary>
public record class ImageMaskCropData(string BoundsNode, string CroppedMask, string MaskedLatent, string ScaledImage);

/// <summary>Creates an automatic image mask-crop before sampling, to be followed by <see cref="RecompositeCropped(string, string, JArray, JArray)"/> after sampling.</summary>
@@ -490,12 +490,14 @@ public ImageMaskCropData CreateImageMaskCrop(JArray mask, JArray image, int grow
["width"] = NodePath(boundsNode, 2),
["height"] = NodePath(boundsNode, 3)
});
+int targetWidthAuto = isCustomRes ? targetX : model?.StandardWidth <= 0 ? UserInput.GetImageWidth() : model.StandardWidth;
+int targetHeightAuto = isCustomRes ? targetY : model?.StandardHeight <= 0 ? UserInput.GetImageHeight() : model.StandardHeight;
string scaledImage = CreateNode("SwarmImageScaleForMP", new JObject()
{
["image"] = NodePath(croppedImage, 0),
["width"] = isCustomRes ? targetX : model?.StandardWidth <= 0 ? UserInput.GetImageWidth() : model.StandardWidth,
["height"] = isCustomRes ? targetY : model?.StandardHeight <= 0 ? UserInput.GetImageHeight() : model.StandardHeight,
["can_shrink"] = true
["width"] = scaleWidth > 0 ? scaleWidth : targetWidthAuto,
["height"] = scaleHeight > 0 ? scaleHeight : targetHeightAuto,
["can_shrink"] = canShrink
});
JArray encoded = DoMaskedVAEEncode(vae, [scaledImage, 0], [croppedMask, 0], null);
return new(boundsNode, croppedMask, $"{encoded[0]}", scaledImage);
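The caller in the next hunk passes the three new arguments by name. As a quick orientation, here is a minimal usage sketch (an editor's illustration, not code from this PR); it assumes that 0 for scaleWidth/scaleHeight keeps the previous automatic sizing, as the new ternaries above suggest, and that canShrink feeds the node's can_shrink input directly.

```csharp
// Editor's sketch of the extended call. Positional arguments match the existing call site;
// the named arguments are the ones added by this PR.
// scaleWidth/scaleHeight: 0 = keep the old automatic target (custom resolution, else the
// model's standard size, else the requested image size); any positive value overrides it.
// canShrink: false presumably limits SwarmImageScaleForMP to upscaling only.
ImageMaskCropData crop = g.CreateImageMaskCrop(g.FinalMask, g.FinalInputImage, shrinkGrow, g.FinalVae, g.FinalLoadedModel,
    scaleWidth: 1024, scaleHeight: 1024, canShrink: false);
```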
@@ -443,6 +443,9 @@ public static void Register()
if (g.UserInput.TryGet(T2IParamTypes.MaskShrinkGrow, out int shrinkGrow))
{
-g.MaskShrunkInfo = g.CreateImageMaskCrop(g.FinalMask, g.FinalInputImage, shrinkGrow, g.FinalVae, g.FinalLoadedModel);
+g.MaskShrunkInfo = g.CreateImageMaskCrop(g.FinalMask, g.FinalInputImage, shrinkGrow, g.FinalVae, g.FinalLoadedModel,
+    scaleWidth: g.UserInput.Get(T2IParamTypes.InitImageScaleForMPWidth, 0),
+    scaleHeight: g.UserInput.Get(T2IParamTypes.InitImageScaleForMPHeight, 0),
+    canShrink: g.UserInput.Get(T2IParamTypes.InitImageScaleForMPCanShrink, true));
currentMask = [g.MaskShrunkInfo.CroppedMask, 0];
g.FinalLatentImage = [g.MaskShrunkInfo.MaskedLatent, 0];
}
14 changes: 11 additions & 3 deletions src/Text2Image/T2IParamTypes.cs
@@ -313,15 +313,14 @@ public static string ApplyStringEdit(string prior, string update)
}

public static T2IRegisteredParam<string> Prompt, NegativePrompt, AspectRatio, BackendType, RefinerMethod, FreeUApplyTo, FreeUVersion, PersonalNote, VideoFormat, VideoResolution, UnsamplerPrompt, ImageFormat, MaskBehavior, ColorCorrectionBehavior, RawResolution, SeamlessTileable, SD3TextEncs, BitDepth, Webhooks, Text2VideoFormat, WildcardSeedBehavior, SegmentSortOrder, SegmentTargetResolution, SegmentApplyAfter, TorchCompile, VideoExtendFormat, ExactBackendID, OverridePredictionType, OverrideOutpathFormat, Text2AudioTimeSignature, Text2AudioLanguage, Text2AudioKeyScale, Text2AudioStyle;
-public static T2IRegisteredParam<int> Images, Steps, Width, Height, SideLength, BatchSize, VAETileSize, VAETileOverlap, VAETemporalTileSize, VAETemporalTileOverlap, ClipStopAtLayer, VideoFrames, VideoMotionBucket, VideoFPS, VideoSteps, RefinerSteps, CascadeLatentCompression, MaskShrinkGrow, MaskBlur, MaskGrow, SegmentMaskBlur, SegmentMaskGrow, SegmentMaskOversize, SegmentSteps, Text2VideoFrames, TrimVideoStartFrames, TrimVideoEndFrames, VideoExtendFrameOverlap;
-public static T2IRegisteredParam<long> Seed, VariationSeed, WildcardSeed, Text2AudioBPM;
+public static T2IRegisteredParam<int> Images, Steps, Width, Height, SideLength, BatchSize, VAETileSize, VAETileOverlap, VAETemporalTileSize, VAETemporalTileOverlap, ClipStopAtLayer, VideoFrames, VideoMotionBucket, VideoFPS, VideoSteps, RefinerSteps, CascadeLatentCompression, MaskShrinkGrow, MaskBlur, MaskGrow, SegmentMaskBlur, SegmentMaskGrow, SegmentMaskOversize, SegmentSteps, Text2VideoFrames, TrimVideoStartFrames, TrimVideoEndFrames, VideoExtendFrameOverlap; public static T2IRegisteredParam<long> Seed, VariationSeed, WildcardSeed, Text2AudioBPM;
public static T2IRegisteredParam<double> CFGScale, VariationSeedStrength, InitImageCreativity, InitImageResetToNorm, InitImageNoise, RefinerControl, RefinerUpscale, RefinerCFGScale, ReVisionStrength, AltResolutionHeightMult,
FreeUBlock1, FreeUBlock2, FreeUSkip1, FreeUSkip2, GlobalRegionFactor, EndStepsEarly, SamplerSigmaMin, SamplerSigmaMax, SamplerRho, VideoAugmentationLevel, VideoCFG, VideoMinCFG, Video2VideoCreativity, VideoSwapPercent, VideoExtendSwapPercent, IP2PCFG2, RegionalObjectCleanupFactor, SigmaShift, SegmentThresholdMax, SegmentCFGScale, FluxGuidanceScale, Text2AudioDuration;
public static T2IRegisteredParam<Image> InitImage, MaskImage, VideoEndFrame;
public static T2IRegisteredParam<T2IModel> Model, RefinerModel, VAE, RegionalObjectInpaintingModel, SegmentModel, VideoModel, VideoSwapModel, RefinerVAE, ClipLModel, ClipGModel, ClipVisionModel, T5XXLModel, LLaVAModel, LLaMAModel, QwenModel, MistralModel, GemmaModel, VideoExtendModel, VideoExtendSwapModel;
public static T2IRegisteredParam<List<string>> Loras, LoraWeights, LoraTencWeights, LoraSectionConfinement;
public static T2IRegisteredParam<List<Image>> PromptImages;
-public static T2IRegisteredParam<bool> OutputIntermediateImages, DoNotSave, DoNotSaveIntermediates, ControlNetPreviewOnly, RevisionZeroPrompt, RemoveBackground, NoSeedIncrement, NoPreviews, VideoBoomerang, ModelSpecificEnhancements, UseInpaintingEncode, MaskCompositeUnthresholded, SaveSegmentMask, InitImageRecompositeMask, UseReferenceOnly, RefinerDoTiling, AutomaticVAE, ZeroNegative, FluxDisableGuidance, SmartImagePromptResizing, NoLoadModels, NoInternalSpecialHandling, ForwardRawBackendData, ForwardSwarmData,
+public static T2IRegisteredParam<bool> OutputIntermediateImages, DoNotSave, DoNotSaveIntermediates, ControlNetPreviewOnly, RevisionZeroPrompt, RemoveBackground, NoSeedIncrement, NoPreviews, VideoBoomerang, ModelSpecificEnhancements, UseInpaintingEncode, MaskCompositeUnthresholded, SaveSegmentMask, InitImageRecompositeMask, InitImageScaleForMPCanShrink, UseReferenceOnly, RefinerDoTiling, AutomaticVAE, ZeroNegative, FluxDisableGuidance, SmartImagePromptResizing, NoLoadModels, NoInternalSpecialHandling, ForwardRawBackendData, ForwardSwarmData,
PlaceholderParamGroupStarred, PlaceholderParamGroupUser1, PlaceholderParamGroupUser2, PlaceholderParamGroupUser3;

public static T2IParamGroup GroupImagePrompting, GroupCore, GroupVariation, GroupResolution, GroupSampling, GroupInitImage, GroupRefiners, GroupRefinerOverrides,
@@ -489,6 +488,15 @@ public static void RegisterDefaults()
MaskShrinkGrow = Register<int>(new("Mask Shrink Grow", "If enabled, the image will be shrunk to just the mask, and then grow by this value many pixels.\nAfter that, the generation process will run in full, and the image will be composited back into the original image at the end.\nThis allows for refining small details of an image more effectively.\nThis is also known as 'Inpaint Only Masked'.\nLarger values increase the surrounding context the generation receives, lower values contain it tighter and allow the AI to create more detail.",
"8", Toggleable: true, Min: 0, Max: 512, OrderPriority: -3.7, Group: GroupInitImage, Examples: ["0", "8", "32"], DependNonDefault: MaskImage.Type.ID
));
InitImageScaleForMPWidth = Register<int>(new("Init Image Scale Width", "Optional override for the width input to the internal SwarmImageScaleForMP node used by masked init-image crop processing.\nSet to 0 to auto-select width from model/current generation settings.",
"0", IgnoreIf: "0", Min: 0, Max: 8192, OrderPriority: -3.69, Group: GroupInitImage, DependNonDefault: MaskShrinkGrow.Type.ID, Examples: ["0", "512", "768", "1024"]
));
InitImageScaleForMPHeight = Register<int>(new("Init Image Scale Height", "Optional override for the height input to the internal SwarmImageScaleForMP node used by masked init-image crop processing.\nSet to 0 to auto-select height from model/current generation settings.",
"0", IgnoreIf: "0", Min: 0, Max: 8192, OrderPriority: -3.68, Group: GroupInitImage, DependNonDefault: MaskShrinkGrow.Type.ID, Examples: ["0", "512", "768", "1024"]
));
InitImageScaleForMPCanShrink = Register<bool>(new("Init Image Scale Can Shrink", "If enabled, masked init-image crop scaling can shrink images to fit the target megapixel size.\nIf disabled, it only scales up (or keeps the current size).",
"true", IgnoreIf: "true", Group: GroupInitImage, OrderPriority: -3.67, DependNonDefault: MaskShrinkGrow.Type.ID
));
MaskBlur = Register<int>(new("Mask Blur", "If enabled, the mask will be blurred by this blur factor.\nThis makes the transition for the new image smoother.\nSet to 0 to disable.",
"4", IgnoreIf: "0", Min: 0, Max: 64, OrderPriority: -3.6, Group: GroupInitImage, Examples: ["0", "4", "8", "16"], DependNonDefault: MaskImage.Type.ID
));
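To make the width/height inputs above more concrete, here is a hedged sketch (an editor's addition, not part of the PR) of one plausible "scale for megapixels" computation. It assumes SwarmImageScaleForMP targets the pixel area of width times height while preserving the crop's aspect ratio; the class, method, and variable names here are hypothetical.

```csharp
using System;

public static class ScaleForMPSketch
{
    // Hypothetical sketch of a megapixel-targeting rescale: match the pixel area of
    // (targetWidth x targetHeight) while keeping the source crop's aspect ratio.
    public static (int Width, int Height) Resolve(int srcWidth, int srcHeight, int targetWidth, int targetHeight, bool canShrink)
    {
        double factor = Math.Sqrt((targetWidth * (double)targetHeight) / (srcWidth * (double)srcHeight));
        if (!canShrink)
        {
            factor = Math.Max(1.0, factor); // when shrinking is disallowed, only ever scale up
        }
        return ((int)Math.Round(srcWidth * factor), (int)Math.Round(srcHeight * factor));
    }
}
```

Under that assumption, setting both overrides to 1024 pins the crop to roughly one megapixel, while leaving them at 0 lets the model's standard resolution (or the requested image size) decide.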
134 changes: 133 additions & 1 deletion src/wwwroot/js/genpage/gentab/currentimagehandler.js
@@ -756,7 +756,136 @@ function toggleStar(path, rawSrc) {
});
}

defaultButtonChoices = 'Use As Init,Edit Image,Star,Reuse Parameters';
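// Builds the request body for a manual save: prefer the parameters stored in the image's own
// metadata (sui_image_params / sui_extra_data), and fall back to the current UI parameters if
// the metadata is missing or can't be parsed. Always forces donotsave=false and images=1.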
function getSaveInputFromMetadata(metadata) {
let fallback = getGenInput();
fallback.donotsave = false;
fallback.images = 1;
if (!metadata) {
return fallback;
}
try {
let readable = interpretMetadata(metadata);
let metaObj = readable ? JSON.parse(readable) : null;
if (!metaObj?.sui_image_params || typeof metaObj.sui_image_params != 'object') {
return fallback;
}
let input = { ...metaObj.sui_image_params };
input.donotsave = false;
input.images = 1;
if (metaObj.sui_extra_data && typeof metaObj.sui_extra_data == 'object' && !Array.isArray(metaObj.sui_extra_data)) {
input.extra_metadata = { ...metaObj.sui_extra_data };
}
return input;
}
catch (e) {
console.log(`Failed to parse metadata for manual save, using current params instead: ${e}`);
return fallback;
}
}

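// Saves the currently displayed (data-URL) image into server-side history via 'AddImageToHistory',
// then re-points the current image, any matching batch thumbnails, and the full-size viewer at the
// saved file, and refreshes the history browser.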
function saveCurrentImageToHistory(img, button = null) {
if (!img || img.tagName != 'IMG') {
showError('Manual save is only supported for images.');
return;
}
if (button) {
button.disabled = true;
}
let oldSrc = img.dataset.src || img.src;
let batchId = img.dataset.batch_id || '';
let requestData = getSaveInputFromMetadata(img.dataset.metadata || currentMetadataVal || '');
let releaseButton = () => {
if (!button) {
return;
}
button.disabled = false;
};
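// Polls the image history browser until the saved file shows up: light refreshes first,
// escalating to a full refresh, with a capped number of retries and growing delays.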
let waitForHistoryToContain = (savedPath) => {
if (typeof imageHistoryBrowser === 'undefined') {
return;
}
let expected = getImageFullSrc(savedPath);
if (!expected) {
return;
}
let attempts = 0;
let maxAttempts = 8;
let hasExpected = () => {
if (!imageHistoryBrowser?.lastFiles) {
return false;
}
for (let file of imageHistoryBrowser.lastFiles) {
if (!file) {
continue;
}
if (file.name == expected) {
return true;
}
let fileSrc = file.data?.fullsrc || file.data?.src || file.name;
if (getImageFullSrc(fileSrc) == expected) {
return true;
}
}
return false;
};
let tryRefresh = () => {
if (hasExpected()) {
return;
}
attempts++;
if (attempts <= 6 && imageHistoryBrowser?.lightRefresh) {
imageHistoryBrowser.lightRefresh();
}
else if (imageHistoryBrowser?.refresh) {
imageHistoryBrowser.refresh();
}
if (attempts < maxAttempts) {
let delay = attempts < 4 ? 250 : 500;
setTimeout(tryRefresh, delay);
}
};
setTimeout(tryRefresh, 100);
};
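// Sends the image data plus the reconstructed parameters to the backend, then swaps every UI
// reference from the temporary data URL over to the saved file path returned by the server.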
let finish = (imageData) => {
requestData.image = imageData;
genericRequest('AddImageToHistory', requestData, res => {
releaseButton();
let saved = res.images?.[0];
if (!saved?.image) {
showError('Image save did not return an output file.');
return;
}
let savedMetadata = saved.metadata || img.dataset.metadata || currentMetadataVal || '{}';
setCurrentImage(saved.image, savedMetadata, batchId);
let batchContainer = document.getElementById('current_image_batch');
if (batchContainer) {
for (let block of batchContainer.getElementsByClassName('image-block')) {
if (block.dataset.src == oldSrc) {
block.dataset.src = saved.image;
block.dataset.metadata = savedMetadata;
let blockImg = block.querySelector('img');
if (blockImg) {
blockImg.src = saved.image;
}
}
}
}
if (imageFullView.isOpen() && imageFullView.currentSrc == oldSrc) {
let state = imageFullView.copyState();
imageFullView.showImage(saved.image, savedMetadata, imageFullView.currentBatchId);
imageFullView.pasteState(state);
}
imageHistoryBrowser.refresh();
doNoticePopover('Saved image and metadata.', 'notice-pop-green');
}, 0, error => {
releaseButton();
showError(error);
});
};
finish(img.src);
}

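// The new 'Save Image' action is added to the default set of current-image buttons.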
defaultButtonChoices = 'Use As Init,Edit Image,Star,Reuse Parameters,Save Image';

function getImageFullSrc(src) {
if (src == null) {
@@ -1095,6 +1224,9 @@ function setCurrentImage(src, metadata = '', batchId = '', previewGrow = false,
}, (metaParsed.is_starred ? ' star-button button-starred-image' : ' star-button'), 'Toggles this image as starred - starred images get moved to a separate folder and highlighted');
}
includeButton('Reuse Parameters', copy_current_image_params, '', 'Copies the parameters used to generate this image to the current generation settings');
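// Manual save is only offered for still images that exist purely as in-browser data URLs.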
if (isDataImage && !isVideo && !isAudio) {
includeButton('Save Image', button => saveCurrentImageToHistory(img, button), '', 'Saves this image and metadata into history. Useful when Do Not Save is enabled.');
}
if (!isDataImage) {
includeButton('View In History', () => {
let folder = imagePathClean;