laksjdjf
'''
https://gist.github.com/kohya-ss/3f774da220df102548093a7abc8538ed
1. put this file in ComfyUI/custom_nodes
2. load node from <loaders>
'''
import torch
from comfy.ldm.modules.diffusionmodules.openaimodel import forward_timestep_embed, timestep_embedding, th
def apply_control(h, control, name):
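    # continuation sketch: the preview cuts off here. This follows ComfyUI's
    # helper of the same name: pop the next ControlNet residual registered for
    # this block ("input" / "middle" / "output") and add it to the hidden state.
    if control is not None and name in control and len(control[name]) > 0:
        ctrl = control[name].pop()
        if ctrl is not None:
            h += ctrl
    return h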
================================================================================================================================================================
Layer (type (var_name))                              Input Shape          Output Shape         Param #        Kernel Shape
================================================================================================================================================================
Transformer2DModel (Transformer2DModel)              [1, 4, 32, 32]       [1, 8, 32, 32]       2,304          --
├─PatchEmbed (pos_embed)                             [1, 4, 32, 32]       [1, 256, 1152]       --             --
│    └─Conv2d (proj)                                 [1, 4, 32, 32]       [1, 1152, 16, 16]    19,584         [2, 2]
├─AdaLayerNormSingle (adaln_single)                  [1]                  [1, 6912]            --             --
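The table above is torchinfo-style output. A minimal sketch of producing such a dump, assuming that tool (model stands in for the Transformer2DModel instance; a diffusion transformer also needs timestep and conditioning inputs, which torchinfo accepts via input_data):
from torchinfo import summary  # assumed: the gist does not name the tool

summary(
    model,
    input_size=(1, 4, 32, 32),  # matches the Input Shape column above
    col_names=("input_size", "output_size", "num_params", "kernel_size"),
    depth=2,
)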
# ref:https://github.com/v0xie/sd-webui-cads
'''
1. put this file in ComfyUI/custom_nodes
2. load node from <loader>
'''
import torch
import numpy as np
import copy
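CADS anneals the conditioning by mixing in scheduled Gaussian noise. A minimal sketch of the schedule and perturbation from the CADS paper that the linked repo implements (function and parameter names here are illustrative, not the gist's):
def cads_gamma(t, tau1=0.6, tau2=0.9):
    # piecewise annealing schedule from the CADS paper: gamma = 1 keeps the
    # condition intact, gamma = 0 replaces it with pure noise
    if t <= tau1:
        return 1.0
    if t >= tau2:
        return 0.0
    return (tau2 - t) / (tau2 - tau1)

def cads_perturb(cond, t, s=0.25, psi=1.0, rescale=True):
    # y_hat = sqrt(gamma) * y + s * sqrt(1 - gamma) * n  (from the paper)
    gamma = cads_gamma(t)
    y_hat = gamma ** 0.5 * cond + s * (1.0 - gamma) ** 0.5 * torch.randn_like(cond)
    if rescale:
        # renormalize to the input's statistics, then blend by psi
        y_rescaled = (y_hat - y_hat.mean()) / y_hat.std() * cond.std() + cond.mean()
        y_hat = psi * y_rescaled + (1.0 - psi) * y_hat
    return y_hat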
{'down_blocks.1.attentions.0.transformer_blocks.0': 'down_blocks.1.attentions.0.transformer_blocks.0',
'down_blocks.1.attentions.0.transformer_blocks.1': 'down_blocks.1.attentions.0.transformer_blocks.1',
'down_blocks.1.attentions.1.transformer_blocks.0': 'down_blocks.1.attentions.1.transformer_blocks.0',
'down_blocks.1.attentions.1.transformer_blocks.1': 'down_blocks.1.attentions.1.transformer_blocks.1',
'down_blocks.2.attentions.0.transformer_blocks.0': 'down_blocks.2.attentions.0.transformer_blocks.0',
'down_blocks.2.attentions.0.transformer_blocks.1': 'down_blocks.2.attentions.0.transformer_blocks.1',
'down_blocks.2.attentions.0.transformer_blocks.2': 'down_blocks.2.attentions.0.transformer_blocks.2',
'down_blocks.2.attentions.0.transformer_blocks.3': 'down_blocks.2.attentions.0.transformer_blocks.6',
'down_blocks.2.attentions.1.transformer_blocks.0': 'down_blocks.2.attentions.1.transformer_blocks.0',
'down_blocks.2.attentions.1.transformer_blocks.1': 'down_blocks.2.attentions.1.transformer_block
# ref:https://github.com/tfernd/HyperTile
from einops import rearrange
'''
1. put this file in ComfyUI/custom_nodes
2. load node from <loader>
3. set nh and nw (2-4 is recommended; setting 1 behaves the same as the original)
'''
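HyperTile's speedup comes from one rearrange: the self-attention token sequence is split into nh x nw spatial tiles that are folded into the batch axis, so attention runs on small tiles instead of the full map. A sketch of that reshape using the rearrange imported above (illustrative, not the gist's exact code):
def hypertile_qkv(x, h, w, nh, nw):
    # x: [batch, h*w, channels] self-attention tokens;
    # h and w must be divisible by nh and nw
    return rearrange(x, "b (nh h nw w) c -> (b nh nw) (h w) c",
                     h=h // nh, w=w // nw, nh=nh, nw=nw)

def hypertile_out(x, h, w, nh, nw):
    # inverse reshape applied to the attention output
    return rearrange(x, "(b nh nw) (h w) c -> b (nh h nw w) c",
                     h=h // nh, w=w // nw, nh=nh, nw=nw)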
import torch
import numpy as np
from rembg import remove
class RembgMask:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE", ),
laksjdjf / freeu.py
Last active September 23, 2023 01:03
'''
1. put this file in ComfyUI/custom_nodes
2. load node from <loader>
'''
import torch
from comfy.ldm.modules.diffusionmodules.openaimodel import forward_timestep_embed, timestep_embedding, th
# https://github.com/ChenyangSi/FreeU
def Fourier_filter(x, threshold, scale):
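    # body sketch following the reference implementation in the FreeU repo
    # linked above: move to the frequency domain, damp (or boost) the centred
    # low-frequency box by `scale`, and transform back
    x_freq = torch.fft.fftn(x, dim=(-2, -1))
    x_freq = torch.fft.fftshift(x_freq, dim=(-2, -1))
    B, C, H, W = x_freq.shape
    mask = torch.ones((B, C, H, W), device=x.device)
    crow, ccol = H // 2, W // 2
    mask[..., crow - threshold:crow + threshold, ccol - threshold:ccol + threshold] = scale
    x_freq = x_freq * mask
    x_freq = torch.fft.ifftshift(x_freq, dim=(-2, -1))
    return torch.fft.ifftn(x_freq, dim=(-2, -1)).real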
import torch
import comfy
import copy
def chunk_or_none(x, chunk_size, index):
    if x is None:
        return None
    return x.chunk(chunk_size, dim=0)[index]

def chunk_or_none_for_control(x, chunk_size, index):
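    # continuation sketch (assumed, the preview is cut here): apply the same
    # per-sample chunking to every residual in a ControlNet output dict, which
    # in ComfyUI maps block names to lists of tensors
    if x is None:
        return None
    return {name: [chunk_or_none(t, chunk_size, index) for t in tensors]
            for name, tensors in x.items()}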
# python convert_lora_sdxl.py <input_file_name> <output_file_name> <sd2diff or diff2sd>
import torch
from safetensors.torch import load_file
from safetensors.torch import save_file
import os
import argparse
def load(file):
    if os.path.splitext(file)[1] == ".safetensors":
        return load_file(file)
    # assumed fallback for non-safetensors checkpoints (.pt / .ckpt)
    return torch.load(file, map_location="cpu")
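The preview stops inside load; the save counterpart and the CLI wiring implied by the usage comment might look like this (a sketch, argument names are illustrative):
def save(state_dict, file):
    # mirror of load (assumed): pick the writer from the file extension
    if os.path.splitext(file)[1] == ".safetensors":
        save_file(state_dict, file)
    else:
        torch.save(state_dict, file)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("input_file_name")
    parser.add_argument("output_file_name")
    parser.add_argument("direction", choices=["sd2diff", "diff2sd"])
    args = parser.parse_args()
    state_dict = load(args.input_file_name)
    # ... convert keys here (the mapping is the substance of the full gist) ...
    save(state_dict, args.output_file_name)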
================================================================================================================================================================
Layer (type (var_name))                                   Input Shape          Output Shape         Param #        Kernel Shape
================================================================================================================================================================
SdxlUNet2DConditionModel (SdxlUNet2DConditionModel)       [1, 4, 128, 128]     [1, 4, 128, 128]     --             --
├─Sequential (time_embed)                                 [1, 320]             [1, 1280]            --             --
│    └─Linear (0)                                         [1, 320]             [1, 1280]            410,880       --
│    └─SiLU (1)                                           [1, 1280]            [1, 1280]            --             --