Skip to content

Instantly share code, notes, and snippets.

View laksjdjf's full-sized avatar
🌏
On Earth

laksjdjf

🌏
On Earth
View GitHub Profile
# https://github.com/huggingface/transformers/blob/838b87abe231fd70be5132088d0dee72a7bb8d62/src/transformers/models/opt/modeling_opt.py#L147
"""
model = AutoModelForCausalLM.from_pretrained("p1atdev/dart-v1-sft")
apply_hook(model)
"""
import torch
import torch.nn as nn
def forward_hooker(self):
# https://huggingface.co/shadowlilac/aesthetic-shadow-v2
from transformers import pipeline
import torch
from PIL import Image
from comfy.ldm.modules.attention import optimized_attention
def optimized_forward(self):
def forward(hidden_states, head_mask = None, output_attentions = False):
query = self.query(hidden_states)
def make_unet_conversion_map():
unet_conversion_map_layer = []
# unet
# https://github.com/kohya-ss/sd-scripts/blob/2d7389185c021bc527b414563c245c5489d6328a/library/sdxl_model_util.py#L293
for i in range(3): # num_blocks is 3 in sdxl
# loop over downblocks/upblocks
for j in range(2):
# loop over resnets/attentions for downblocks
hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}."
sd_down_res_prefix = f"input_blocks.{3*i + j + 1}.0."
import torch
class VisualStylePrompting:
@classmethod
def INPUT_TYPES(s):
return {"required": {
"model": ("MODEL",),
"reference": ("LATENT",),
"depth": ("INT", {"default": 0, "min": -1, "max": 12}),
"batch_size": ("INT", {"default": 1, "min": 1, "max": 64}),
# ref: ScaleCrafter https://github.com/YingqingHe/ScaleCrafter
import math
import comfy.ops
import torch.nn.functional as F
ops = comfy.ops.disable_weight_init
class ScaleCrafter:
@classmethod
def INPUT_TYPES(s):
@laksjdjf
laksjdjf / dilate_conv.py
Last active March 6, 2024 11:35
Reference from ScaleCrafter[https://arxiv.org/abs/2310.07702]
# https://arxiv.org/abs/2310.07702
import comfy.ops
ops = comfy.ops.disable_weight_init
class DilateConv:
@classmethod
def INPUT_TYPES(s):
return {
"required": {
===========================================================================================================================================================
Layer (type (var_name)) Input Shape Output Shape Param # Kernel Shape
===========================================================================================================================================================
StableCascadeUnet (StableCascadeUnet) -- [1, 4, 256, 256] -- 3
├─Linear (clip_txt_pooled_mapper) [1, 1, 1280] [1, 1, 5120] 6,558,720 --
├─LayerNorm (clip_norm) [1, 4, 1280] [1, 4, 1280] -- --
├─Sequential (embedding) [1, 4, 256, 256] [1, 320, 128, 128] -- --
│ └─PixelUnshuf
===========================================================================================================================================================
Layer (type (var_name)) Input Shape Output Shape Param # Kernel Shape
===========================================================================================================================================================
StableCascadeUnet (StableCascadeUnet) [2, 16, 24, 24] [2, 16, 24, 24] 8,923,136 3
├─Linear (clip_txt_pooled_mapper) [2, 77, 1280] [2, 77, 8192] 10,493,952 --
├─LayerNorm (clip_norm) [2, 308, 2048] [2, 308, 2048] -- --
├─Sequential (embedding) [2, 16, 24, 24] [2, 2048, 24, 24] -- --
│ └─PixelUnshuf
'''
load from sampling/custom_sampling/schedulers
input text like "999,893,...,156"
connect to SamplerCustom
'''
import torch
class TextScheduler:
@classmethod
@laksjdjf
laksjdjf / LCMSamplerRCFG.py
Last active December 21, 2023 12:02
Implementation of RCFG in https://arxiv.org/abs/2312.12491
'''
Implementation of RCFG in https://arxiv.org/abs/2312.12491
Node is in sampling/custom_sampling/samplers
original_latent is OPTIONAL
If original_latent is set, it is Self-Negative else Onetime-Negative
cfg is recommended to be near 1.0 (KSAMPLER's cfg is ignored)
delta is not well understood (author's note: unclear what it does)
'''
from comfy.samplers import KSAMPLER