Fix clip clone bug

This commit is contained in:
doctorpangloss 2024-05-17 07:17:33 -07:00
parent b318b4cc28
commit 4eb66f8a0a

View File

@@ -1,3 +1,5 @@
import copy
import torch
from . import sd1_clip
@@ -37,6 +39,12 @@ class SDXLTokenizer:
def untokenize(self, token_weight_pair):
    """Map token/weight pairs back to text by delegating to the CLIP-G tokenizer."""
    decoded = self.clip_g.untokenize(token_weight_pair)
    return decoded
def clone(self):
    """Return a copy of this tokenizer with independently cloned sub-tokenizers.

    A shallow copy of self is taken first, then the clip_l and clip_g
    sub-tokenizers are replaced with their own clones so the copy does
    not share them with the original.
    """
    duplicate = copy.copy(self)
    duplicate.clip_l = self.clip_l.clone()
    duplicate.clip_g = self.clip_g.clone()
    return duplicate
class SDXLClipModel(torch.nn.Module):
def __init__(self, device="cpu", dtype=None):