Path: blob/master/extensions-builtin/Lora/network_hada.py
2447 views
import lyco_helpers
import network


class ModuleTypeHada(network.ModuleType):
    """Recognizes HadaMod / LoHa networks by their characteristic weight keys."""

    def create_module(self, net: network.Network, weights: network.NetworkWeights):
        """Return a NetworkModuleHada when all four Hadamard factor matrices are
        present in ``weights.w``; otherwise return None so other module types
        can claim the weights."""
        required = ("hada_w1_a", "hada_w1_b", "hada_w2_a", "hada_w2_b")
        missing = [key for key in required if key not in weights.w]
        if missing:
            return None
        return NetworkModuleHada(net, weights)


class NetworkModuleHada(network.NetworkModule):
    """LoHa network module: the weight delta is the element-wise (Hadamard)
    product of two low-rank reconstructions, (w1a·w1b) * (w2a·w2b)."""

    def __init__(self, net: network.Network, weights: network.NetworkWeights):
        super().__init__(net, weights)

        # Remember the target layer's weight shape when it has one
        # (used downstream when finalizing the delta).
        if hasattr(self.sd_module, 'weight'):
            self.shape = self.sd_module.weight.shape

        w = weights.w
        self.w1a = w["hada_w1_a"]
        self.w1b = w["hada_w1_b"]
        self.dim = self.w1b.shape[0]  # low-rank dimension of the decomposition
        self.w2a = w["hada_w2_a"]
        self.w2b = w["hada_w2_b"]

        # Optional extra cores for the CP form (conv-style weights); None when absent.
        self.t1 = w.get("hada_t1")
        self.t2 = w.get("hada_t2")

    def calc_updown(self, orig_weight):
        """Compute the weight delta on the same device as *orig_weight* and
        hand it to finalize_updown together with the reconstructed shape."""
        device = orig_weight.device
        mat1a = self.w1a.to(device)
        mat1b = self.w1b.to(device)
        mat2a = self.w2a.to(device)
        mat2b = self.w2b.to(device)

        # Default: plain matrix-product reconstruction shape.
        output_shape = [mat1a.size(0), mat1b.size(1)]

        if self.t1 is None:
            # Conventional rebuild; 4-D factors carry conv kernel dims to append.
            if len(mat1b.shape) == 4:
                output_shape += mat1b.shape[2:]
            updown1 = lyco_helpers.rebuild_conventional(mat1a, mat1b, output_shape)
        else:
            # CP form: shape comes from the core tensor's trailing dims instead.
            output_shape = [mat1a.size(1), mat1b.size(1)]
            core1 = self.t1.to(device)
            updown1 = lyco_helpers.make_weight_cp(core1, mat1a, mat1b)
            output_shape += core1.shape[2:]

        if self.t2 is None:
            updown2 = lyco_helpers.rebuild_conventional(mat2a, mat2b, output_shape)
        else:
            updown2 = lyco_helpers.make_weight_cp(self.t2.to(device), mat2a, mat2b)

        # Hadamard product of the two reconstructions is the LoHa delta.
        updown = updown1 * updown2

        return self.finalize_updown(updown, orig_weight, output_shape)