Path: blob/master/extensions-builtin/Lora/network_lokr.py
import torch

import lyco_helpers
import network


class ModuleTypeLokr(network.ModuleType):
    def create_module(self, net: network.Network, weights: network.NetworkWeights):
        # Each Kronecker factor may be stored either as a full matrix (lokr_w1 / lokr_w2)
        # or as a low-rank pair (lokr_w*_a, lokr_w*_b).
        has_1 = "lokr_w1" in weights.w or ("lokr_w1_a" in weights.w and "lokr_w1_b" in weights.w)
        has_2 = "lokr_w2" in weights.w or ("lokr_w2_a" in weights.w and "lokr_w2_b" in weights.w)
        if has_1 and has_2:
            return NetworkModuleLokr(net, weights)

        return None


def make_kron(orig_shape, w1, w2):
    # For conv weights (4D w2), give w1 singleton kernel dims so torch.kron
    # also broadcasts over the spatial dimensions.
    if len(w2.shape) == 4:
        w1 = w1.unsqueeze(2).unsqueeze(2)
    w2 = w2.contiguous()
    return torch.kron(w1, w2).reshape(orig_shape)


class NetworkModuleLokr(network.NetworkModule):
    def __init__(self, net: network.Network, weights: network.NetworkWeights):
        super().__init__(net, weights)

        self.w1 = weights.w.get("lokr_w1")
        self.w1a = weights.w.get("lokr_w1_a")
        self.w1b = weights.w.get("lokr_w1_b")
        self.dim = self.w1b.shape[0] if self.w1b is not None else self.dim
        self.w2 = weights.w.get("lokr_w2")
        self.w2a = weights.w.get("lokr_w2_a")
        self.w2b = weights.w.get("lokr_w2_b")
        self.dim = self.w2b.shape[0] if self.w2b is not None else self.dim
        self.t2 = weights.w.get("lokr_t2")

    def calc_updown(self, orig_weight):
        # First Kronecker factor: stored whole, or rebuilt from its low-rank pair.
        if self.w1 is not None:
            w1 = self.w1.to(orig_weight.device)
        else:
            w1a = self.w1a.to(orig_weight.device)
            w1b = self.w1b.to(orig_weight.device)
            w1 = w1a @ w1b

        # Second Kronecker factor: stored whole, rebuilt from a low-rank pair,
        # or rebuilt from a decomposition core (lokr_t2) via make_weight_cp.
        if self.w2 is not None:
            w2 = self.w2.to(orig_weight.device)
        elif self.t2 is None:
            w2a = self.w2a.to(orig_weight.device)
            w2b = self.w2b.to(orig_weight.device)
            w2 = w2a @ w2b
        else:
            t2 = self.t2.to(orig_weight.device)
            w2a = self.w2a.to(orig_weight.device)
            w2b = self.w2b.to(orig_weight.device)
            w2 = lyco_helpers.make_weight_cp(t2, w2a, w2b)

        # Kronecker product of an (a, b) and a (c, d) factor has shape (a*c, b*d);
        # for conv weights, reuse the original 4D shape directly.
        output_shape = [w1.size(0) * w2.size(0), w1.size(1) * w2.size(1)]
        if len(orig_weight.shape) == 4:
            output_shape = orig_weight.shape

        updown = make_kron(output_shape, w1, w2)

        return self.finalize_updown(updown, orig_weight, output_shape)
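For reference, a minimal standalone sketch (plain torch, illustrative shapes only, not part of this module) of the Kronecker shape arithmetic that make_kron and calc_updown rely on: factors of shape (a, b) and (c, d) combine into an (a*c, b*d) update, which is why output_shape multiplies the factor dimensions element-wise.

import torch

# Illustrative shapes only: a 320x768 linear weight could be covered by a
# (4, 8) factor and an (80, 96) factor, since 4*80 = 320 and 8*96 = 768.
w1 = torch.randn(4, 8)
w2 = torch.randn(80, 96)

updown = torch.kron(w1, w2)
assert updown.shape == (w1.size(0) * w2.size(0), w1.size(1) * w2.size(1))  # (320, 768)

For a 4D conv weight, unsqueezing w1 to (a, b, 1, 1), as make_kron does, makes torch.kron produce (a*c, b*d, kh, kw), which is then reshaped to the original weight's shape.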