# extensions-builtin/Lora/network_norm.py
import network


class ModuleTypeNorm(network.ModuleType):
    """Detector for normalization-style network weights ("w_norm"/"b_norm")."""

    def create_module(self, net: network.Network, weights: network.NetworkWeights):
        """Return a NetworkModuleNorm if both norm tensors exist, else None."""
        has_norm_pair = "w_norm" in weights.w and "b_norm" in weights.w
        if not has_norm_pair:
            return None

        return NetworkModuleNorm(net, weights)


class NetworkModuleNorm(network.NetworkModule):
    """Network module backed by explicit "w_norm" and "b_norm" tensors."""

    def __init__(self, net: network.Network, weights: network.NetworkWeights):
        super().__init__(net, weights)

        # .get() yields None when a key is absent; calc_updown tolerates a
        # missing bias but assumes w_norm is present.
        self.w_norm = weights.w.get("w_norm")
        self.b_norm = weights.w.get("b_norm")

    def calc_updown(self, orig_weight):
        """Produce the weight delta (and optional bias) on orig_weight's device."""
        updown = self.w_norm.to(orig_weight.device)
        ex_bias = self.b_norm.to(orig_weight.device) if self.b_norm is not None else None
        return self.finalize_updown(updown, orig_weight, self.w_norm.shape, ex_bias)