class Generator(nn.Module):
    """U-Net generator that fuses image features with label and noise
    information at the network bottleneck.

    The encoder/decoder is built from nested ``UnetSkipConnectionBlock``s;
    the innermost submodule is a ``LabelCombiner`` that injects the
    class-label and noise features into the bottleneck activations.

    Args:
        input_nc (int): the number of channels in input images
        output_nc (int): the number of channels in output images
        label_nc (int): the number of classes in labels
        latent_dim (int): the number of noise dimensions
        ngf (int): the number of filters in the last conv layer
    """

    # NOTE(review): the start of this definition was cut off in the source;
    # the class header and the leading part of the __init__ signature below
    # are reconstructed from the body and the Args docstring — confirm
    # against the original file.
    def __init__(self, input_nc, output_nc, label_nc, latent_dim, ngf=64):
        super(Generator, self).__init__()
        # Normalization layer: batch normalization with learnable affine
        # parameters and running statistics.
        norm_layer = functools.partial(nn.BatchNorm2d, affine=True,
                                       track_running_stats=True)
        # Layer that combines the label and random-noise features; it sits
        # at the innermost position of the U-Net so the conditioning is
        # merged into the bottleneck representation.
        self.label_comb = LabelCombiner(label_nc, latent_dim, (ngf * 8, 1, 1))
        # Construct the U-Net from the innermost block outward.  The skip
        # connections (encoder features merged back into the decoder) are
        # the essential part of this architecture.
        _net = UnetSkipConnectionBlock(ngf * 8, ngf * 8,
                                       submodule=self.label_comb,
                                       norm_layer=norm_layer, innermost=True)
        for _ in range(3):
            _net = UnetSkipConnectionBlock(ngf * 8, ngf * 8, submodule=_net,
                                           norm_layer=norm_layer,
                                           use_dropout=True)
        _net = UnetSkipConnectionBlock(ngf * 4, ngf * 8, submodule=_net,
                                       norm_layer=norm_layer)
        _net = UnetSkipConnectionBlock(ngf * 2, ngf * 4, submodule=_net,
                                       norm_layer=norm_layer)
        _net = UnetSkipConnectionBlock(ngf, ngf * 2, submodule=_net,
                                       norm_layer=norm_layer)
        # Outermost block maps input_nc image channels down through the
        # nested blocks and back up to output_nc channels.
        self.model = UnetSkipConnectionBlock(output_nc, ngf,
                                             input_nc=input_nc,
                                             submodule=_net, outermost=True,
                                             norm_layer=norm_layer)

    def forward(self, images, labels, noise):
        """Generate output images from the three inputs.

        Args:
            images: input image batch fed to the U-Net.
            labels: class labels consumed by the bottleneck combiner.
            noise: random noise vector consumed by the bottleneck combiner.

        Returns:
            The output of the outermost U-Net block applied to ``images``.
        """
        # The label/noise pair is stashed on the combiner first and is
        # consumed when the U-Net forward pass reaches the bottleneck.
        self.label_comb.set_labels(labels, noise)  # Set labels before model forward
        return self.model(images)