Skip to content
Snippets Groups Projects
discriminator.py 1.54 KiB
Newer Older
  • Learn to ignore specific revisions
  • Tamino Huxohl's avatar
    Tamino Huxohl committed
    import torch
    import torch.nn as nn
    
    class Conv(nn.Sequential):
        """3D convolution block: Conv3d -> BatchNorm3d -> ReLU.

        Spatial dimensions are preserved (kernel size 3, stride 1,
        "same" padding); only the channel count changes.

        :param in_channels: number of channels of the input volume
        :param out_channels: number of channels produced by the block
        """

        def __init__(self, in_channels, out_channels):
            # Hand the layers straight to nn.Sequential so the submodule
            # indices (0, 1, 2) match the conventional append order.
            super().__init__(
                nn.Conv3d(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    kernel_size=3,
                    stride=1,
                    padding="same",
                ),
                nn.BatchNorm3d(num_features=out_channels),
                nn.ReLU(inplace=True),
            )
    
    class Discriminator(nn.Module):
        """3D CNN discriminator mapping a volume to a single score.

        Three Conv(+BatchNorm+ReLU) stages, each followed by a stride-2
        max pool that halves every spatial dimension, feed a three-layer
        fully connected head that outputs one value per sample (a raw
        logit — no sigmoid is applied here).

        :param in_channels: number of channels of the input volume
        :param input_shape: spatial size (D, H, W) of the expected input;
            used to size the first fully connected layer. The default
            (16, 16, 16) reproduces the original hard-coded 128 * 2**3
            feature count.
        """

        def __init__(self, in_channels=1, input_shape=(16, 16, 16)):
            super().__init__()

            self.conv = nn.Sequential(
                Conv(in_channels=in_channels, out_channels=32),
                nn.MaxPool3d(kernel_size=2, stride=2),
                Conv(in_channels=32, out_channels=64),
                nn.MaxPool3d(kernel_size=2, stride=2),
                Conv(in_channels=64, out_channels=128),
                nn.MaxPool3d(kernel_size=2, stride=2),
            )

            # Each of the 3 pooling stages floors-halves every spatial dim,
            # and n // 2 // 2 // 2 == n // 8, so the flattened size after
            # the conv stack is 128 channels times the product of s // 8.
            depth, height, width = input_shape
            flat_features = 128 * (depth // 8) * (height // 8) * (width // 8)
            self.fully_connected = nn.Sequential(
                nn.Linear(in_features=flat_features, out_features=512),
                nn.ReLU(inplace=True),
                nn.Linear(in_features=512, out_features=128),
                nn.ReLU(inplace=True),
                nn.Linear(in_features=128, out_features=1),
            )

        def forward(self, x):
            """Compute the discriminator score.

            :param x: input batch of shape (N, in_channels, D, H, W),
                with (D, H, W) matching ``input_shape``
            :return: tensor of shape (N, 1) with one raw score per sample
            """
            x = self.conv(x)
            x = torch.flatten(x, 1)  # keep batch dim, flatten the rest
            x = self.fully_connected(x)
            return x
    
    
    if __name__ == "__main__":
        # Quick smoke test: build the model and push a random volume through it.
        discriminator = Discriminator()
        print(discriminator)

        sample = torch.rand((1, 1, 16, 16, 16))
        prediction = discriminator(sample)

        print(f"Transform {sample.shape} to {prediction.shape}")