Skip to content
Snippets Groups Projects
Commit 9b8cb709 authored by Tamino Huxohl's avatar Tamino Huxohl
Browse files

add todos

parent d60df9f1
No related branches found
No related tags found
No related merge requests found
import torch import torch
import torch.nn as nn import torch.nn as nn
class Conv(nn.Sequential):
    """Conv3d -> BatchNorm3d -> ReLU building block.

    A 3x3x3 3D convolution with stride 1 and "same" padding (spatial
    dimensions are preserved), followed by batch normalization over the
    output channels and an in-place ReLU.
    """

    def __init__(self, in_channels, out_channels):
        # Passing the submodules to nn.Sequential.__init__ registers them
        # under the same indices ("0", "1", "2") as successive append calls,
        # so the state dict layout is unchanged.
        super().__init__(
            nn.Conv3d(
                in_channels=in_channels,
                out_channels=out_channels,
                kernel_size=3,
                stride=1,
                padding="same",
            ),
            nn.BatchNorm3d(num_features=out_channels),
            nn.ReLU(inplace=True),
        )
class Discriminator(nn.Module): class Discriminator(nn.Module):
def __init__(self, in_channels=1): def __init__(self, in_channels=1):
super().__init__() super().__init__()
#TODO: make fully connected layer dependent on input shape
#TODO: write doc
self.conv = nn.Sequential( self.conv = nn.Sequential(
Conv(in_channels=in_channels, out_channels=32), Conv(in_channels=in_channels, out_channels=32),
...@@ -38,7 +32,7 @@ class Discriminator(nn.Module): ...@@ -38,7 +32,7 @@ class Discriminator(nn.Module):
nn.ReLU(inplace=True), nn.ReLU(inplace=True),
nn.Linear(in_features=128, out_features=1), nn.Linear(in_features=128, out_features=1),
) )
def forward(self, x): def forward(self, x):
x = self.conv(x) x = self.conv(x)
x = torch.flatten(x, 1) x = torch.flatten(x, 1)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment