"""DANN model.

A PyTorch implementation of the paper
"Unsupervised Domain Adaptation by Backpropagation" (Ganin & Lempitsky, ICML 2015).
"""
import torch.nn as nn
from torchvision import models

from .functions import ReverseLayerF
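
# For reference, `ReverseLayerF` is the gradient reversal layer (GRL) from the
# DANN paper: identity in the forward pass, gradient negated and scaled by
# `alpha` in the backward pass. A minimal sketch of what `functions.py` likely
# provides (an assumption; the actual file may differ):
#
#     from torch.autograd import Function
#
#     class ReverseLayerF(Function):
#         @staticmethod
#         def forward(ctx, x, alpha):
#             ctx.alpha = alpha
#             return x.view_as(x)
#
#         @staticmethod
#         def backward(ctx, grad_output):
#             # Negate and scale the gradient; None matches the alpha argument.
#             return grad_output.neg() * ctx.alpha, None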


class Classifier(nn.Module):
    """SVHN architecture without the domain discriminator (label predictor only)."""
    def __init__(self):
        super(Classifier, self).__init__()
        self.restored = False

        self.feature = nn.Sequential()
        self.feature.add_module('f_conv1', nn.Conv2d(1, 64, kernel_size=5))
        self.feature.add_module('f_bn1', nn.BatchNorm2d(64))
        self.feature.add_module('f_pool1', nn.MaxPool2d(2))
        self.feature.add_module('f_relu1', nn.ReLU(True))
        self.feature.add_module('f_conv2', nn.Conv2d(64, 50, kernel_size=5))
        self.feature.add_module('f_bn2', nn.BatchNorm2d(50))
        self.feature.add_module('f_drop1', nn.Dropout2d())
        self.feature.add_module('f_pool2', nn.MaxPool2d(2))
        self.feature.add_module('f_relu2', nn.ReLU(True))

        self.class_classifier = nn.Sequential()
        self.class_classifier.add_module('c_fc1', nn.Linear(50 * 4 * 4, 100))
        # 1-D variants here: these layers see flat (N, 100) activations.
        self.class_classifier.add_module('c_bn1', nn.BatchNorm1d(100))
        self.class_classifier.add_module('c_relu1', nn.ReLU(True))
        self.class_classifier.add_module('c_drop1', nn.Dropout())
        self.class_classifier.add_module('c_fc2', nn.Linear(100, 100))
        self.class_classifier.add_module('c_bn2', nn.BatchNorm1d(100))
        self.class_classifier.add_module('c_relu2', nn.ReLU(True))
        self.class_classifier.add_module('c_fc3', nn.Linear(100, 10))
        self.class_classifier.add_module('c_softmax', nn.LogSoftmax(dim=1))

    def forward(self, input_data):
        # Expand single-channel input to (N, 1, 28, 28) if needed.
        input_data = input_data.expand(input_data.shape[0], 1, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 50 * 4 * 4)
        class_output = self.class_classifier(feature)
        return class_output
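
# Example (shapes follow from the layers above): for an (8, 1, 28, 28) batch,
#     log_probs = Classifier()(torch.randn(8, 1, 28, 28))  # -> (8, 10)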


class MNISTmodel(nn.Module):
    """MNIST architecture.

    With Dropout2d:    84% ~ 73%
    Without Dropout2d: 50% ~ 73%
    """
    def __init__(self):
        super(MNISTmodel, self).__init__()
        self.restored = False

        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=32, kernel_size=(5, 5)),   # 3x28x28 -> 32x24x24
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),                                # -> 32x12x12
            nn.Conv2d(in_channels=32, out_channels=48, kernel_size=(5, 5)),  # -> 48x8x8
            nn.BatchNorm2d(48),
            nn.Dropout2d(),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),                                # -> 48x4x4
        )

        self.classifier = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 10),
        )

        self.discriminator = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 2),
        )

    def forward(self, input_data, alpha):
        # Expand grayscale input to 3 channels if needed.
        input_data = input_data.expand(input_data.shape[0], 3, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 48 * 4 * 4)
        # GRL: identity forward; backward gradient is multiplied by -alpha.
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)
        return class_output, domain_output
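
# `alpha` is usually annealed from 0 to 1 over training. The schedule from the
# DANN paper, with `p` the training progress in [0, 1] (computed by the caller,
# not in this file):
#
#     alpha = 2.0 / (1.0 + math.exp(-10 * p)) - 1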


class SVHNmodel(nn.Module):
    """SVHN architecture.

    Note: an approximation; this does not reproduce the paper's exact
    SVHN structure.
    """
    def __init__(self):
        super(SVHNmodel, self).__init__()
        self.restored = False

        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=64,
                      kernel_size=(5, 5), stride=(1, 1)),                    # 3x28x28 -> 64x24x24
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),                                # -> 64x12x12
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(5, 5)),  # -> 64x8x8
            nn.BatchNorm2d(64),
            nn.Dropout2d(),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2)),                 # -> 64x4x4
            nn.ReLU(inplace=True),
        )

        self.classifier = nn.Sequential(
            nn.Linear(64 * 4 * 4, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(inplace=True),
            nn.Linear(1024, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 10),
        )

        self.discriminator = nn.Sequential(
            nn.Linear(64 * 4 * 4, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(inplace=True),
            nn.Linear(1024, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 2),
        )

    def forward(self, input_data, alpha):
        # Expand grayscale input to 3 channels if needed.
        input_data = input_data.expand(input_data.shape[0], 3, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 64 * 4 * 4)
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)
        return class_output, domain_output
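
# In DANN training, `domain_output` is typically supervised with binary domain
# labels (e.g. source = 0, target = 1) via cross-entropy on both domains, while
# `class_output` is trained with label cross-entropy on source samples only.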


class AlexModel(nn.Module):
    """AlexNet pretrained on ImageNet, for the Office-31 dataset (31 classes)."""
    def __init__(self):
        super(AlexModel, self).__init__()
        self.restored = False

        # For torchvision >= 0.13, `pretrained=True` is deprecated in favour of
        # models.alexnet(weights=models.AlexNet_Weights.IMAGENET1K_V1).
        model_alexnet = models.alexnet(pretrained=True)
        self.features = model_alexnet.features

        # Reuse AlexNet's first six classifier layers (through the 4096-d fc7).
        self.fc = nn.Sequential()
        for i in range(6):
            self.fc.add_module("classifier" + str(i), model_alexnet.classifier[i])
        self.__in_features = model_alexnet.classifier[6].in_features  # 4096

        self.bottleneck = nn.Sequential(
            nn.Linear(4096, 256),
            nn.ReLU(inplace=True),
            nn.Dropout(),
        )
        self.classifier = nn.Sequential(
            nn.Linear(256, 31),  # 31 Office-31 classes
        )
        self.discriminator = nn.Sequential(
            nn.Linear(256, 1024),
            nn.ReLU(),
            nn.Dropout(),
            nn.Linear(1024, 1024),
            nn.ReLU(),
            nn.Dropout(),
            nn.Linear(1024, 2),
        )

    def forward(self, input_data, alpha):
        input_data = input_data.expand(input_data.shape[0], 3, 227, 227)
        feature = self.features(input_data)
        feature = feature.view(-1, 256 * 6 * 6)  # AlexNet conv output for 227x227 input
        fc = self.fc(feature)
        bottleneck = self.bottleneck(fc)
        reverse_bottleneck = ReverseLayerF.apply(bottleneck, alpha)
        class_output = self.classifier(bottleneck)
        domain_output = self.discriminator(reverse_bottleneck)
        return class_output, domain_output
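

if __name__ == "__main__":
    # Minimal smoke test (not part of the original training code). Run as a
    # module, e.g. `python -m <package>.model`, so that the relative import of
    # ReverseLayerF resolves.
    import torch

    net = MNISTmodel()
    net.eval()  # skip BatchNorm running-stat updates for this shape check
    x = torch.randn(4, 3, 28, 28)
    class_out, domain_out = net(x, alpha=0.1)
    print(class_out.shape, domain_out.shape)  # torch.Size([4, 10]) torch.Size([4, 2])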