TuckER model
class TuckER(torch.nn.Module):
    """TuckER knowledge-graph embedding model: a Tucker decomposition of the
    binary (entity, relation, entity) tensor.

    Args:
        d: dataset object exposing ``entities`` and ``relations`` sequences
           (only their lengths are used here).
        d1: entity embedding dimension.
        d2: relation embedding dimension.
        **kwargs: must provide the dropout rates ``input_dropout``,
           ``hidden_dropout1`` and ``hidden_dropout2``.
    """

    def __init__(self, d, d1, d2, **kwargs):
        super(TuckER, self).__init__()
        self.E = torch.nn.Embedding(len(d.entities), d1, padding_idx=0)
        self.R = torch.nn.Embedding(len(d.relations), d2, padding_idx=0)
        # Core tensor of the Tucker decomposition, shape (d2, d1, d1),
        # uniformly initialised in [-1, 1].
        # FIX: the original created this directly on device="cuda", which
        # crashes on CPU-only machines; create it on CPU and let the caller
        # move the whole module with .cuda() / .to(device). nn.Parameter
        # already sets requires_grad=True, so that flag is dropped too.
        self.W = torch.nn.Parameter(
            torch.tensor(np.random.uniform(-1, 1, (d2, d1, d1)),
                         dtype=torch.float))
        self.input_dropout = torch.nn.Dropout(kwargs["input_dropout"])
        self.hidden_dropout1 = torch.nn.Dropout(kwargs["hidden_dropout1"])
        self.hidden_dropout2 = torch.nn.Dropout(kwargs["hidden_dropout2"])
        self.loss = torch.nn.BCELoss()  # loss function; expects sigmoid outputs
        self.bn0 = torch.nn.BatchNorm1d(d1)
        self.bn1 = torch.nn.BatchNorm1d(d1)

    def init(self):
        """Xavier-initialise the entity and relation embedding tables."""
        xavier_normal_(self.E.weight.data)
        xavier_normal_(self.R.weight.data)

    def forward(self, e1_idx, r_idx):
        """Score (subject, relation) pairs against every entity.

        Args:
            e1_idx: LongTensor of subject-entity indices, shape (batch,).
            r_idx: LongTensor of relation indices, shape (batch,).

        Returns:
            Tensor of shape (batch, num_entities) with probabilities in [0, 1].
        """
        e1 = self.E(e1_idx)                        # (batch, d1)
        x = self.bn0(e1)
        x = self.input_dropout(x)
        x = x.view(-1, 1, e1.size(1))              # (batch, 1, d1)
        r = self.R(r_idx)                          # (batch, d2)
        # Contract the relation embedding with the core tensor:
        # (batch, d2) @ (d2, d1*d1) -> reshaped to (batch, d1, d1).
        W_mat = torch.mm(r, self.W.view(r.size(1), -1))
        W_mat = W_mat.view(-1, e1.size(1), e1.size(1))
        W_mat = self.hidden_dropout1(W_mat)
        x = torch.bmm(x, W_mat)                    # (batch, 1, d1)
        x = x.view(-1, e1.size(1))
        x = self.bn1(x)
        x = self.hidden_dropout2(x)
        # Score against all entity embeddings at once.
        x = torch.mm(x, self.E.weight.transpose(1, 0))
        # FIX: torch.sigmoid replaces the deprecated F.sigmoid.
        pred = torch.sigmoid(x)
        return pred
self.loss = torch.nn.BCELoss()
loss = model.loss(predictions, targets)  # predictions are sigmoid probabilities for binary classification
Examples::
>>> m = nn.Sigmoid()
>>> loss = nn.BCELoss()
>>> input = torch.randn(3, requires_grad=True)
>>> target = torch.empty(3).random_(2)
>>> output = loss(m(input), target)
>>> output.backward()
Goal: visualize the loss and other scalar values during training.
For PyTorch visualization, install tensorboardX and tensorflow:
pip install tensorflow (version 1.4.0 is already installed on the server)
pip install tensorboardX
tensorboardX lets you plot how scalar values change over training under the PyTorch framework.
Reference article: "Using tensorboardX for PyTorch visualization — super detailed!"
from tensorboardX import SummaryWriter  # import the SummaryWriter module
model.init()
opt = torch.optim.Adam(model.parameters(), lr=self.learning_rate)
writer = SummaryWriter('runs')  # create the writer after the optimizer is set up
### log this scalar once per epoch
writer.add_scalar('train_loss', np.mean(losses), epoch)
### close the writer
writer.close()
tensorboard --logdir runs
Reproduced from: https://www.jianshu.com/p/21c2c24b5425