Simple LSTM neural network

import torch
import torch.nn as nn

class simpleLSTM(nn.Module):
    """A minimal LSTM classifier: embedding -> LSTM -> linear -> log-softmax."""

    def __init__(self, input_size, hidden_size, output_size, num_layers=1):
        super(simpleLSTM, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # Map token indices to dense vectors of size hidden_size.
        self.embedding = nn.Embedding(input_size, hidden_size)
        # batch_first=True: inputs and outputs are (batch, seq_len, hidden_size).
        self.lstm = nn.LSTM(hidden_size, hidden_size, num_layers, batch_first=True)
        self.fc = nn.Linear(hidden_size, output_size)
        self.softmax = nn.LogSoftmax(dim=1)

    def forward(self, input, hidden):
        # input: (batch, seq_len) tensor of token indices.
        x = self.embedding(input)              # (batch, seq_len, hidden_size)
        output, hidden = self.lstm(x, hidden)  # (batch, seq_len, hidden_size)
        output = output[:, -1, :]              # keep only the last time step
        output = self.fc(output)               # (batch, output_size)
        output = self.softmax(output)          # log-probabilities over classes
        return output, hidden

    def initHidden(self):
        # Zero-initialized (h_0, c_0) for a batch of size 1;
        # each has shape (num_layers, batch, hidden_size).
        hidden = torch.zeros(self.num_layers, 1, self.hidden_size)
        cell = torch.zeros(self.num_layers, 1, self.hidden_size)
        return (hidden, cell)
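
To see the model run end to end, here is a minimal usage sketch that continues from the class defined above. The vocabulary size, hidden size, number of classes, and sequence length are illustrative placeholders, not values from the original post.

vocab_size, hidden_size, num_classes = 1000, 128, 10
model = simpleLSTM(vocab_size, hidden_size, num_classes)

# One sequence of 20 token indices, shaped (batch, seq_len) for batch_first=True.
tokens = torch.randint(0, vocab_size, (1, 20))

hidden = model.initHidden()                # zero (h_0, c_0) for batch size 1
log_probs, hidden = model(tokens, hidden)  # log_probs has shape (1, num_classes)
print(log_probs.shape)                     # torch.Size([1, 10])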

 

Source: blog.csdn.net/weixin_42528089/article/details/103864723