Unidirectional LSTM
import torch
import torch.nn as nn

seq_len = 20          # length of each input sequence
batch_size = 64
embedding_dim = 100   # dimension of each word vector
num_embeddings = 300  # vocabulary size
hidden_size = 128     # size of the LSTM hidden state
number_layer = 3      # number of stacked LSTM layers

# Random token indices as a stand-in for a real batch; values stay below
# num_embeddings, shape [batch_size, seq_len] -> [64, 20]
input = torch.randint(low=0, high=256, size=[batch_size, seq_len])

embedding = nn.Embedding(num_embeddings, embedding_dim)
input_embeded = embedding(input)  # [64, 20, 100]
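A minimal sketch of feeding the embedded batch into a unidirectional nn.LSTM, reusing the hidden_size and number_layer values defined above; batch_first=True and the variable names lstm, output, h_n, c_n are assumptions for illustration, not part of the original snippet.

# Unidirectional LSTM over the embedded input (sketch, batch_first=True assumed
# so the tensor keeps its [batch_size, seq_len, embedding_dim] layout)
lstm = nn.LSTM(input_size=embedding_dim, hidden_size=hidden_size,
               num_layers=number_layer, batch_first=True)

output, (h_n, c_n) = lstm(input_embeded)
print(output.shape)  # [64, 20, 128] -> last-layer hidden state at every time step
print(h_n.shape)     # [3, 64, 128]  -> final hidden state of each layer
print(c_n.shape)     # [3, 64, 128]  -> final cell state of each layer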