
train_lstm_att.py 2.0 kB

# First, add the following paths to the environment variables. The service is
# currently only open for internal testing, so the paths must be declared manually.
import os
os.environ['FASTNLP_BASE_URL'] = 'http://10.141.222.118:8888/file/download/'
os.environ['FASTNLP_CACHE_DIR'] = '/remote-home/hyan01/fastnlp_caches'

from fastNLP.io.data_loader import IMDBLoader
from fastNLP.embeddings import StaticEmbedding
from model.lstm_self_attention import BiLSTM_SELF_ATTENTION
from fastNLP import CrossEntropyLoss, AccuracyMetric
from fastNLP import Trainer
from torch.optim import Adam


class Config():
    train_epoch = 10
    lr = 0.001

    num_classes = 2
    hidden_dim = 256
    num_layers = 1
    attention_unit = 256
    attention_hops = 1
    nfc = 128

    task_name = "IMDB"
    datapath = {"train": "IMDB_data/train.csv", "test": "IMDB_data/test.csv"}
    save_model_path = "./result_IMDB_test/"

opt = Config()


# load data
dataloader = IMDBLoader()
datainfo = dataloader.process(opt.datapath)
# print(datainfo.datasets["train"])
# print(datainfo)

# define model
vocab = datainfo.vocabs['words']
embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-840b-300', requires_grad=True)
model = BiLSTM_SELF_ATTENTION(init_embed=embed, num_classes=opt.num_classes,
                              hidden_dim=opt.hidden_dim, num_layers=opt.num_layers,
                              attention_unit=opt.attention_unit,
                              attention_hops=opt.attention_hops, nfc=opt.nfc)

# define loss function, metric, and optimizer (only trainable parameters are updated)
loss = CrossEntropyLoss()
metrics = AccuracyMetric()
optimizer = Adam([param for param in model.parameters() if param.requires_grad], lr=opt.lr)


def train(datainfo, model, optimizer, loss, metrics, opt):
    trainer = Trainer(datainfo.datasets['train'], model, optimizer=optimizer, loss=loss,
                      metrics=metrics, dev_data=datainfo.datasets['test'], device=0,
                      check_code_level=-1, n_epochs=opt.train_epoch,
                      save_path=opt.save_model_path)
    trainer.train()


if __name__ == "__main__":
    train(datainfo, model, optimizer, loss, metrics, opt)
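
The model itself is defined in model/lstm_self_attention.py, which this page does not show. For orientation only, below is a rough sketch of what a BiLSTM with structured self-attention typically looks like (after Lin et al., 2017, "A Structured Self-Attentive Sentence Embedding"); the class name, layer layout, and the 'pred' output key are assumptions, not the repository's actual code.

import torch
import torch.nn as nn

# Hypothetical sketch, NOT the repository's model/lstm_self_attention.py.
class BiLSTMSelfAttentionSketch(nn.Module):
    def __init__(self, init_embed, num_classes, hidden_dim, num_layers,
                 attention_unit, attention_hops, nfc):
        super().__init__()
        self.embed = init_embed  # e.g. a fastNLP StaticEmbedding module
        self.lstm = nn.LSTM(self.embed.embedding_dim, hidden_dim, num_layers,
                            batch_first=True, bidirectional=True)
        # attention MLP: d_a = attention_unit, r = attention_hops
        self.ws1 = nn.Linear(2 * hidden_dim, attention_unit, bias=False)
        self.ws2 = nn.Linear(attention_unit, attention_hops, bias=False)
        self.fc = nn.Linear(attention_hops * 2 * hidden_dim, nfc)
        self.out = nn.Linear(nfc, num_classes)

    def forward(self, words):
        x = self.embed(words)                        # [B, T, E]
        h, _ = self.lstm(x)                          # [B, T, 2H]
        # attention weights over time steps, one distribution per hop
        a = torch.softmax(self.ws2(torch.tanh(self.ws1(h))), dim=1)  # [B, T, r]
        m = torch.bmm(a.transpose(1, 2), h)          # [B, r, 2H]
        feats = torch.relu(self.fc(m.flatten(1)))    # [B, nfc]
        return {'pred': self.out(feats)}             # fastNLP models return a dict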
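
After training, Trainer keeps the best checkpoint under save_model_path. As a minimal evaluation sketch, assuming fastNLP's standard Tester API and re-using the datainfo and model objects built in the script above (note the script already uses the test split as dev_data, so this simply reports the final accuracy on that split):

from fastNLP import Tester, AccuracyMetric

tester = Tester(data=datainfo.datasets['test'], model=model, metrics=AccuracyMetric())
tester.test()  # prints the evaluation result, e.g. AccuracyMetric: acc=...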