[{"title":"( 9 个子文件 28KB ) 一文读懂——全局注意力机制(global attention)详解与代码实现","children":[{"title":"sentiment_analysis-amazon-Global Attention Mechanism(全局注意力机制keras)","children":[{"title":".idea","children":[{"title":"sentiment_analysis-amazon-master.iml <span style='color:#111;'> 324B </span>","children":null,"spread":false},{"title":"misc.xml <span style='color:#111;'> 188B </span>","children":null,"spread":false},{"title":"modules.xml <span style='color:#111;'> 323B </span>","children":null,"spread":false},{"title":"workspace.xml <span style='color:#111;'> 5.47KB </span>","children":null,"spread":false},{"title":".gitignore <span style='color:#111;'> 50B </span>","children":null,"spread":false},{"title":"inspectionProfiles","children":[{"title":"Project_Default.xml <span style='color:#111;'> 2.39KB </span>","children":null,"spread":false},{"title":"profiles_settings.xml <span style='color:#111;'> 174B </span>","children":null,"spread":false}],"spread":true}],"spread":true},{"title":"GlobalAttention_LSTM.py <span style='color:#111;'> 4.54KB </span>","children":null,"spread":false},{"title":"amazon_cells_labelled.txt <span style='color:#111;'> 56.86KB </span>","children":null,"spread":false}],"spread":true}],"spread":true}]