LDA/04b-mini_ae.py
  
  # coding: utf-8
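# Usage (inferred from the argument handling below):
#   python 04b-mini_ae.py <in_dir> <sparse_model_shelve>
# <in_dir> must contain infer.shelve, whose "LDA" entry maps each modality
# ("ASR", "TRS") to topic vectors for the "TRAIN", "DEV" and "TEST" splits.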
  
  # In[2]:
  
# Imports
import itertools
import json
import os
import pickle
import shelve
import sys

import gensim
import sklearn.metrics
from scipy import sparse
from sklearn import preprocessing
from keras.models import Sequential
from keras.optimizers import SGD, Adam

from mlp import *    # expected to provide train_mlp (and possibly train_ae)
import mlp
from utils import *  # star import; train_ae may come from here instead
  # In[4]:
  
in_dir = sys.argv[1]
sparse_model = shelve.open("{}".format(sys.argv[2]))
infer_model = shelve.open("{}/infer.shelve".format(in_dir))
# Shelve keys: ['ASR', 'TRS', 'LABEL']
# In[6]:
ASR = sparse_model["ASR_wid"]
TRS = sparse_model["TRS_wid"]
LABEL = sparse_model["LABEL"]
  
  
hidden_size = 40
input_activation = "tanh"
out_activation = "tanh"
loss = "mse"
epochs = 500
batch = 1
patience = 60
do_do = False  # whether to use dropout
sgd = Adam(lr=0.00001)
# Other optimizers tried: SGD(lr=0.00001, nesterov=False), 'rmsprop',
# SGD(lr=0.001, momentum=0.9, nesterov=True)
try:
    # A Keras optimizer exposes its class name through get_config();
    # a plain string such as 'rmsprop' has no get_config, hence the fallback.
    sgd_repr = sgd.get_config()["name"]
except AttributeError:
    sgd_repr = sgd
  
  params={ "h1" : hidden_size,
  	"inside_activation" : input_activation,
  	"out_activation" : out_activation,
          "do_dropout": do_do,
  	"loss" : loss,
  	"epochs" : epochs ,
  	"batch_size" : batch,
  	"patience" : patience,
          "sgd" : sgd_repr}
  name = "_".join([ str(x) for x in params.values()])
  try:
      os.mkdir("{}/{}".format(in_dir,name))
  except:
      pass
db = shelve.open("{}/{}/ae_model.shelve".format(in_dir, name), writeback=True)
db["params"] = params
db["LABEL"] = LABEL

with open("{}/{}/ae_model.json".format(in_dir, name), "w") as f:
    json.dump(params, f, indent=4)

keys = ["ASR", "TRS"]
mlp_h = [512, 1024, 2048]
  mlp_loss ="categorical_crossentropy"
  mlp_dropouts = [0,0,0,0]
  mlp_sgd = Adam(0.0001)
  mlp_epochs = 200
  mlp_batch_size = 8
  
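# For each modality, train an autoencoder of size h1 on its LDA topic vectors,
# then probe every learned representation with an MLP classifier against the
# gold labels of each split.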
  db["AE"] = {}
  for mod in keys : 
      res=train_ae(infer_model["LDA"][mod]["TRAIN"],infer_model["LDA"][mod]["DEV"],infer_model["LDA"][mod]["TEST"],[params["h1"]],patience = params["patience"],sgd=sgd,in_activation="tanh",out_activation="tanh",loss=loss,epochs=epochs,batch_size=batch,verbose=0)
      mlp_res_list=[]
      for layer in res :
          mlp_res_list.append(train_mlp(layer[0],LABEL["TRAIN"],layer[1],LABEL["DEV"],layer[2],LABEL["TEST"],mlp_h,loss=mlp_loss,dropouts=mlp_dropouts,sgd=mlp_sgd,epochs=mlp_epochs,batch_size=mlp_batch_size,fit_verbose=0))
      db["AE"][mod]=mlp_res_list
  
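# Cross-view variant ("SPE"): the autoencoder reads the ASR topic vectors and is
# trained to reconstruct the TRS (presumably the reference transcription) topic
# vectors, so its hidden layers are probed as ASR representations informed by
# the clean text.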
  mod = "ASR"
  mod2= "TRS"
  mlp_res_list=[]
  
  res = train_ae(infer_model["LDA"][mod]["TRAIN"],infer_model["LDA"][mod]["DEV"],infer_model["LDA"][mod]["TEST"],[params["h1"]],dropouts=[0],patience = params["patience"],sgd=sgd,in_activation="tanh",out_activation="tanh",loss=loss,epochs=epochs,batch_size=batch,y_train=infer_model["LDA"][mod]["TRAIN"],y_dev=infer_model["LDA"][mod2]["DEV"],y_test=infer_model["LDA"][mod2]["TEST"])
  for layer in res :
      mlp_res_list.append(train_mlp(layer[0],LABEL["TRAIN"],layer[1],LABEL["DEV"],layer[2],LABEL["TEST"],mlp_h,loss=mlp_loss,dropouts=mlp_dropouts,sgd=mlp_sgd,epochs=mlp_epochs,batch_size=mlp_batch_size,fit_verbose=0))
  
  db["AE"]["SPE"] = mlp_res_list
  
  
  db.close()
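# Reading the stored results back later (a sketch, reusing in_dir and name):
#   db = shelve.open("{}/{}/ae_model.shelve".format(in_dir, name))
#   print(db["params"])
#   print(db["AE"].keys())  # ASR, TRS, SPE
#   db.close()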