Blame view

LDA/sae.py 1.54 KB
b6d0165d1   Killian   Initial commit
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
  # -*- coding: utf-8 -*-
  import keras
  import numpy
  #from keras.layers.core import Dense, Dropout, Activation 
  from keras.optimizers import SGD,Adam
  from keras.models import Sequential
  from keras.layers import Input, Dense, Dropout
  from keras.models import Model
  
  import pandas 
  from collections import namedtuple
  from sklearn.metrics import accuracy_score as perf
  # Lightweight container for one layer's encoded representations of the
  # train/dev/test splits (as produced and stacked by train_sae below).
  save_tuple= namedtuple("save_tuple",["pred_train","pred_dev","pred_test"])
  
  def train_sae(train,dev,test,hidden_sizes,dropouts=None,in_activation="tanh",out_activation="relu",loss="mse",sgd=None,epochs=500,batch_size=8,verbose=1,patience=20):
      """Greedily train a stack of single-hidden-layer autoencoders.

      Each entry of ``hidden_sizes`` trains one autoencoder on the current
      representation of ``train`` (reconstructing its own input), then the
      train/dev/test matrices are replaced by that model's reconstructions
      and the next layer is trained on top.

      Parameters
      ----------
      train, dev, test : 2-D arrays (n_samples, n_features); dev is only used
          as validation data for early stopping, test is only transformed.
      hidden_sizes : list of int, one hidden-layer width per autoencoder.
      dropouts : optional list of input-dropout rates, consumed one per layer
          (0 disables dropout for that layer). Defaults to no dropout.
      in_activation : activation of the hidden (encoding) layer.
      out_activation : activation of the reconstruction (output) layer.
      loss, sgd, epochs, batch_size, verbose, patience : usual Keras knobs;
          ``sgd`` defaults to SGD(lr=0.01, momentum=0.9).

      Returns
      -------
      list of (train_repr, dev_repr, test_repr) tuples, one per layer.
      """
      if dropouts is None:
          # One rate per layer is enough; the pop() loop below consumes at
          # most len(hidden_sizes) entries.
          dropouts = [0] * len(hidden_sizes)
      else:
          # BUG FIX: copy so pop(0) below does not destructively mutate the
          # caller's list across calls.
          dropouts = list(dropouts)

      if sgd is None:
          sgd = SGD(lr=0.01, decay=0, momentum=0.9)

      xt, xd, xte = train, dev, test
      preds = []
      for h_layer in hidden_sizes:
          input_vect = Input(shape=(xt.shape[1],))
          previous = input_vect
          if dropouts:
              d = dropouts.pop(0)
              if d:
                  # Dropout applied to the layer's input (denoising-style).
                  previous = Dropout(d)(previous)

          h = Dense(h_layer, activation=in_activation)(previous)
          # BUG FIX: the reconstruction layer previously used in_activation,
          # silently ignoring the out_activation parameter.
          out = Dense(xt.shape[1], activation=out_activation)(h)
          model = Model(input_vect, out)
          model.compile(loss=loss, optimizer=sgd)
          # nb_epoch is the Keras 1.x spelling kept for compatibility with the
          # rest of this file.  BUG FIX: honour the verbose parameter instead
          # of hard-coding verbose=0.
          model.fit(xt, xt, nb_epoch=epochs, batch_size=batch_size,
                    callbacks=[keras.callbacks.EarlyStopping(monitor="val_loss",
                                                             patience=patience,
                                                             verbose=0)],
                    validation_data=(xd, xd), verbose=verbose)
          # Replace all three splits by this layer's reconstructions so the
          # next autoencoder trains on the new representation.
          xt, xd, xte = (model.predict(xt), model.predict(xd), model.predict(xte))
          preds.append((xt, xd, xte))
      return preds