# DECODA_binary_BOW_AE_NO_HIDDEN_TRANS_MODELS.py
# coding: utf-8

# In[2]:

# Import
# (grouped stdlib / third-party / local; `import keras` added because the
#  script references keras.callbacks.EarlyStopping but only imported
#  keras submodules)
import codecs
import itertools
import json
import pickle
import shelve
import sys

import gensim
import keras
# Alignement
import nltk
import pandas
import scipy.io
import scipy.sparse
import sklearn.metrics
from keras.layers import containers
from keras.layers.core import Dense, Dropout, Activation, AutoEncoder
from keras.models import Sequential
from keras.optimizers import SGD, Adam
from scipy import sparse
from sklearn import preprocessing
from sklearn.feature_extraction.text import CountVectorizer

import mlp
from mlp import *
from utils import *
# In[4]:

# Output shelve (sys.argv[2]) collects every representation produced below;
# writeback=True lets nested objects be mutated in place before db.sync().
db=shelve.open("{}.shelve".format(sys.argv[2]),writeback=True)

# Input shelve (sys.argv[1]) holding the sparse bag-of-words data.
sparse_model=shelve.open("{}.shelve".format(sys.argv[1]))
#['ASR', 'TRS', 'LABEL']
# In[6]:

# ASR / TRS: dicts of scipy sparse matrices keyed by split name
# (presumably "TRAIN"/"DEV"/"TEST" — confirm against the producer script).
# LABEL: dict of pandas Series of label strings (it supports .apply below).
ASR=sparse_model["ASR"]
TRS=sparse_model["TRS"]
LABEL=sparse_model["LABEL"]

# Copy the raw sparse inputs and labels into the output shelve.
db["ASR_SPARSE"]=ASR
db["TRS_SPARSE"]=TRS
db["LABEL"]=LABEL
print "todo label"
def select(elm):
    """Return the trailing integer of an underscore-separated label string.

    E.g. "theme_3" -> 3.  A string with no underscore is parsed whole.
    """
    tail = elm.rsplit("_", 1)[-1]
    return int(tail)
#z.apply(select)
# Binarize class labels: fit the binarizer on the TRAIN split only, then
# transform every split to a one-hot (0/1) matrix.
# NOTE(review): label_bin is never stored in db afterwards — the
# "todo label" print above suggests this part was left unfinished.
label_bin={}
lb = preprocessing.LabelBinarizer(neg_label=0)
lb.fit(LABEL["TRAIN"].apply(select))
for i in ASR.keys():
    # BUG FIX: the original assigned `label_bin = lb.transform(...)`,
    # overwriting the dict with the last split's matrix instead of
    # keeping one entry per split.
    label_bin[i]=lb.transform(LABEL[i].apply(select))

# --- Auto-encoder hyper-parameters ---
hidden_size=795        # width of the first hidden layer (H1)
hidden_size2=530       # width of the bottleneck layer (H2)
input_activation="tanh"
out_activation="tanh"
loss="mse"
epochs=100
batch=4
patience=10            # early-stopping patience (epochs without DEV improvement)
# Adam with a small learning rate; the commented-out alternatives record
# earlier optimizer experiments.
sgd = Adam(lr=0.0001)#SGD(lr=0.00001,nesterov=False) #'rmsprop'# Adam(lr=0.00001)#SGD(lr=0.001, momentum=0.9, nesterov=True)
# Keras optimizer objects expose get_config(); fall back to the raw value
# when a plain string (e.g. 'rmsprop') is used instead.
try :
    sgd_repr=sgd.get_config()
except AttributeError :
    sgd_repr=sgd
# Persist the experiment configuration as JSON next to the output shelve.
json.dump({ "h1" : hidden_size,
	"h2": hidden_size2,
	"inside_activation" : input_activation,
	"out_activation" : out_activation,
	"loss" : loss,
	"epochs" : epochs ,
	"batch_size" : batch,
	"patience" : patience,
        "sgd" : sgd_repr},
	open("{}.json".format(sys.argv[2]),"w"),
	indent=4)
print "gogo autoencoder ASR"
autoencode=Sequential()
autoencode.add(Dense(hidden_size,input_dim=ASR["TRAIN"].shape[1],init='glorot_uniform',activation=input_activation))
autoencode.add(Dense(hidden_size2,input_dim=hidden_size,init='glorot_uniform',activation=input_activation))
autoencode.add(Dense(hidden_size,input_dim=hidden_size2,init="glorot_uniform",activation=out_activation))
autoencode.add(Dense(ASR["DEV"].todense().shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation))

#autoencode.compile(optimizer=sgd,loss=loss)

autoencode.compile(optimizer=sgd,loss=loss)


# In[ ]:

# Train the ASR auto-encoder (input == target), early-stopping on DEV loss.
# .todense() is required here: Keras 0.x Dense layers expect dense arrays.
autoencode.fit(ASR["TRAIN"].todense(),ASR["TRAIN"].todense(),
        nb_epoch=epochs,batch_size=batch,
        callbacks=
        [keras.callbacks.EarlyStopping(monitor='val_loss',patience=patience, verbose=0)],           validation_data=(ASR["DEV"].todense(),ASR["DEV"].todense()),verbose=1)


# In[ ]:

# Encoder down to H2 and back to H1: reuse the first three trained layers
# (the 'init' arguments are irrelevant since trained weights are supplied).
auto_decoder=Sequential()
# FIX: sparse matrices expose .shape directly — no need for .todense().
auto_decoder.add(Dense(hidden_size,input_dim=ASR["DEV"].shape[1],init='uniform',activation=input_activation,weights=autoencode.get_weights()[:2]))
auto_decoder.add(Dense(hidden_size2,input_dim=hidden_size,init='glorot_uniform',activation=input_activation,weights=autoencode.get_weights()[2:4]))
auto_decoder.add(Dense(hidden_size,input_dim=hidden_size2,init='glorot_uniform',activation=input_activation,weights=autoencode.get_weights()[4:6]))
auto_decoder.compile(optimizer=sgd,loss=loss)


# In[77]:

#autoencode.predict(ASR["DEV"].todense())


# In[ ]:

print "auto encoder et auto decoder asr okay"

ASR_AE_H2={}
for i in ASR.keys():
    ASR_AE_H2[i]=auto_decoder.predict(ASR[i].todense())
    #TRS[i]=dico.transform(TRS[i][2])

db["ASR_AE_H2"]=ASR_AE_H2


# Encoder truncated after the bottleneck (first two trained layers only),
# giving the H1-level (width hidden_size2) representation.
auto_decoder=Sequential()
# FIX: read .shape from the sparse matrix itself; the original densified
# the whole DEV matrix just to get the column count.
auto_decoder.add(Dense(hidden_size,input_dim=ASR["DEV"].shape[1],init='uniform',activation=input_activation,weights=autoencode.get_weights()[:2]))
auto_decoder.add(Dense(hidden_size2,input_dim=hidden_size,init='glorot_uniform',activation=input_activation,weights=autoencode.get_weights()[2:4]))
auto_decoder.compile(optimizer=sgd,loss=loss)

# H1 projection of every ASR split.
ASR_AE_H1={}
for i in ASR.keys():
    ASR_AE_H1[i]=auto_decoder.predict(ASR[i].todense())

db["ASR_AE_H1"]=ASR_AE_H1



print "auto encoder trs learning"
# In[68]:/
sgd_trs = SGD(lr=0.1,momentum=0.9)
autoencode_trs=Sequential()
autoencode_trs.add(Dense(hidden_size,input_dim=TRS["DEV"].todense().shape[1],init='glorot_uniform',activation=input_activation))
autoencode_trs.add(Dense(hidden_size2,input_dim=hidden_size,init='glorot_uniform',activation=input_activation))
autoencode_trs.add(Dense(hidden_size,input_dim=hidden_size2,init="glorot_uniform",activation=out_activation))
autoencode_trs.add(Dense(TRS["DEV"].todense().shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation))

#autoencode_trs.compile(optimizer=sgd_trs,loss=loss)

autoencode_trs.compile(optimizer=sgd,loss=loss)


# In[69]:

# Train the TRS auto-encoder (identity target), early-stopping on DEV loss.
autoencode_trs.fit(TRS["TRAIN"].todense(),TRS["TRAIN"].todense(),nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=patience, verbose=0)],
               validation_data=(TRS["DEV"].todense(),TRS["DEV"].todense()),verbose=1)


# In[87]:

# TRS encoder down to H2 and back to H1, reusing the trained TRS weights.
auto_decoder_trs=Sequential()
# BUG FIX: the input dimension must match the TRS vocabulary — these layers
# carry TRS auto-encoder weights and are fed TRS matrices below; the
# original used ASR["DEV"], which only works if both vocabularies happen
# to coincide.  Also read .shape without densifying.
auto_decoder_trs.add(Dense(hidden_size,input_dim=TRS["DEV"].shape[1],activation=input_activation,weights=autoencode_trs.get_weights()[:2]))
auto_decoder_trs.add(Dense(hidden_size2,input_dim=hidden_size,activation=input_activation,weights=autoencode_trs.get_weights()[2:4]))
auto_decoder_trs.add(Dense(hidden_size,input_dim=hidden_size2,activation=input_activation,weights=autoencode_trs.get_weights()[4:6]))
auto_decoder_trs.compile(optimizer=sgd,loss=loss)


# In[88]:
print "auto encoder trs okay"
TRS_AE_H2={}

for i in TRS.keys():
    TRS_AE_H2[i]=auto_decoder_trs.predict(TRS[i].todense())
    #TRS[i]=dico.transform(TRS[i][2])

db["TRS_AE_H2"]=TRS_AE_H2



# TRS encoder truncated after the bottleneck (first two trained layers).
auto_decoder_trs=Sequential()
# BUG FIX: input width must be the TRS vocabulary size (ASR["DEV"] was used
# by copy-paste mistake); also avoid .todense() just to read the shape.
auto_decoder_trs.add(Dense(hidden_size,input_dim=TRS["DEV"].shape[1],activation=input_activation,weights=autoencode_trs.get_weights()[:2]))
auto_decoder_trs.add(Dense(hidden_size2,input_dim=hidden_size,activation=input_activation,weights=autoencode_trs.get_weights()[2:4]))
auto_decoder_trs.compile(optimizer=sgd,loss=loss)


# In[88]:
print "auto encoder trs okay"
TRS_AE_H1={}

for i in TRS.keys():
    TRS_AE_H1[i]=auto_decoder_trs.predict(TRS[i].todense())
    #TRS[i]=dico.transform(TRS[i][2])

db["TRS_AE_H1"]=TRS_AE_H1


db.sync()




# In[261]:

#pred_dev= model_TRS_AE.predict(TRS_AE["DEV"],batch_size=1)

# Full reconstructions (auto-encoder output layer) for both views.
# Both dicts are keyed by the TRS split names, as in the original loop.
TRS_AE = {split: autoencode_trs.predict(TRS[split].todense()) for split in TRS}
ASR_AE = {split: autoencode.predict(ASR[split].todense()) for split in TRS}

db["TRS_AE_OUT"]=TRS_AE
db["ASR_AE_OUT"]=ASR_AE

db.sync()
# # Transfert de couche
# ICI
# In[138]:
print "learn transform ae H2({})".format(hidden_size)
# Transfer model: a single Dense+ReLU mapping from the ASR H2 representation
# to the TRS H2 representation.  Both are width hidden_size because the H2
# projections above re-expand the bottleneck back to H1 width.
model_TRANS = Sequential()
model_TRANS.add(Dense(hidden_size,input_dim=hidden_size, init='glorot_uniform', activation='relu'))

model_TRANS.compile(loss=loss, optimizer=sgd)


# In[146]:

# Learn ASR-H2 -> TRS-H2 regression, early-stopping on the DEV pair.
model_TRANS.fit(ASR_AE_H2["TRAIN"],TRS_AE_H2["TRAIN"],nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=patience, verbose=0)],
               validation_data=(ASR_AE_H2["DEV"],TRS_AE_H2["DEV"]),verbose=1)


# In[140]:
print "make trans projection H2"
asr_transformer={}
for i in ASR_AE.keys():
    asr_transformer[i]=model_TRANS.predict(ASR_AE_H2[i])

db["ASR_H2_TRANFORMED_TRSH2"]=asr_transformer
# In[ ]:

db.sync()


model_TRANS = Sequential()
model_TRANS.add(Dense(ASR["TRAIN"].shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation,weights=autoencode_trs.get_weights()[-2:]))
model_TRANS.compile(loss=loss,optimizer=sgd)

print "make trans projection OUT "
trsh2_to_OUT={}
for i in ASR_AE.keys():
    trsh2_to_OUT[i]=model_TRANS.predict(asr_transformer[i])

db["ASR_H2_TRANFORMED_OUT"]=trsh2_to_OUT


print "learn transform ae H1({})".format(hidden_size2)
model_TRANS = Sequential()
model_TRANS.add(Dense(hidden_size2,input_dim=hidden_size2, init='glorot_uniform', activation=input_activation))
sgd_TRANS = SGD(lr=0.01, decay=1e-4, momentum=0.9, nesterov=True)
model_TRANS.compile(loss='mse', optimizer=sgd)


# In[146]:

# Learn ASR-H1 -> TRS-H1 regression, early-stopping on the DEV pair.
model_TRANS.fit(ASR_AE_H1["TRAIN"],TRS_AE_H1["TRAIN"],nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=patience, verbose=0)],
               validation_data=(ASR_AE_H1["DEV"],TRS_AE_H1["DEV"]),verbose=1)



print "make trans projection H1"
asr_transformer_H1={}
for i in ASR_AE.keys():
    asr_transformer_H1[i]=model_TRANS.predict(ASR_AE_H1[i])

db["ASR_H1_TRANFORMED_TRSH2"]=asr_transformer_H1
# In[ ]:

# Decode the transformed H1 codes back to word space using the last two
# (decoder) layers of the trained TRS auto-encoder.
model_TRANS_H1_OUT = Sequential()
model_TRANS_H1_OUT.add(Dense(hidden_size,input_dim=hidden_size2,init="glorot_uniform",activation=out_activation,weights=autoencode_trs.get_weights()[-4:-2]))
model_TRANS_H1_OUT.add(Dense(TRS["TRAIN"].shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation,weights=autoencode_trs.get_weights()[-2:]))
model_TRANS_H1_OUT.compile(loss=loss, optimizer=sgd)

# Decode every transformed split, persist, then close the output shelve.
asr_tranform_H1_out = {split: model_TRANS_H1_OUT.predict(asr_transformer_H1[split])
                       for split in ASR_AE}

db["ASR_H1_TRANFORMED_OUT"]=asr_tranform_H1_out
db.sync()
db.close()