# DECODA_binary_BOW_MINIAE_MODELS.py
# coding: utf-8

# In[2]:

# Import
import pandas
# Alignment
import nltk
import codecs
import gensim
from scipy import sparse
import itertools
from sklearn.feature_extraction.text import CountVectorizer
import scipy.sparse
import scipy.io
from sklearn import preprocessing
import keras.callbacks  # needed for keras.callbacks.EarlyStopping below
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, AutoEncoder
from keras.optimizers import SGD,Adam
from keras.layers import containers
from mlp import *
import mlp
import sklearn.metrics
import shelve
import pickle
from utils import *
import sys
import json
# In[4]:
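# Usage (as written): python DECODA_binary_BOW_MINIAE_MODELS.py <input_basename> <output_basename>
# sys.argv[1]: basename of the input shelve holding the sparse BOW matrices ('ASR', 'TRS', 'LABEL')
# sys.argv[2]: basename of the output shelve (and of the JSON config dumped below)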

db=shelve.open("{}.shelve".format(sys.argv[2]),writeback=True)

sparse_model=shelve.open("{}.shelve".format(sys.argv[1]))
#['ASR', 'TRS', 'LABEL']
# In[6]:

ASR=sparse_model["ASR"]
TRS=sparse_model["TRS"]
LABEL=sparse_model["LABEL"]

db["ASR_SPARSE"]=ASR
db["TRS_SPARSE"]=TRS
db["LABEL"]=LABEL
print "todo label"
def select(elm):
    return int(elm.split("_")[-1])
#z.apply(select)
label_bin={}
lb = preprocessing.LabelBinarizer(neg_label=0)
lb.fit(LABEL["TRAIN"].apply(select))
for i in ASR.keys():
    label_bin=lb.transform(LABEL[i].apply(select))

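# Auto-encoder hyper-parameters (also dumped to the JSON config below).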
hidden_size=50
input_activation="tanh"
out_activation="tanh"
loss="mse"
epochs=500
batch=1
patience=60
w1_size=3000
w2_size=500
do_do=False
# Optimizer: kept under the name 'sgd' but actually Adam; SGD and 'rmsprop' variants were tried and left commented out.
sgd = Adam(lr=0.0001)  # SGD(lr=0.00001, nesterov=False) / 'rmsprop' / SGD(lr=0.001, momentum=0.9, nesterov=True)
try:
    sgd_repr=sgd.get_config()
except AttributeError:
    sgd_repr=sgd  # optimizer given as a plain string (e.g. 'rmsprop') has no get_config()
json.dump({ "h1" : hidden_size,
	"inside_activation" : input_activation,
	"out_activation" : out_activation,
        "do_dropout": do_do,
	"loss" : loss,
	"epochs" : epochs ,
	"batch_size" : batch,
	"patience" : patience,
        "sgd" : sgd_repr},
	open("{}.json".format(sys.argv[2]),"w"),
	indent=4)
print "gogo autoencoder ASR"
autoencode=Sequential()
autoencode.add(Dense(hidden_size,input_dim=ASR["TRAIN"].shape[1],init='glorot_uniform',activation=input_activation))
if do_do :
    autoencode.add(Dropout(0.5))
autoencode.add(Dense(ASR["DEV"].todense().shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation))

autoencode.compile(optimizer=sgd,loss=loss)


# In[ ]:

autoencode.fit(ASR["TRAIN"].todense(),ASR["TRAIN"].todense(),nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                   patience=patience, verbose=0)],validation_data=(ASR["DEV"].todense(),ASR["DEV"].todense()),verbose=1)


# In[ ]:

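# Encoder-only model: reuse the trained first layer (weights + bias) of the auto-encoder
# to project inputs onto the hidden layer. Despite its name, 'auto_decoder' is the encoder half.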
auto_decoder=Sequential()
auto_decoder.add(Dense(hidden_size,input_dim=ASR["DEV"].todense().shape[1],init='uniform',activation=input_activation,weights=autoencode.get_weights()[:2]))
auto_decoder.compile(optimizer=sgd,loss=loss)


# In[77]:

#autoencode.predict(ASR["DEV"].todense())


# In[ ]:

print "auto encoder et auto decoder asr okay"

ASR_AE_H1={}
for i in ASR.keys():
    ASR_AE_H1[i]=auto_decoder.predict(ASR[i].todense())

db["ASR_AE_H1"]=ASR_AE_H1

print "auto encoder trs learning"
# In[68]:
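# Same auto-encoder architecture, trained on the manual transcriptions (TRS).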
autoencode_trs=Sequential()
autoencode_trs.add(Dense(hidden_size,input_dim=TRS["DEV"].todense().shape[1],init='glorot_uniform',activation=input_activation))
if do_do:
    autoencode_trs.add(Dropout(0.5))
autoencode_trs.add(Dense(TRS["DEV"].todense().shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation))

autoencode_trs.compile(optimizer=sgd,loss=loss)


# In[69]:

autoencode_trs.fit(TRS["TRAIN"].todense(),TRS["TRAIN"].todense(),nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=patience, verbose=0)],
               validation_data=(TRS["DEV"].todense(),TRS["DEV"].todense()),verbose=1)


# In[87]:



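# TRS encoder: hidden-layer projection built from the first layer of the trained TRS auto-encoder.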
auto_decoder_trs=Sequential()
auto_decoder_trs.add(Dense(hidden_size,input_dim=TRS["DEV"].todense().shape[1],activation=input_activation,weights=autoencode_trs.get_weights()[:2]))
auto_decoder_trs.compile(optimizer=sgd,loss=loss)


# In[88]:
print "auto encoder trs okay"
TRS_AE_H1={}

for i in TRS.keys():
    TRS_AE_H1[i]=auto_decoder_trs.predict(TRS[i].todense())

db["TRS_AE_H1"]=TRS_AE_H1


db.sync()




# In[261]:


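# Full reconstructions (auto-encoder outputs) for both views, stored alongside the hidden projections.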
TRS_AE={}
ASR_AE={}
for i in TRS.keys():
    TRS_AE[i]=autoencode_trs.predict(TRS[i].todense())
    ASR_AE[i]=autoencode.predict(ASR[i].todense())


db["TRS_AE_OUT"]=TRS_AE
db["ASR_AE_OUT"]=ASR_AE

db.sync()
# # Layer transfer
# HERE
# In[138]:
print "learn transform ae H1({})".format(hidden_size)
model_TRANS = Sequential()
model_TRANS.add(Dense( w1_size,input_dim=hidden_size, init='glorot_uniform', activation=input_activation))
if do_do: 
    model_TRANS.add(Dropout(0.5))
model_TRANS.add(Dense( hidden_size,input_dim=w1_size, init='glorot_uniform', activation=input_activation))
sgd_TRANS = SGD(lr=0.01, decay=1e-4, momentum=0.9, nesterov=True)
#model_TRANS.compile(loss='mse', optimizer=sgd_TRANS)

model_TRANS.compile(loss='mse', optimizer=sgd)


# In[146]:

model_TRANS.fit(ASR_AE_H1["TRAIN"],TRS_AE_H1["TRAIN"],nb_epoch=epochs,batch_size=batch,
               callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=patience, verbose=0)],
               validation_data=(ASR_AE_H1["DEV"],TRS_AE_H1["DEV"]),verbose=1)


# In[140]:
print "make trans projection H1"
asr_transformer={}
for i in ASR_AE.keys():
    asr_transformer[i]=model_TRANS.predict(ASR_AE_H1[i])

db["ASR_H1_TRANFORMED_TRSH1"]=asr_transformer
# In[ ]:

db.sync()


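# First layer of the transfer MLP reused alone, to expose the intermediate w1_size representation.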
auto_decoder_trans=Sequential()
auto_decoder_trans.add(Dense(w1_size,input_dim=hidden_size,activation=input_activation,weights=model_TRANS.get_weights()[:2]))
auto_decoder_trans.compile(optimizer=sgd,loss=loss)

asr_trans_w1={}
for i in ASR_AE.keys():
    asr_trans_w1[i]=auto_decoder_trans.predict(ASR_AE_H1[i])
db["ASR_H1_TRANSFORMED_W1"]=asr_trans_w1
print "shape",ASR_AE["TRAIN"].shape[1]

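# Decode the transferred hidden representations back into the TRS word space,
# reusing the output layer (last weights + bias) of the TRS auto-encoder.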
model_TRANS_H2_OUT = Sequential()
model_TRANS_H2_OUT.add(Dense(TRS["DEV"].todense().shape[1],input_dim=hidden_size,init="glorot_uniform",activation=out_activation,weights=autoencode_trs.get_weights()[-2:]))
sgd_out = SGD(lr=0.01, decay=1e-4, momentum=0.9, nesterov=True)
model_TRANS_H2_OUT.compile(loss='mse', optimizer=sgd)

asr_tranform_out={}
for i in ASR_AE.keys():
    asr_tranform_out[i]=model_TRANS_H2_OUT.predict(asr_transformer[i])

db["ASR_H2_TRANFORMED_OUT"]=asr_tranform_out
db.sync()


db.close()
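
# ---------------------------------------------------------------------------
# Sketch: how a downstream script could read the representations stored above.
# The basename "myrun" is a hypothetical example of the value passed as
# sys.argv[2]; the key names match the db[...] writes in this file.
#
#   import shelve
#   db = shelve.open("myrun.shelve")
#   asr_h1  = db["ASR_AE_H1"]["TRAIN"]              # hidden-layer projection of the ASR train set
#   trs_h1  = db["TRS_AE_H1"]["TRAIN"]              # hidden-layer projection of the TRS train set
#   asr2trs = db["ASR_H1_TRANFORMED_TRSH1"]["DEV"]  # ASR H1 mapped into the TRS H1 space
#   asr_out = db["ASR_H2_TRANFORMED_OUT"]["DEV"]    # decoded back into the TRS word space
#   db.close()
# ---------------------------------------------------------------------------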