## import system modules
#
import os

## import ML and datatype modules
#
from keras.models import model_from_json
from keras.utils import to_categorical
from sklearn import preprocessing
import numpy as np
np.set_printoptions(suppress=True)

NO_OF_CLASSES = 2

## This method performs decoding on the test data using the trained model.
## input arguments are the test features and labels as numpy arrays, the
## abs locations of the model architecture/weights and the output (hyp) file
#
def dcd_neural_network(feats, labs, mdl_arch, mdl_ws, hyp_file):

    ## read the architecture file (JSON, so open in text mode)
    #
    with open(mdl_arch, 'r') as mdl_fcont:
        mdl = mdl_fcont.read()

    ## load the network and its weights
    #
    loaded_mdl = model_from_json(mdl)
    loaded_mdl.load_weights(mdl_ws)

    ## predict class probabilities on the test set (predict() returns the
    ## output-layer activations; predict_proba() was removed in recent Keras)
    #
    score = loaded_mdl.predict(feats, batch_size=1)
    print(score)

    ## write hyp file
    #
    write_ofile(hyp_file, score)

## end of method
#

## This method writes one hypothesis per line; column 0 of each prediction
## row holds the probability of the first class
#
def write_ofile(ofile, pred):
    with open(ofile, 'w') as fout:
        for ele in pred:
            if float(ele[0]) > 0.5:
                fout.write("1\n")
            else:
                fout.write("0\n")

## This method reads a label/features file and returns the features and
## labels as a tuple of numpy arrays
#
def extract_data(fp_a):
    dat = readflines(fp_a)

    ## initialize the feature and label lists
    #
    feats = []
    labs = []

    ## collect all the features and labels
    #
    for line in dat:
        l_fields = line.split()

        ## convert strings to int/float datatypes
        #
        feats_m = [float(f) for f in l_fields[1:]]
        labs_m = int(l_fields[0])
        feats.append(feats_m)
        labs.append(labs_m)

    feats = np.asarray(feats)
    labs = np.asarray(labs)

    ## return feats and labels as a tuple
    #
    return (feats, labs)

## This method reads lines of a filelist and returns them as a list
#
def readflines(list_a):
    with open(list_a, 'r') as fl:
        return fl.read().splitlines()

## This method creates a directory tree if it does not already exist
#
def create_dirtree(dirtree_a):
    if not os.path.exists(dirtree_a):
        os.makedirs(dirtree_a)

def main():
    labfeatslist = "../exam_dat/dev.txt"
    mdl_architecture = "../keras_output/mlp_net.json"
    mdl_weights = "../keras_output/dnn.nnet.h5"
    hyp_file = "../keras_hyp.txt"
    normalize_f = True

    feats, labs = extract_data(labfeatslist)

    ## scale each feature dimension by its maximum absolute value
    ## (maps features into [-1, 1])
    #
    if normalize_f:
        max_abs_scaler = preprocessing.MaxAbsScaler()
        max_abs_scaler.fit(feats)
        feats = max_abs_scaler.transform(feats)

    labs_cat = to_categorical(labs, num_classes=NO_OF_CLASSES)

    ## decode the test set with the trained network
    #
    dcd_neural_network(feats, labs_cat, mdl_architecture, mdl_weights, hyp_file)

if __name__ == "__main__":
    main()
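
## Example of the expected input line format (an assumption inferred from how
## extract_data() parses each line: an integer label first, followed by the
## feature values, all whitespace-separated):
##
##   1 0.25 -0.13 0.98
##   0 0.40  0.07 0.55
##
## Running this script (the file name used here is hypothetical):
##
##   python dcd_mlp.py
##
## then writes one hypothesis label ("0" or "1") per test example to the
## hyp_file path configured in main().
#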