mirna_data_processing.R
raw_data.R
nano_count.R
nn_loop_miRNA.py
nn_miRNA.py
Data structures from the cluster are saved with cPickle and loaded into this Jupyter notebook for further visualization and analysis.
Files generated by R can be found under the tumor-origin/mirna folder.
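For reference, a minimal sketch of how one of the pickled cluster results (for instance, the neural-network confusion matrix that is pasted in below as nn_cm) could be loaded here. The path results/nn_cm.pkl is an assumption for illustration, not the actual file location:
try:
    import cPickle as pickle  # Python 2, as used on the cluster
except ImportError:
    import pickle  # Python 3

# hypothetical path; the real pickle files live wherever the cluster jobs wrote them
with open('results/nn_cm.pkl', 'rb') as f:
    nn_cm = pickle.load(f)
print(nn_cm)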
import tensorflow as tf
import numpy
import pandas as pd
from tensorflow.keras import layers
from tensorflow.keras.utils import to_categorical
from numpy import random
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn import svm
from sklearn import metrics
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
try:
    import cPickle as pickle  # Python 2, as used on the cluster
except ImportError:
    import pickle  # Python 3: cPickle was folded into the standard pickle module
data = pd.read_csv('mirna/raw.txt', sep='\t')
types = pd.read_csv('mirna/types-numeric.txt', sep='\t')
labels = pd.read_csv('mirna/types-labels.txt', sep='\t')
# train test split
random.seed(69)
ii = numpy.random.rand(len(data)) < 0.7
np_data = data.values
np_types = types.values
np_labels = labels.values
# scaling data
from sklearn import preprocessing
min_max_scaler = preprocessing.MinMaxScaler()
np_data_min_max = min_max_scaler.fit_transform(np_data)
train = np_data_min_max[ii]
test = np_data_min_max[~ii]
# train = np_data[ii]
# test = np_data[~ii]
pand_train = data[ii]
pand_test = data[~ii]
# types = numbers assigned (0-16)
train_types = np_types[ii]
test_types = np_types[~ii]
# labels = string values assigned (then one-hot encoded later)
train_labels = np_labels[ii]
test_labels = np_labels[~ii]
# ravel
r_train_types = train_types.ravel()
r_test_types = test_types.ravel()
r_train_labels = train_labels.ravel()
r_test_labels = test_labels.ravel()
# One hot encoding of string labels for keras model
encoded_train = to_categorical(r_train_types)
encoded_test = to_categorical(r_test_types)
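The network itself is trained on the cluster (nn_loop_miRNA.py / nn_miRNA.py); only its confusion matrix is pasted further down. As a rough sketch of what a dense classifier over these one-hot labels might look like — the layer widths, dropout rate, batch size, and epoch count are illustrative assumptions, not the cluster configuration:
# sketch only: hyperparameters are assumptions, not the settings used on the cluster
model = tf.keras.Sequential([
    tf.keras.Input(shape=(train.shape[1],)),
    layers.Dense(256, activation='relu'),
    layers.Dropout(0.5),
    layers.Dense(64, activation='relu'),
    layers.Dense(encoded_train.shape[1], activation='softmax'),
])
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(train, encoded_train, epochs=20, batch_size=64,
          validation_data=(test, encoded_test))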
# random forest baseline
rfmodel = RandomForestClassifier(n_estimators=100)
rf = rfmodel.fit(train, r_train_labels)
rf_pred = rf.predict(test)
# Model accuracy: how often is the classifier correct?
print("Accuracy: ", metrics.accuracy_score(r_test_labels, rf_pred))
rf_cm = confusion_matrix(r_test_labels, rf_pred)
y_true = pd.Series(r_test_labels)
rf_pred = pd.Series(rf_pred)
pd.crosstab(y_true, rf_pred, rownames=['True'], colnames=['Predicted'], margins=True)
('Accuracy: ', 0.9537037037037037)
True \ Predicted | blca | brca | chol | coad | esca | hnsc | kich | kirc | lich | luad | ov | paad | prad | skcm | stad | thca | ucec | All |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
blca | 128 | 4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 |
brca | 1 | 372 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 373 |
chol | 0 | 2 | 10 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
coad | 0 | 2 | 0 | 152 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 154 |
esca | 24 | 3 | 0 | 5 | 33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 66 |
hnsc | 1 | 1 | 0 | 0 | 0 | 154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 157 |
kich | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 30 |
kirc | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 178 |
lich | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 118 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 120 |
luad | 1 | 7 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 157 | 1 | 0 | 0 | 0 | 1 | 0 | 5 | 173 |
ov | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 140 | 0 | 0 | 0 | 0 | 0 | 4 | 144 |
paad | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 2 | 0 | 39 | 0 | 0 | 6 | 1 | 0 | 50 |
prad | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0 | 0 | 0 | 0 | 170 |
skcm | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0 | 0 | 0 | 123 |
stad | 1 | 0 | 0 | 8 | 0 | 1 | 0 | 0 | 0 | 3 | 0 | 1 | 1 | 0 | 131 | 0 | 0 | 146 |
thca | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 167 | 0 | 169 |
ucec | 0 | 7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 179 |
All | 157 | 402 | 10 | 165 | 34 | 157 | 29 | 177 | 118 | 164 | 141 | 40 | 169 | 124 | 139 | 168 | 182 | 2376 |
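KNeighborsClassifier and svm are imported above but not exercised in this notebook; a hedged sketch of running them as additional baselines on the same split (k=5 and the linear kernel are arbitrary choices, not tuned values from this project):
# sketch: hyperparameters below are arbitrary, not tuned values from this project
knn = KNeighborsClassifier(n_neighbors=5).fit(train, r_train_labels)
print("KNN accuracy: ", accuracy_score(r_test_labels, knn.predict(test)))

svc = svm.SVC(kernel='linear').fit(train, r_train_labels)
print("SVM accuracy: ", accuracy_score(r_test_labels, svc.predict(test)))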
# sklearn's confusion_matrix orders rows/columns by sorted label, so keep this list alphabetical
cm_labels = ['blca', 'brca', 'chol', 'coad', 'esca', 'hnsc', 'kich', 'kirc', 'lich', 'luad', 'ov', 'paad', 'prad', 'skcm', 'stad', 'thca', 'ucec']
labels = cm_labels
plt.matshow(rf_cm, cmap='binary')
plt.xticks(range(len(labels)), labels, rotation=45)
plt.yticks(range(len(labels)), labels)
labels = ['blca', 'brca', 'chol', 'coad', 'esca', 'hnsc', 'kich', 'kirc', 'lich', 'luad', 'ov', 'paad', 'prad', 'skcm', 'stad', 'thca', 'ucec']
label_values = True
# plt.figure(figsize=(8,8))
# plt.matshow(rf_cm)
matfig = plt.figure(figsize=(12,12))
plt.matshow(rf_cm, fignum=matfig.number)
plt.title('Predicted label')
plt.colorbar()
plt.xticks(range(len(labels)), labels, rotation=90)
plt.yticks(range(len(labels)), labels)
plt.ylabel("True label")
if label_values:
    for (i, j), z in numpy.ndenumerate(rf_cm):
        plt.text(j, i, z, ha='center', va='center',
                 bbox=dict(boxstyle='round', facecolor='white', edgecolor='0.3'))
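classification_report is imported above but never called; per-class precision, recall, and F1 for the random-forest baseline could be printed with the one-liner below (rf_pred is the Series of predicted labels built above):
# per-class precision / recall / F1 for the random-forest baseline
print(classification_report(r_test_labels, rf_pred))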
# confusion matrix copied from the neural-network run on the cluster (nn_miRNA.py)
nn_cm = [[139, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0],
         [0, 123, 0, 0, 1, 7, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 4, 369, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 2, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 2, 0, 0, 145, 5, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0],
         [0, 24, 0, 0, 6, 33, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0],
         [0, 3, 0, 0, 0, 5, 144, 0, 0, 0, 0, 0, 3, 1, 1, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 2, 0, 0, 0, 0, 0, 176, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 1, 0, 0, 0, 0, 0, 116, 1, 0, 1, 0, 0, 1, 0],
         [0, 3, 4, 0, 1, 1, 0, 0, 0, 0, 159, 0, 3, 0, 2, 0, 0],
         [0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 168, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 7, 7, 2, 0, 0, 0, 1, 0, 126, 0, 0, 3, 0],
         [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 3, 0, 0, 165, 0, 0, 0],
         [0, 1, 3, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 172, 0, 0],
         [0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 6, 0, 0, 42, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 122]]
print(nn_cm)
[[139, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0], [0, 123, 0, 0, 1, 7, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 4, 369, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 0, 0, 145, 5, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0], [0, 24, 0, 0, 6, 33, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0], [0, 3, 0, 0, 0, 5, 144, 0, 0, 0, 0, 0, 3, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 2, 0, 0, 0, 0, 0, 176, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0, 0, 0, 0, 116, 1, 0, 1, 0, 0, 1, 0], [0, 3, 4, 0, 1, 1, 0, 0, 0, 0, 159, 0, 3, 0, 2, 0, 0], [0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 168, 0, 0, 0, 0, 0], [0, 0, 0, 0, 7, 7, 2, 0, 0, 0, 1, 0, 126, 0, 0, 3, 0], [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 3, 0, 0, 165, 0, 0, 0], [0, 1, 3, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 172, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 6, 0, 0, 42, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 122]]
labels = ['ov','blca', 'brca', 'chol', 'coad', 'esca', 'hnsc', 'kich', 'kirc', 'lich', 'luad', 'prad', 'stad', 'thca', 'ucec', 'paad','skcm']
label_values = True
# plt.figure(figsize=(8,8))
# plt.matshow(rf_cm)
matfig = plt.figure(figsize=(12,12))
plt.matshow(nn_cm, fignum=matfig.number)
plt.title('Predicted label')
plt.colorbar()
plt.xticks(range(len(labels)), labels, rotation=90)
plt.yticks(range(len(labels)), labels)
plt.ylabel("True label")
if label_values:
    for (i, j), z in numpy.ndenumerate(nn_cm):
        plt.text(j, i, z, ha='center', va='center',
                 bbox=dict(boxstyle='round', facecolor='white', edgecolor='0.3'))
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import ShuffleSplit
from sklearn.metrics import r2_score
from collections import defaultdict
X = train
Y = r_train_types
names = pand_train.columns.values
rf = RandomForestRegressor()
scores = defaultdict(list)
# start with 10 splits
rs = ShuffleSplit(n_splits=10, test_size=.30, random_state=69)
rs.get_n_splits(X)
print(rs)
# cross-validate the scores on a number of different random splits of the data
for train_idx, test_idx in rs.split(X):
    X_train, X_test = X[train_idx], X[test_idx]
    Y_train, Y_test = Y[train_idx], Y[test_idx]
    r = rf.fit(X_train, Y_train)
    acc = r2_score(Y_test, rf.predict(X_test))
    # permutation importance: shuffle one feature column at a time and record the relative drop in R^2
    for i in range(X.shape[1]):
        X_t = X_test.copy()
        numpy.random.shuffle(X_t[:, i])
        shuff_acc = r2_score(Y_test, rf.predict(X_t))
        scores[names[i]].append((acc - shuff_acc) / acc)
print("Features sorted by their score:")
print(sorted([(round(numpy.mean(score), 4), feat) for
              feat, score in scores.items()], reverse=True))
ShuffleSplit(n_splits=10, random_state=69, test_size=0.3, train_size=None) Features sorted by their score: [(0.1688, 'hsa_miR_140_5p'), (0.0628, 'hsa_miR_944'), (0.0278, 'hsa_miR_532_5p'), (0.0247, 'hsa_miR_584_5p'), (0.0221, 'hsa_miR_146a_5p'), (0.0209, 'hsa_miR_362_3p'), (0.0199, 'hsa_miR_23b_3p'), (0.0178, 'hsa_miR_211_5p'), (0.0153, 'hsa_let_7i_5p'), (0.0149, 'hsa_miR_92b_3p'), (0.0127, 'hsa_miR_561_5p'), (0.0114, 'hsa_miR_577'), (0.0113, 'hsa_let_7g_5p'), (0.011, 'hsa_miR_206'), (0.0104, 'hsa_miR_141_5p'), (0.0103, 'hsa_miR_375'), (0.0091, 'hsa_miR_135b_5p'), (0.009, 'hsa_miR_28_5p'), (0.0081, 'hsa_miR_192_5p'), (0.008, 'hsa_miR_99b_5p'), (0.0075, 'hsa_miR_194_5p'), (0.0072, 'hsa_miR_205_5p'), (0.007, 'hsa_miR_200a_3p'), (0.0068, 'hsa_miR_29a_5p'), (0.0066, 'hsa_miR_181c_3p'), (0.0061, 'hsa_miR_135a_3p'), (0.0058, 'hsa_let_7e_3p'), (0.0051, 'hsa_miR_197_3p'), (0.005, 'hsa_miR_22_3p'), (0.005, 'hsa_miR_135a_5p'), (0.0047, 'hsa_miR_628_5p'), (0.0046, 'hsa_miR_361_5p'), (0.0043, 'hsa_miR_30e_5p'), (0.0042, 'hsa_miR_199b_5p'), (0.0037, 'hsa_miR_30d_5p'), (0.0037, 'hsa_miR_10b_5p'), (0.0034, 'hsa_miR_6510_3p'), (0.0033, 'hsa_miR_934'), (0.0033, 'hsa_miR_671_3p'), (0.0032, 'hsa_miR_27b_3p'), (0.0032, 'hsa_miR_196b_5p'), (0.0029, 'hsa_miR_5680'), (0.0029, 'hsa_miR_132_3p'), (0.0028, 'hsa_miR_374a_3p'), (0.0027, 'hsa_miR_194_3p'), (0.0024, 'hsa_miR_1301_3p'), (0.0023, 'hsa_miR_200c_3p'), (0.0022, 'hsa_miR_885_5p'), (0.0021, 'hsa_miR_505_3p'), (0.002, 'hsa_miR_122_5p'), (0.0018, 'hsa_miR_21_5p'), (0.0018, 'hsa_miR_192_3p'), (0.0018, 'hsa_miR_101_3p'), (0.0017, 'hsa_miR_30d_3p'), (0.0017, 'hsa_miR_29c_3p'), (0.0017, 'hsa_miR_29a_3p'), (0.0017, 'hsa_miR_28_3p'), (0.0017, 'hsa_miR_27b_5p'), (0.0017, 'hsa_miR_205_3p'), (0.0017, 'hsa_miR_1262'), (0.0016, 'hsa_miR_429'), (0.0016, 'hsa_miR_26b_5p'), (0.0016, 'hsa_miR_26a_5p'), (0.0016, 'hsa_miR_200b_3p'), (0.0016, 'hsa_miR_196a_5p'), (0.0016, 'hsa_miR_185_5p'), (0.0016, 'hsa_miR_126_5p'), (0.0015, 'hsa_miR_219a_2_3p'), (0.0015, 'hsa_miR_181a_2_3p'), (0.0015, 'hsa_let_7g_3p'), (0.0014, 'hsa_miR_5706'), (0.0014, 'hsa_miR_1224_5p'), (0.0014, 'hsa_miR_100_5p'), (0.0013, 'hsa_miR_4423_5p'), (0.0013, 'hsa_miR_30a_5p'), (0.0013, 'hsa_miR_215_5p'), (0.0013, 'hsa_miR_148b_3p'), (0.0013, 'hsa_miR_126_3p'), (0.0013, 'hsa_miR_10a_5p'), (0.0012, 'hsa_miR_509_3p'), (0.0012, 'hsa_miR_143_3p'), (0.0011, 'hsa_miR_708_5p'), (0.0011, 'hsa_miR_21_3p'), (0.0011, 'hsa_miR_218_1_3p'), (0.0011, 'hsa_miR_200a_5p'), (0.001, 'hsa_miR_676_3p'), (0.001, 'hsa_miR_190a_5p'), (0.001, 'hsa_miR_142_3p'), (0.001, 'hsa_miR_1293'), (0.0009, 'hsa_miR_508_3p'), (0.0009, 'hsa_miR_491_5p'), (0.0009, 'hsa_miR_4709_3p'), (0.0009, 'hsa_miR_4431'), (0.0009, 'hsa_miR_3912_3p'), (0.0009, 'hsa_miR_338_5p'), (0.0009, 'hsa_miR_326'), (0.0009, 'hsa_miR_204_5p'), (0.0009, 'hsa_miR_200c_5p'), (0.0009, 'hsa_miR_148a_3p'), (0.0009, 'hsa_miR_138_1_3p'), (0.0009, 'hsa_miR_1287_5p'), (0.0009, 'hsa_miR_1226_3p'), (0.0009, 'hsa_let_7f_1_3p'), (0.0008, 'hsa_miR_6815_3p'), (0.0008, 'hsa_miR_3909'), (0.0008, 'hsa_miR_34a_5p'), (0.0008, 'hsa_miR_30a_3p'), (0.0008, 'hsa_miR_125b_5p'), (0.0008, 'hsa_miR_10b_3p'), (0.0007, 'hsa_miR_6499_5p'), (0.0007, 'hsa_miR_582_5p'), (0.0007, 'hsa_miR_580_5p'), (0.0007, 'hsa_miR_3920'), (0.0007, 'hsa_miR_30c_2_3p'), (0.0007, 'hsa_miR_20b_5p'), (0.0007, 'hsa_miR_181a_5p'), (0.0007, 'hsa_miR_146b_5p'), (0.0007, 'hsa_miR_140_3p'), (0.0007, 'hsa_miR_1243'), (0.0007, 'hsa_let_7f_5p'), (0.0006, 'hsa_miR_627_5p'), (0.0006, 'hsa_miR_449a'), (0.0006, 'hsa_miR_34c_5p'), (0.0006, 
'hsa_miR_328_3p'), (0.0006, 'hsa_miR_30e_3p'), (0.0006, 'hsa_miR_30b_3p'), (0.0006, 'hsa_miR_23b_5p'), (0.0006, 'hsa_miR_195_5p'), (0.0006, 'hsa_miR_145_3p'), (0.0006, 'hsa_miR_141_3p'), (0.0006, 'hsa_miR_1247_5p'), (0.0005, 'hsa_miR_95_3p'), (0.0005, 'hsa_miR_885_3p'), (0.0005, 'hsa_miR_708_3p'), (0.0005, 'hsa_miR_6125'), (0.0005, 'hsa_miR_582_3p'), (0.0005, 'hsa_miR_502_3p'), (0.0005, 'hsa_miR_4662a_5p'), (0.0005, 'hsa_miR_455_3p'), (0.0005, 'hsa_miR_449c_5p'), (0.0005, 'hsa_miR_4473'), (0.0005, 'hsa_miR_374a_5p'), (0.0005, 'hsa_miR_34c_3p'), (0.0005, 'hsa_miR_345_5p'), (0.0005, 'hsa_miR_335_5p'), (0.0005, 'hsa_miR_296_5p'), (0.0005, 'hsa_miR_217'), (0.0005, 'hsa_miR_16_2_3p'), (0.0005, 'hsa_miR_151a_3p'), (0.0005, 'hsa_miR_149_5p'), (0.0005, 'hsa_miR_128_3p'), (0.0005, 'hsa_miR_1'), (0.0005, 'hsa_let_7b_3p'), (0.0005, 'hsa_let_7a_5p'), (0.0004, 'hsa_miR_9_5p'), (0.0004, 'hsa_miR_99a_5p'), (0.0004, 'hsa_miR_98_5p'), (0.0004, 'hsa_miR_891a_5p'), (0.0004, 'hsa_miR_7_5p'), (0.0004, 'hsa_miR_7976'), (0.0004, 'hsa_miR_6806_3p'), (0.0004, 'hsa_miR_671_5p'), (0.0004, 'hsa_miR_653_5p'), (0.0004, 'hsa_miR_6514_5p'), (0.0004, 'hsa_miR_6513_3p'), (0.0004, 'hsa_miR_589_5p'), (0.0004, 'hsa_miR_548v'), (0.0004, 'hsa_miR_545_5p'), (0.0004, 'hsa_miR_542_3p'), (0.0004, 'hsa_miR_511_5p'), (0.0004, 'hsa_miR_499a_5p'), (0.0004, 'hsa_miR_4796_5p'), (0.0004, 'hsa_miR_4782_5p'), (0.0004, 'hsa_miR_4659a_5p'), (0.0004, 'hsa_miR_449b_5p'), (0.0004, 'hsa_miR_382_3p'), (0.0004, 'hsa_miR_370_5p'), (0.0004, 'hsa_miR_363_3p'), (0.0004, 'hsa_miR_32_5p'), (0.0004, 'hsa_miR_214_5p'), (0.0004, 'hsa_miR_2116_3p'), (0.0004, 'hsa_miR_210_3p'), (0.0004, 'hsa_miR_204_3p'), (0.0004, 'hsa_miR_202_3p'), (0.0004, 'hsa_miR_199a_5p'), (0.0004, 'hsa_miR_196b_3p'), (0.0004, 'hsa_miR_193b_3p'), (0.0004, 'hsa_miR_152_3p'), (0.0004, 'hsa_miR_148a_5p'), (0.0004, 'hsa_miR_145_5p'), (0.0004, 'hsa_miR_138_5p'), (0.0004, 'hsa_miR_132_5p'), (0.0004, 'hsa_miR_1292_5p'), (0.0004, 'hsa_miR_1179'), (0.0004, 'hsa_miR_103a_3p'), (0.0004, 'hsa_let_7a_3p'), (0.0003, 'hsa_miR_874_3p'), (0.0003, 'hsa_miR_744_5p'), (0.0003, 'hsa_miR_6855_5p'), (0.0003, 'hsa_miR_6815_5p'), (0.0003, 'hsa_miR_6772_3p'), (0.0003, 'hsa_miR_676_5p'), (0.0003, 'hsa_miR_675_3p'), (0.0003, 'hsa_miR_6726_3p'), (0.0003, 'hsa_miR_658'), (0.0003, 'hsa_miR_625_3p'), (0.0003, 'hsa_miR_599'), (0.0003, 'hsa_miR_552_5p'), (0.0003, 'hsa_miR_548q'), (0.0003, 'hsa_miR_548n'), (0.0003, 'hsa_miR_548e_5p'), (0.0003, 'hsa_miR_514a_5p'), (0.0003, 'hsa_miR_510_3p'), (0.0003, 'hsa_miR_5090'), (0.0003, 'hsa_miR_489_3p'), (0.0003, 'hsa_miR_483_5p'), (0.0003, 'hsa_miR_4791'), (0.0003, 'hsa_miR_4768_5p'), (0.0003, 'hsa_miR_4728_3p'), (0.0003, 'hsa_miR_455_5p'), (0.0003, 'hsa_miR_452_3p'), (0.0003, 'hsa_miR_423_5p'), (0.0003, 'hsa_miR_3922_3p'), (0.0003, 'hsa_miR_342_5p'), (0.0003, 'hsa_miR_320a'), (0.0003, 'hsa_miR_31_3p'), (0.0003, 'hsa_miR_3158_3p'), (0.0003, 'hsa_miR_3152_5p'), (0.0003, 'hsa_miR_30c_5p'), (0.0003, 'hsa_miR_224_5p'), (0.0003, 'hsa_miR_200b_5p'), (0.0003, 'hsa_miR_19b_3p'), (0.0003, 'hsa_miR_1915_3p'), (0.0003, 'hsa_miR_181b_3p'), (0.0003, 'hsa_miR_142_5p'), (0.0003, 'hsa_miR_135b_3p'), (0.0003, 'hsa_miR_133a_3p'), (0.0003, 'hsa_miR_1251_3p'), (0.0003, 'hsa_miR_1247_3p'), (0.0003, 'hsa_miR_1180_3p'), (0.0003, 'hsa_miR_10a_3p'), (0.0003, 'hsa_miR_101_5p'), (0.0003, 'hsa_miR_100_3p'), (0.0002, 'hsa_miR_93_5p'), (0.0002, 'hsa_miR_92a_1_5p'), (0.0002, 'hsa_miR_892a'), (0.0002, 'hsa_miR_891b'), (0.0002, 'hsa_miR_887_3p'), (0.0002, 'hsa_miR_7_2_3p'), (0.0002, 'hsa_miR_6892_5p'), (0.0002, 
'hsa_miR_6885_5p'), (0.0002, 'hsa_miR_6818_5p'), (0.0002, 'hsa_miR_6809_3p'), (0.0002, 'hsa_miR_6792_3p'), (0.0002, 'hsa_miR_6761_3p'), (0.0002, 'hsa_miR_6747_3p'), (0.0002, 'hsa_miR_6734_5p'), (0.0002, 'hsa_miR_6733_3p'), (0.0002, 'hsa_miR_6720_3p'), (0.0002, 'hsa_miR_660_5p'), (0.0002, 'hsa_miR_653_3p'), (0.0002, 'hsa_miR_652_3p'), (0.0002, 'hsa_miR_6515_5p'), (0.0002, 'hsa_miR_6513_5p'), (0.0002, 'hsa_miR_6512_3p'), (0.0002, 'hsa_miR_6501_3p'), (0.0002, 'hsa_miR_629_5p'), (0.0002, 'hsa_miR_615_3p'), (0.0002, 'hsa_miR_605_5p'), (0.0002, 'hsa_miR_598_3p'), (0.0002, 'hsa_miR_590_3p'), (0.0002, 'hsa_miR_589_3p'), (0.0002, 'hsa_miR_584_3p'), (0.0002, 'hsa_miR_576_5p'), (0.0002, 'hsa_miR_5696'), (0.0002, 'hsa_miR_5579_3p'), (0.0002, 'hsa_miR_552_3p'), (0.0002, 'hsa_miR_548e_3p'), (0.0002, 'hsa_miR_532_3p'), (0.0002, 'hsa_miR_502_5p'), (0.0002, 'hsa_miR_487b_3p'), (0.0002, 'hsa_miR_484'), (0.0002, 'hsa_miR_4802_5p'), (0.0002, 'hsa_miR_4795_3p'), (0.0002, 'hsa_miR_4787_3p'), (0.0002, 'hsa_miR_4777_3p'), (0.0002, 'hsa_miR_4775'), (0.0002, 'hsa_miR_4746_5p'), (0.0002, 'hsa_miR_4745_3p'), (0.0002, 'hsa_miR_4690_3p'), (0.0002, 'hsa_miR_4676_3p'), (0.0002, 'hsa_miR_4658'), (0.0002, 'hsa_miR_4634'), (0.0002, 'hsa_miR_4632_3p'), (0.0002, 'hsa_miR_4529_3p'), (0.0002, 'hsa_miR_449b_3p'), (0.0002, 'hsa_miR_4469'), (0.0002, 'hsa_miR_4433b_3p'), (0.0002, 'hsa_miR_4423_3p'), (0.0002, 'hsa_miR_425_3p'), (0.0002, 'hsa_miR_410_3p'), (0.0002, 'hsa_miR_3913_5p'), (0.0002, 'hsa_miR_3913_3p'), (0.0002, 'hsa_miR_374b_5p'), (0.0002, 'hsa_miR_3652'), (0.0002, 'hsa_miR_3616_3p'), (0.0002, 'hsa_miR_3611'), (0.0002, 'hsa_miR_3610'), (0.0002, 'hsa_miR_34b_3p'), (0.0002, 'hsa_miR_34a_3p'), (0.0002, 'hsa_miR_339_5p'), (0.0002, 'hsa_miR_338_3p'), (0.0002, 'hsa_miR_335_3p'), (0.0002, 'hsa_miR_323a_3p'), (0.0002, 'hsa_miR_320c'), (0.0002, 'hsa_miR_3154'), (0.0002, 'hsa_miR_3136_5p'), (0.0002, 'hsa_miR_29b_2_5p'), (0.0002, 'hsa_miR_27a_3p'), (0.0002, 'hsa_miR_24_3p'), (0.0002, 'hsa_miR_24_1_5p'), (0.0002, 'hsa_miR_23a_3p'), (0.0002, 'hsa_miR_2355_5p'), (0.0002, 'hsa_miR_221_3p'), (0.0002, 'hsa_miR_218_5p'), (0.0002, 'hsa_miR_203b_3p'), (0.0002, 'hsa_miR_203a'), (0.0002, 'hsa_miR_199a_3p'), (0.0002, 'hsa_miR_193a_5p'), (0.0002, 'hsa_miR_181b_5p'), (0.0002, 'hsa_miR_181a_3p'), (0.0002, 'hsa_miR_16_5p'), (0.0002, 'hsa_miR_150_3p'), (0.0002, 'hsa_miR_146b_3p'), (0.0002, 'hsa_miR_139_5p'), (0.0002, 'hsa_miR_139_3p'), (0.0002, 'hsa_miR_136_5p'), (0.0002, 'hsa_miR_133b'), (0.0002, 'hsa_miR_130b_3p'), (0.0002, 'hsa_miR_130a_3p'), (0.0002, 'hsa_miR_1307_5p'), (0.0002, 'hsa_miR_1307_3p'), (0.0002, 'hsa_miR_129_2_3p'), (0.0002, 'hsa_miR_1266_5p'), (0.0002, 'hsa_miR_125b_1_3p'), (0.0002, 'hsa_miR_125a_5p'), (0.0002, 'hsa_miR_1251_5p'), (0.0002, 'hsa_miR_122_3p'), (0.0002, 'hsa_miR_106b_3p'), (0.0002, 'hsa_miR_106a_5p'), (0.0002, 'hsa_let_7i_3p'), (0.0001, 'hsa_miR_9_3p'), (0.0001, 'hsa_miR_99b_3p'), (0.0001, 'hsa_miR_99a_3p'), (0.0001, 'hsa_miR_98_3p'), (0.0001, 'hsa_miR_96_5p'), (0.0001, 'hsa_miR_942_5p'), (0.0001, 'hsa_miR_942_3p'), (0.0001, 'hsa_miR_940'), (0.0001, 'hsa_miR_93_3p'), (0.0001, 'hsa_miR_939_5p'), (0.0001, 'hsa_miR_935'), (0.0001, 'hsa_miR_892b'), (0.0001, 'hsa_miR_873_5p'), (0.0001, 'hsa_miR_802'), (0.0001, 'hsa_miR_7844_5p'), (0.0001, 'hsa_miR_770_5p'), (0.0001, 'hsa_miR_766_3p'), (0.0001, 'hsa_miR_7641'), (0.0001, 'hsa_miR_758_3p'), (0.0001, 'hsa_miR_744_3p'), (0.0001, 'hsa_miR_7112_3p'), (0.0001, 'hsa_miR_6882_5p'), (0.0001, 'hsa_miR_6875_5p'), (0.0001, 'hsa_miR_6874_3p'), (0.0001, 'hsa_miR_6858_5p'), (0.0001, 
'hsa_miR_6853_3p'), (0.0001, 'hsa_miR_6844'), (0.0001, 'hsa_miR_6837_5p'), (0.0001, 'hsa_miR_6813_3p'), (0.0001, 'hsa_miR_6808_3p'), (0.0001, 'hsa_miR_6802_3p'), (0.0001, 'hsa_miR_6793_5p'), (0.0001, 'hsa_miR_6786_3p'), (0.0001, 'hsa_miR_6783_5p'), (0.0001, 'hsa_miR_6779_5p'), (0.0001, 'hsa_miR_6776_5p'), (0.0001, 'hsa_miR_6761_5p'), (0.0001, 'hsa_miR_6748_5p'), (0.0001, 'hsa_miR_6744_5p'), (0.0001, 'hsa_miR_6739_3p'), (0.0001, 'hsa_miR_6734_3p'), (0.0001, 'hsa_miR_6731_5p'), (0.0001, 'hsa_miR_6730_5p'), (0.0001, 'hsa_miR_6723_5p'), (0.0001, 'hsa_miR_6717_5p'), (0.0001, 'hsa_miR_664a_5p'), (0.0001, 'hsa_miR_654_3p'), (0.0001, 'hsa_miR_6507_5p'), (0.0001, 'hsa_miR_6503_5p'), (0.0001, 'hsa_miR_6499_3p'), (0.0001, 'hsa_miR_642a_3p'), (0.0001, 'hsa_miR_628_3p'), (0.0001, 'hsa_miR_625_5p'), (0.0001, 'hsa_miR_624_5p'), (0.0001, 'hsa_miR_616_5p'), (0.0001, 'hsa_miR_607'), (0.0001, 'hsa_miR_592'), (0.0001, 'hsa_miR_590_5p'), (0.0001, 'hsa_miR_581'), (0.0001, 'hsa_miR_579_3p'), (0.0001, 'hsa_miR_5705'), (0.0001, 'hsa_miR_5684'), (0.0001, 'hsa_miR_5682'), (0.0001, 'hsa_miR_5586_5p'), (0.0001, 'hsa_miR_5584_5p'), (0.0001, 'hsa_miR_556_5p'), (0.0001, 'hsa_miR_548b_3p'), (0.0001, 'hsa_miR_548ay_3p'), (0.0001, 'hsa_miR_519a_5p'), (0.0001, 'hsa_miR_518b'), (0.0001, 'hsa_miR_514a_3p'), (0.0001, 'hsa_miR_513c_5p'), (0.0001, 'hsa_miR_508_5p'), (0.0001, 'hsa_miR_506_3p'), (0.0001, 'hsa_miR_5006_3p'), (0.0001, 'hsa_miR_4999_5p'), (0.0001, 'hsa_miR_496'), (0.0001, 'hsa_miR_493_5p'), (0.0001, 'hsa_miR_493_3p'), (0.0001, 'hsa_miR_490_3p'), (0.0001, 'hsa_miR_486_5p'), (0.0001, 'hsa_miR_483_3p'), (0.0001, 'hsa_miR_4797_3p'), (0.0001, 'hsa_miR_4770'), (0.0001, 'hsa_miR_4757_5p'), (0.0001, 'hsa_miR_4753_5p'), (0.0001, 'hsa_miR_4748'), (0.0001, 'hsa_miR_4745_5p'), (0.0001, 'hsa_miR_4743_3p'), (0.0001, 'hsa_miR_4742_3p'), (0.0001, 'hsa_miR_4733_3p'), (0.0001, 'hsa_miR_4732_3p'), (0.0001, 'hsa_miR_4683'), (0.0001, 'hsa_miR_4657'), (0.0001, 'hsa_miR_4647'), (0.0001, 'hsa_miR_4646_5p'), (0.0001, 'hsa_miR_4638_5p'), (0.0001, 'hsa_miR_4536_3p'), (0.0001, 'hsa_miR_452_5p'), (0.0001, 'hsa_miR_4517'), (0.0001, 'hsa_miR_4515'), (0.0001, 'hsa_miR_450a_5p'), (0.0001, 'hsa_miR_450a_2_3p'), (0.0001, 'hsa_miR_4491'), (0.0001, 'hsa_miR_4484'), (0.0001, 'hsa_miR_4474_3p'), (0.0001, 'hsa_miR_4461'), (0.0001, 'hsa_miR_433_3p'), (0.0001, 'hsa_miR_432_5p'), (0.0001, 'hsa_miR_425_5p'), (0.0001, 'hsa_miR_424_5p'), (0.0001, 'hsa_miR_424_3p'), (0.0001, 'hsa_miR_423_3p'), (0.0001, 'hsa_miR_410_5p'), (0.0001, 'hsa_miR_3940_5p'), (0.0001, 'hsa_miR_3934_5p'), (0.0001, 'hsa_miR_3928_3p'), (0.0001, 'hsa_miR_3926'), (0.0001, 'hsa_miR_3917'), (0.0001, 'hsa_miR_383_5p'), (0.0001, 'hsa_miR_382_5p'), (0.0001, 'hsa_miR_378a_5p'), (0.0001, 'hsa_miR_378a_3p'), (0.0001, 'hsa_miR_376c_3p'), (0.0001, 'hsa_miR_374b_3p'), (0.0001, 'hsa_miR_370_3p'), (0.0001, 'hsa_miR_369_5p'), (0.0001, 'hsa_miR_3691_5p'), (0.0001, 'hsa_miR_3691_3p'), (0.0001, 'hsa_miR_3680_3p'), (0.0001, 'hsa_miR_3678_5p'), (0.0001, 'hsa_miR_3677_3p'), (0.0001, 'hsa_miR_3667_3p'), (0.0001, 'hsa_miR_3662'), (0.0001, 'hsa_miR_365a_5p'), (0.0001, 'hsa_miR_3620_5p'), (0.0001, 'hsa_miR_3620_3p'), (0.0001, 'hsa_miR_3617_3p'), (0.0001, 'hsa_miR_3615'), (0.0001, 'hsa_miR_3613_5p'), (0.0001, 'hsa_miR_3613_3p'), (0.0001, 'hsa_miR_3607_5p'), (0.0001, 'hsa_miR_3605_5p'), (0.0001, 'hsa_miR_3605_3p'), (0.0001, 'hsa_miR_3591_5p'), (0.0001, 'hsa_miR_34b_5p'), (0.0001, 'hsa_miR_342_3p'), (0.0001, 'hsa_miR_340_5p'), (0.0001, 'hsa_miR_340_3p'), (0.0001, 'hsa_miR_33a_5p'), (0.0001, 'hsa_miR_339_3p'), (0.0001, 
'hsa_miR_331_5p'), (0.0001, 'hsa_miR_331_3p'), (0.0001, 'hsa_miR_320b'), (0.0001, 'hsa_miR_3200_3p'), (0.0001, 'hsa_miR_31_5p'), (0.0001, 'hsa_miR_3199'), (0.0001, 'hsa_miR_3193'), (0.0001, 'hsa_miR_3191_3p'), (0.0001, 'hsa_miR_3188'), (0.0001, 'hsa_miR_3164'), (0.0001, 'hsa_miR_3160_3p'), (0.0001, 'hsa_miR_3156_5p'), (0.0001, 'hsa_miR_3150a_5p'), (0.0001, 'hsa_miR_3140_3p'), (0.0001, 'hsa_miR_3133'), (0.0001, 'hsa_miR_3127_3p'), (0.0001, 'hsa_miR_3117_3p'), (0.0001, 'hsa_miR_3065_5p'), (0.0001, 'hsa_miR_3065_3p'), (0.0001, 'hsa_miR_3064_5p'), (0.0001, 'hsa_miR_299_5p'), (0.0001, 'hsa_miR_26b_3p'), (0.0001, 'hsa_miR_2355_3p'), (0.0001, 'hsa_miR_22_5p'), (0.0001, 'hsa_miR_2277_5p'), (0.0001, 'hsa_miR_224_3p'), (0.0001, 'hsa_miR_222_5p'), (0.0001, 'hsa_miR_222_3p'), (0.0001, 'hsa_miR_221_5p'), (0.0001, 'hsa_miR_216b_5p'), (0.0001, 'hsa_miR_215_3p'), (0.0001, 'hsa_miR_212_5p'), (0.0001, 'hsa_miR_211_3p'), (0.0001, 'hsa_miR_210_5p'), (0.0001, 'hsa_miR_20b_3p'), (0.0001, 'hsa_miR_202_5p'), (0.0001, 'hsa_miR_19a_3p'), (0.0001, 'hsa_miR_199b_3p'), (0.0001, 'hsa_miR_1976'), (0.0001, 'hsa_miR_196a_3p'), (0.0001, 'hsa_miR_18b_5p'), (0.0001, 'hsa_miR_18b_3p'), (0.0001, 'hsa_miR_18a_3p'), (0.0001, 'hsa_miR_188_3p'), (0.0001, 'hsa_miR_182_5p'), (0.0001, 'hsa_miR_181d_5p'), (0.0001, 'hsa_miR_181c_5p'), (0.0001, 'hsa_miR_17_5p'), (0.0001, 'hsa_miR_17_3p'), (0.0001, 'hsa_miR_15a_5p'), (0.0001, 'hsa_miR_155_5p'), (0.0001, 'hsa_miR_152_5p'), (0.0001, 'hsa_miR_151b'), (0.0001, 'hsa_miR_150_5p'), (0.0001, 'hsa_miR_149_3p'), (0.0001, 'hsa_miR_146a_3p'), (0.0001, 'hsa_miR_1468_5p'), (0.0001, 'hsa_miR_144_3p'), (0.0001, 'hsa_miR_143_5p'), (0.0001, 'hsa_miR_136_3p'), (0.0001, 'hsa_miR_133a_5p'), (0.0001, 'hsa_miR_130b_5p'), (0.0001, 'hsa_miR_130a_5p'), (0.0001, 'hsa_miR_1306_5p'), (0.0001, 'hsa_miR_129_5p'), (0.0001, 'hsa_miR_1298_3p'), (0.0001, 'hsa_miR_1294'), (0.0001, 'hsa_miR_128_1_5p'), (0.0001, 'hsa_miR_1287_3p'), (0.0001, 'hsa_miR_127_5p'), (0.0001, 'hsa_miR_1270'), (0.0001, 'hsa_miR_1269a'), (0.0001, 'hsa_miR_125b_2_3p'), (0.0001, 'hsa_miR_125a_3p'), (0.0001, 'hsa_miR_1248'), (0.0001, 'hsa_miR_1229_5p'), (0.0001, 'hsa_miR_107'), (0.0001, 'hsa_miR_106b_5p'), (0.0001, 'hsa_miR_105_5p'), (0.0001, 'hsa_let_7e_5p'), (0.0001, 'hsa_let_7d_5p'), (0.0001, 'hsa_let_7c_5p'), (0.0001, 'hsa_let_7c_3p'), (0.0001, 'hsa_let_7b_5p'), (0.0001, 'hsa_let_7a_2_3p'), (0.0, 'hsa_miR_96_3p'), (-0.0, 'hsa_miR_95_5p'), (0.0, 'hsa_miR_943'), (0.0, 'hsa_miR_939_3p'), (-0.0, 'hsa_miR_938'), (0.0, 'hsa_miR_937_5p'), (0.0, 'hsa_miR_937_3p'), (0.0, 'hsa_miR_936'), (-0.0, 'hsa_miR_933'), (0.0, 'hsa_miR_92b_5p'), (0.0, 'hsa_miR_92a_3p'), (0.0, 'hsa_miR_92a_2_5p'), (0.0, 'hsa_miR_922'), (0.0, 'hsa_miR_921'), (0.0, 'hsa_miR_920'), (0.0, 'hsa_miR_892c_5p'), (-0.0, 'hsa_miR_892c_3p'), (0.0, 'hsa_miR_891a_3p'), (0.0, 'hsa_miR_890'), (0.0, 'hsa_miR_889_5p'), (0.0, 'hsa_miR_889_3p'), (0.0, 'hsa_miR_888_5p'), (0.0, 'hsa_miR_888_3p'), (0.0, 'hsa_miR_887_5p'), (0.0, 'hsa_miR_877_5p'), (0.0, 'hsa_miR_877_3p'), (0.0, 'hsa_miR_876_5p'), (0.0, 'hsa_miR_876_3p'), (0.0, 'hsa_miR_875_5p'), (0.0, 'hsa_miR_875_3p'), (0.0, 'hsa_miR_874_5p'), (0.0, 'hsa_miR_873_3p'), (0.0, 'hsa_miR_8089'), (0.0, 'hsa_miR_8086'), (0.0, 'hsa_miR_8085'), (0.0, 'hsa_miR_8083'), (0.0, 'hsa_miR_8082'), (0.0, 'hsa_miR_8081'), (0.0, 'hsa_miR_8079'), (0.0, 'hsa_miR_8077'), (0.0, 'hsa_miR_8076'), (0.0, 'hsa_miR_8075'), (0.0, 'hsa_miR_8074'), (0.0, 'hsa_miR_8073'), (0.0, 'hsa_miR_8072'), (0.0, 'hsa_miR_8070'), (0.0, 'hsa_miR_8066'), (0.0, 'hsa_miR_8065'), (0.0, 'hsa_miR_8064'), (0.0, 
'hsa_miR_8063'), (0.0, 'hsa_miR_8059'), (0.0, 'hsa_miR_8058'), (0.0, 'hsa_miR_8057'), (0.0, 'hsa_miR_8052'), (0.0, 'hsa_miR_7_1_3p'), (-0.0, 'hsa_miR_7978'), (0.0, 'hsa_miR_7977'), (0.0, 'hsa_miR_7975'), (-0.0, 'hsa_miR_7974'), (0.0, 'hsa_miR_7973'), (0.0, 'hsa_miR_7856_5p'), (0.0, 'hsa_miR_7855_5p'), (0.0, 'hsa_miR_7854_3p'), (0.0, 'hsa_miR_7853_5p'), (0.0, 'hsa_miR_7852_3p'), (0.0, 'hsa_miR_7851_3p'), (0.0, 'hsa_miR_7850_5p'), (0.0, 'hsa_miR_7849_3p'), (0.0, 'hsa_miR_7848_3p'), (0.0, 'hsa_miR_7847_3p'), (0.0, 'hsa_miR_7846_3p'), (0.0, 'hsa_miR_7845_5p'), (0.0, 'hsa_miR_7843_5p'), (0.0, 'hsa_miR_7843_3p'), (0.0, 'hsa_miR_7706'), (0.0, 'hsa_miR_7704'), (-0.0, 'hsa_miR_7703'), (0.0, 'hsa_miR_7702'), (0.0, 'hsa_miR_769_5p'), (0.0, 'hsa_miR_769_3p'), (-0.0, 'hsa_miR_767_5p'), (-0.0, 'hsa_miR_767_3p'), (0.0, 'hsa_miR_766_5p'), (0.0, 'hsa_miR_765'), (0.0, 'hsa_miR_764'), (0.0, 'hsa_miR_762'), (0.0, 'hsa_miR_761'), (0.0, 'hsa_miR_760'), (-0.0, 'hsa_miR_758_5p'), (0.0, 'hsa_miR_7515'), (0.0, 'hsa_miR_718'), (0.0, 'hsa_miR_7161_5p'), (0.0, 'hsa_miR_7161_3p'), (0.0, 'hsa_miR_7160_5p'), (0.0, 'hsa_miR_7160_3p'), (0.0, 'hsa_miR_7158_5p'), (0.0, 'hsa_miR_7158_3p'), (0.0, 'hsa_miR_7157_5p'), (0.0, 'hsa_miR_7157_3p'), (0.0, 'hsa_miR_7156_3p'), (0.0, 'hsa_miR_7155_5p'), (0.0, 'hsa_miR_7155_3p'), (0.0, 'hsa_miR_7154_3p'), (0.0, 'hsa_miR_7152_5p'), (0.0, 'hsa_miR_7152_3p'), (-0.0, 'hsa_miR_7151_5p'), (0.0, 'hsa_miR_7151_3p'), (0.0, 'hsa_miR_7150'), (0.0, 'hsa_miR_7114_5p'), (0.0, 'hsa_miR_7114_3p'), (-0.0, 'hsa_miR_7113_5p'), (0.0, 'hsa_miR_7113_3p'), (0.0, 'hsa_miR_7112_5p'), (0.0, 'hsa_miR_7111_5p'), (-0.0, 'hsa_miR_7111_3p'), (0.0, 'hsa_miR_7110_5p'), (0.0, 'hsa_miR_7110_3p'), (0.0, 'hsa_miR_711'), (0.0, 'hsa_miR_7109_5p'), (0.0, 'hsa_miR_7109_3p'), (0.0, 'hsa_miR_7108_5p'), (0.0, 'hsa_miR_7108_3p'), (0.0, 'hsa_miR_7107_5p'), (0.0, 'hsa_miR_7107_3p'), (-0.0, 'hsa_miR_7106_3p'), (0.0, 'hsa_miR_6895_5p'), (0.0, 'hsa_miR_6895_3p'), (-0.0, 'hsa_miR_6894_5p'), (0.0, 'hsa_miR_6894_3p'), (0.0, 'hsa_miR_6893_5p'), (0.0, 'hsa_miR_6893_3p'), (0.0, 'hsa_miR_6892_3p'), (0.0, 'hsa_miR_6891_3p'), (0.0, 'hsa_miR_6890_5p'), (0.0, 'hsa_miR_6890_3p'), (0.0, 'hsa_miR_6889_5p'), (0.0, 'hsa_miR_6889_3p'), (0.0, 'hsa_miR_6888_5p'), (0.0, 'hsa_miR_6888_3p'), (0.0, 'hsa_miR_6887_5p'), (-0.0, 'hsa_miR_6887_3p'), (0.0, 'hsa_miR_6886_5p'), (0.0, 'hsa_miR_6886_3p'), (-0.0, 'hsa_miR_6885_3p'), (0.0, 'hsa_miR_6884_5p'), (0.0, 'hsa_miR_6884_3p'), (0.0, 'hsa_miR_6883_5p'), (0.0, 'hsa_miR_6883_3p'), (0.0, 'hsa_miR_6882_3p'), (0.0, 'hsa_miR_6881_5p'), (-0.0, 'hsa_miR_6881_3p'), (0.0, 'hsa_miR_6880_5p'), (0.0, 'hsa_miR_6880_3p'), (0.0, 'hsa_miR_6879_5p'), (-0.0, 'hsa_miR_6879_3p'), (0.0, 'hsa_miR_6878_5p'), (0.0, 'hsa_miR_6878_3p'), (0.0, 'hsa_miR_6877_5p'), (0.0, 'hsa_miR_6877_3p'), (0.0, 'hsa_miR_6876_5p'), (0.0, 'hsa_miR_6876_3p'), (0.0, 'hsa_miR_6875_3p'), (0.0, 'hsa_miR_6874_5p'), (0.0, 'hsa_miR_6873_5p'), (0.0, 'hsa_miR_6873_3p'), (0.0, 'hsa_miR_6872_5p'), (0.0, 'hsa_miR_6872_3p'), (-0.0, 'hsa_miR_6871_5p'), (0.0, 'hsa_miR_6871_3p'), (0.0, 'hsa_miR_6870_5p'), (0.0, 'hsa_miR_6870_3p'), (0.0, 'hsa_miR_6869_5p'), (0.0, 'hsa_miR_6869_3p'), (0.0, 'hsa_miR_6868_5p'), (0.0, 'hsa_miR_6868_3p'), (0.0, 'hsa_miR_6867_5p'), (0.0, 'hsa_miR_6867_3p'), (0.0, 'hsa_miR_6866_5p'), (-0.0, 'hsa_miR_6866_3p'), (0.0, 'hsa_miR_6865_5p'), (0.0, 'hsa_miR_6865_3p'), (0.0, 'hsa_miR_6864_5p'), (-0.0, 'hsa_miR_6864_3p'), (-0.0, 'hsa_miR_6863'), (-0.0, 'hsa_miR_6862_5p'), (0.0, 'hsa_miR_6862_3p'), (0.0, 'hsa_miR_6861_5p'), (0.0, 'hsa_miR_6861_3p'), (-0.0, 
'hsa_miR_6860'), (0.0, 'hsa_miR_6859_3p'), (0.0, 'hsa_miR_6858_3p'), (0.0, 'hsa_miR_6857_5p'), (0.0, 'hsa_miR_6857_3p'), (0.0, 'hsa_miR_6856_5p'), (-0.0, 'hsa_miR_6856_3p'), (0.0, 'hsa_miR_6855_3p'), (-0.0, 'hsa_miR_6854_5p'), (0.0, 'hsa_miR_6854_3p'), (0.0, 'hsa_miR_6853_5p'), (-0.0, 'hsa_miR_6852_5p'), (0.0, 'hsa_miR_6852_3p'), (0.0, 'hsa_miR_6851_5p'), (0.0, 'hsa_miR_6851_3p'), (0.0, 'hsa_miR_6850_5p'), (0.0, 'hsa_miR_6850_3p'), (0.0, 'hsa_miR_6849_5p'), (-0.0, 'hsa_miR_6849_3p'), (0.0, 'hsa_miR_6848_5p'), (0.0, 'hsa_miR_6848_3p'), (0.0, 'hsa_miR_6847_5p'), (0.0, 'hsa_miR_6847_3p'), (0.0, 'hsa_miR_6846_5p'), (0.0, 'hsa_miR_6846_3p'), (0.0, 'hsa_miR_6845_5p'), (0.0, 'hsa_miR_6845_3p'), (-0.0, 'hsa_miR_6843_3p'), (0.0, 'hsa_miR_6842_5p'), (-0.0, 'hsa_miR_6842_3p'), (0.0, 'hsa_miR_6841_5p'), (0.0, 'hsa_miR_6841_3p'), (-0.0, 'hsa_miR_6840_5p'), (0.0, 'hsa_miR_6840_3p'), (0.0, 'hsa_miR_6839_5p'), (0.0, 'hsa_miR_6839_3p'), (0.0, 'hsa_miR_6838_5p'), (0.0, 'hsa_miR_6838_3p'), (-0.0, 'hsa_miR_6837_3p'), (0.0, 'hsa_miR_6836_5p'), (0.0, 'hsa_miR_6836_3p'), (0.0, 'hsa_miR_6835_5p'), (0.0, 'hsa_miR_6835_3p'), (0.0, 'hsa_miR_6834_5p'), (0.0, 'hsa_miR_6834_3p'), (0.0, 'hsa_miR_6833_5p'), (0.0, 'hsa_miR_6833_3p'), (0.0, 'hsa_miR_6832_5p'), (0.0, 'hsa_miR_6832_3p'), (0.0, 'hsa_miR_6831_5p'), (0.0, 'hsa_miR_6831_3p'), (0.0, 'hsa_miR_6830_5p'), (0.0, 'hsa_miR_6830_3p'), (-0.0, 'hsa_miR_6829_5p'), (0.0, 'hsa_miR_6829_3p'), (0.0, 'hsa_miR_6828_5p'), (0.0, 'hsa_miR_6828_3p'), (-0.0, 'hsa_miR_6827_5p'), (0.0, 'hsa_miR_6827_3p'), (0.0, 'hsa_miR_6826_5p'), (0.0, 'hsa_miR_6826_3p'), (0.0, 'hsa_miR_6825_5p'), (0.0, 'hsa_miR_6825_3p'), (-0.0, 'hsa_miR_6824_5p'), (0.0, 'hsa_miR_6824_3p'), (-0.0, 'hsa_miR_6823_5p'), (0.0, 'hsa_miR_6823_3p'), (-0.0, 'hsa_miR_6822_5p'), (0.0, 'hsa_miR_6822_3p'), (0.0, 'hsa_miR_6821_5p'), (0.0, 'hsa_miR_6821_3p'), (0.0, 'hsa_miR_6820_5p'), (-0.0, 'hsa_miR_6820_3p'), (-0.0, 'hsa_miR_6819_5p'), (0.0, 'hsa_miR_6819_3p'), (-0.0, 'hsa_miR_6818_3p'), (0.0, 'hsa_miR_6817_5p'), (0.0, 'hsa_miR_6817_3p'), (0.0, 'hsa_miR_6816_5p'), (0.0, 'hsa_miR_6816_3p'), (-0.0, 'hsa_miR_6814_5p'), (0.0, 'hsa_miR_6814_3p'), (0.0, 'hsa_miR_6813_5p'), (-0.0, 'hsa_miR_6812_5p'), (0.0, 'hsa_miR_6812_3p'), (0.0, 'hsa_miR_6811_5p'), (-0.0, 'hsa_miR_6811_3p'), (-0.0, 'hsa_miR_6810_5p'), (0.0, 'hsa_miR_6810_3p'), (0.0, 'hsa_miR_6809_5p'), (0.0, 'hsa_miR_6808_5p'), (-0.0, 'hsa_miR_6807_5p'), (0.0, 'hsa_miR_6807_3p'), (0.0, 'hsa_miR_6806_5p'), (0.0, 'hsa_miR_6805_5p'), (0.0, 'hsa_miR_6805_3p'), (0.0, 'hsa_miR_6804_5p'), (-0.0, 'hsa_miR_6804_3p'), (-0.0, 'hsa_miR_6803_5p'), (0.0, 'hsa_miR_6803_3p'), (0.0, 'hsa_miR_6802_5p'), (0.0, 'hsa_miR_6801_5p'), (0.0, 'hsa_miR_6801_3p'), (0.0, 'hsa_miR_6800_5p'), (-0.0, 'hsa_miR_6800_3p'), (-0.0, 'hsa_miR_6799_5p'), (0.0, 'hsa_miR_6799_3p'), (0.0, 'hsa_miR_6798_5p'), (-0.0, 'hsa_miR_6798_3p'), (0.0, 'hsa_miR_6797_5p'), (0.0, 'hsa_miR_6797_3p'), (0.0, 'hsa_miR_6796_5p'), (0.0, 'hsa_miR_6796_3p'), (0.0, 'hsa_miR_6795_5p'), (0.0, 'hsa_miR_6795_3p'), (-0.0, 'hsa_miR_6794_5p'), (0.0, 'hsa_miR_6794_3p'), (0.0, 'hsa_miR_6793_3p'), (0.0, 'hsa_miR_6792_5p'), (0.0, 'hsa_miR_6791_5p'), (0.0, 'hsa_miR_6791_3p'), (0.0, 'hsa_miR_6790_5p'), (-0.0, 'hsa_miR_6790_3p'), (0.0, 'hsa_miR_6789_5p'), (0.0, 'hsa_miR_6789_3p'), (0.0, 'hsa_miR_6788_5p'), (0.0, 'hsa_miR_6788_3p'), (-0.0, 'hsa_miR_6787_5p'), (0.0, 'hsa_miR_6787_3p'), (0.0, 'hsa_miR_6786_5p'), (0.0, 'hsa_miR_6785_5p'), (-0.0, 'hsa_miR_6785_3p'), (0.0, 'hsa_miR_6784_5p'), (0.0, 'hsa_miR_6784_3p'), (0.0, 'hsa_miR_6783_3p'), (0.0, 
'hsa_miR_6782_5p'), (0.0, 'hsa_miR_6782_3p'), (0.0, 'hsa_miR_6781_5p'), (-0.0, 'hsa_miR_6781_3p'), (0.0, 'hsa_miR_6780b_5p'), (0.0, 'hsa_miR_6780b_3p'), (0.0, 'hsa_miR_6780a_5p'), (0.0, 'hsa_miR_6780a_3p'), (0.0, 'hsa_miR_6779_3p'), (0.0, 'hsa_miR_6778_5p'), (-0.0, 'hsa_miR_6778_3p'), (0.0, 'hsa_miR_6777_5p'), (0.0, 'hsa_miR_6777_3p'), (0.0, 'hsa_miR_6776_3p'), (0.0, 'hsa_miR_6775_5p'), (0.0, 'hsa_miR_6775_3p'), (0.0, 'hsa_miR_6774_5p'), (-0.0, 'hsa_miR_6774_3p'), (0.0, 'hsa_miR_6773_5p'), (0.0, 'hsa_miR_6773_3p'), (0.0, 'hsa_miR_6772_5p'), (0.0, 'hsa_miR_6771_5p'), (0.0, 'hsa_miR_6771_3p'), (0.0, 'hsa_miR_6770_5p'), (0.0, 'hsa_miR_6770_3p'), (0.0, 'hsa_miR_6769b_5p'), (-0.0, 'hsa_miR_6769b_3p'), (0.0, 'hsa_miR_6769a_5p'), (-0.0, 'hsa_miR_6769a_3p'), (0.0, 'hsa_miR_6768_5p'), (0.0, 'hsa_miR_6768_3p'), (0.0, 'hsa_miR_6767_5p'), (0.0, 'hsa_miR_6767_3p'), (0.0, 'hsa_miR_6766_5p'), (0.0, 'hsa_miR_6766_3p'), (0.0, 'hsa_miR_6765_5p'), (0.0, 'hsa_miR_6765_3p'), (0.0, 'hsa_miR_6764_5p'), (0.0, 'hsa_miR_6764_3p'), (0.0, 'hsa_miR_6763_5p'), (0.0, 'hsa_miR_6763_3p'), (0.0, 'hsa_miR_6762_5p'), (-0.0, 'hsa_miR_6762_3p'), (0.0, 'hsa_miR_6760_5p'), (0.0, 'hsa_miR_6760_3p'), (-0.0, 'hsa_miR_675_5p'), (0.0, 'hsa_miR_6759_5p'), (0.0, 'hsa_miR_6759_3p'), (0.0, 'hsa_miR_6758_5p'), (0.0, 'hsa_miR_6758_3p'), (-0.0, 'hsa_miR_6757_5p'), (0.0, 'hsa_miR_6757_3p'), (0.0, 'hsa_miR_6756_5p'), (0.0, 'hsa_miR_6756_3p'), (-0.0, 'hsa_miR_6755_5p'), (0.0, 'hsa_miR_6755_3p'), (0.0, 'hsa_miR_6754_5p'), (-0.0, 'hsa_miR_6754_3p'), (0.0, 'hsa_miR_6753_5p'), (-0.0, 'hsa_miR_6753_3p'), (0.0, 'hsa_miR_6752_5p'), (0.0, 'hsa_miR_6752_3p'), (0.0, 'hsa_miR_6751_5p'), (-0.0, 'hsa_miR_6751_3p'), (0.0, 'hsa_miR_6750_5p'), (0.0, 'hsa_miR_6750_3p'), (0.0, 'hsa_miR_6749_5p'), (0.0, 'hsa_miR_6749_3p'), (-0.0, 'hsa_miR_6748_3p'), (0.0, 'hsa_miR_6747_5p'), (0.0, 'hsa_miR_6746_5p'), (-0.0, 'hsa_miR_6746_3p'), (0.0, 'hsa_miR_6745'), (0.0, 'hsa_miR_6744_3p'), (0.0, 'hsa_miR_6743_5p'), (0.0, 'hsa_miR_6743_3p'), (0.0, 'hsa_miR_6742_5p'), (-0.0, 'hsa_miR_6742_3p'), (0.0, 'hsa_miR_6741_5p'), (0.0, 'hsa_miR_6741_3p'), (0.0, 'hsa_miR_6740_5p'), (0.0, 'hsa_miR_6740_3p'), (0.0, 'hsa_miR_6739_5p'), (0.0, 'hsa_miR_6738_5p'), (0.0, 'hsa_miR_6738_3p'), (0.0, 'hsa_miR_6737_5p'), (0.0, 'hsa_miR_6737_3p'), (0.0, 'hsa_miR_6736_5p'), (0.0, 'hsa_miR_6736_3p'), (0.0, 'hsa_miR_6735_5p'), (0.0, 'hsa_miR_6735_3p'), (0.0, 'hsa_miR_6733_5p'), (0.0, 'hsa_miR_6732_5p'), (0.0, 'hsa_miR_6732_3p'), (0.0, 'hsa_miR_6731_3p'), (0.0, 'hsa_miR_6730_3p'), (0.0, 'hsa_miR_6729_5p'), (0.0, 'hsa_miR_6729_3p'), (0.0, 'hsa_miR_6728_5p'), (-0.0, 'hsa_miR_6728_3p'), (-0.0, 'hsa_miR_6727_5p'), (0.0, 'hsa_miR_6727_3p'), (-0.0, 'hsa_miR_6726_5p'), (0.0, 'hsa_miR_6722_5p'), (0.0, 'hsa_miR_6722_3p'), (-0.0, 'hsa_miR_6721_5p'), (0.0, 'hsa_miR_6720_5p'), (0.0, 'hsa_miR_6719_3p'), (-0.0, 'hsa_miR_6718_5p'), (0.0, 'hsa_miR_6716_5p'), (0.0, 'hsa_miR_6716_3p'), (0.0, 'hsa_miR_6715b_5p'), (0.0, 'hsa_miR_6715b_3p'), (-0.0, 'hsa_miR_6715a_3p'), (0.0, 'hsa_miR_670_5p'), (-0.0, 'hsa_miR_670_3p'), (0.0, 'hsa_miR_668_5p'), (0.0, 'hsa_miR_668_3p'), (-0.0, 'hsa_miR_665'), (0.0, 'hsa_miR_664b_5p'), (0.0, 'hsa_miR_664b_3p'), (0.0, 'hsa_miR_664a_3p'), (0.0, 'hsa_miR_663a'), (0.0, 'hsa_miR_662'), (0.0, 'hsa_miR_661'), (0.0, 'hsa_miR_660_3p'), (0.0, 'hsa_miR_659_5p'), (0.0, 'hsa_miR_659_3p'), (0.0, 'hsa_miR_657'), (-0.0, 'hsa_miR_656_5p'), (-0.0, 'hsa_miR_656_3p'), (0.0, 'hsa_miR_655_5p'), (0.0, 'hsa_miR_655_3p'), (0.0, 'hsa_miR_654_5p'), (-0.0, 'hsa_miR_652_5p'), (0.0, 'hsa_miR_651_5p'), (0.0, 'hsa_miR_651_3p'), 
(0.0, 'hsa_miR_6516_5p'), (-0.0, 'hsa_miR_6516_3p'), (0.0, 'hsa_miR_6515_3p'), (0.0, 'hsa_miR_6514_3p'), (0.0, 'hsa_miR_6512_5p'), (-0.0, 'hsa_miR_6511b_3p'), (0.0, 'hsa_miR_6510_5p'), (-0.0, 'hsa_miR_6509_5p'), (0.0, 'hsa_miR_6509_3p'), (-0.0, 'hsa_miR_6508_5p'), (0.0, 'hsa_miR_6508_3p'), (-0.0, 'hsa_miR_6507_3p'), (0.0, 'hsa_miR_6506_5p'), (0.0, 'hsa_miR_6506_3p'), (-0.0, 'hsa_miR_6505_5p'), (0.0, 'hsa_miR_6505_3p'), (0.0, 'hsa_miR_6504_5p'), (0.0, 'hsa_miR_6504_3p'), (0.0, 'hsa_miR_6503_3p'), (0.0, 'hsa_miR_6502_5p'), (0.0, 'hsa_miR_6502_3p'), (-0.0, 'hsa_miR_6501_5p'), (0.0, 'hsa_miR_6500_5p'), (0.0, 'hsa_miR_6500_3p'), (0.0, 'hsa_miR_650'), (0.0, 'hsa_miR_647'), (0.0, 'hsa_miR_646'), (0.0, 'hsa_miR_645'), (-0.0, 'hsa_miR_643'), (0.0, 'hsa_miR_642b_5p'), (0.0, 'hsa_miR_642b_3p'), (-0.0, 'hsa_miR_642a_5p'), (0.0, 'hsa_miR_641'), (0.0, 'hsa_miR_640'), (0.0, 'hsa_miR_639'), (0.0, 'hsa_miR_638'), (0.0, 'hsa_miR_637'), (-0.0, 'hsa_miR_636'), (0.0, 'hsa_miR_635'), (0.0, 'hsa_miR_634'), (0.0, 'hsa_miR_632'), (0.0, 'hsa_miR_631'), (-0.0, 'hsa_miR_630'), (0.0, 'hsa_miR_629_3p'), (0.0, 'hsa_miR_626'), (0.0, 'hsa_miR_624_3p'), (0.0, 'hsa_miR_623'), (0.0, 'hsa_miR_622'), (0.0, 'hsa_miR_621'), (0.0, 'hsa_miR_619_5p'), (0.0, 'hsa_miR_619_3p'), (0.0, 'hsa_miR_618'), (0.0, 'hsa_miR_617'), (0.0, 'hsa_miR_616_3p'), (0.0, 'hsa_miR_615_5p'), (0.0, 'hsa_miR_614'), (0.0, 'hsa_miR_6134'), (0.0, 'hsa_miR_6133'), (0.0, 'hsa_miR_6132'), (0.0, 'hsa_miR_6131'), (0.0, 'hsa_miR_6130'), (0.0, 'hsa_miR_613'), (0.0, 'hsa_miR_6129'), (0.0, 'hsa_miR_6128'), (0.0, 'hsa_miR_6126'), (0.0, 'hsa_miR_6124'), (0.0, 'hsa_miR_612'), (0.0, 'hsa_miR_611'), (-0.0, 'hsa_miR_610'), (0.0, 'hsa_miR_6090'), (0.0, 'hsa_miR_609'), (0.0, 'hsa_miR_6089'), (0.0, 'hsa_miR_6088'), (-0.0, 'hsa_miR_6087'), (0.0, 'hsa_miR_6086'), (0.0, 'hsa_miR_6084'), (0.0, 'hsa_miR_6081'), (0.0, 'hsa_miR_6080'), (0.0, 'hsa_miR_608'), (0.0, 'hsa_miR_6079'), (0.0, 'hsa_miR_6077'), (0.0, 'hsa_miR_6076'), (0.0, 'hsa_miR_6075'), (0.0, 'hsa_miR_6073'), (0.0, 'hsa_miR_6071'), (0.0, 'hsa_miR_6070'), (0.0, 'hsa_miR_6068'), (0.0, 'hsa_miR_606'), (0.0, 'hsa_miR_605_3p'), (0.0, 'hsa_miR_604'), (0.0, 'hsa_miR_603'), (0.0, 'hsa_miR_602'), (0.0, 'hsa_miR_601'), (0.0, 'hsa_miR_600'), (0.0, 'hsa_miR_598_5p'), (-0.0, 'hsa_miR_597_5p'), (0.0, 'hsa_miR_597_3p'), (0.0, 'hsa_miR_596'), (0.0, 'hsa_miR_595'), (0.0, 'hsa_miR_593_5p'), (0.0, 'hsa_miR_591'), (0.0, 'hsa_miR_588'), (0.0, 'hsa_miR_587'), (0.0, 'hsa_miR_586'), (0.0, 'hsa_miR_585_5p'), (0.0, 'hsa_miR_585_3p'), (-0.0, 'hsa_miR_580_3p'), (-0.0, 'hsa_miR_579_5p'), (0.0, 'hsa_miR_5787'), (0.0, 'hsa_miR_578'), (0.0, 'hsa_miR_576_3p'), (0.0, 'hsa_miR_575'), (0.0, 'hsa_miR_574_5p'), (0.0, 'hsa_miR_574_3p'), (0.0, 'hsa_miR_573'), (0.0, 'hsa_miR_572'), (0.0, 'hsa_miR_571'), (-0.0, 'hsa_miR_570_5p'), (-0.0, 'hsa_miR_570_3p'), (0.0, 'hsa_miR_5708'), (0.0, 'hsa_miR_5707'), (-0.0, 'hsa_miR_5704'), (0.0, 'hsa_miR_5703'), (0.0, 'hsa_miR_5702'), (0.0, 'hsa_miR_5700'), (-0.0, 'hsa_miR_5699_3p'), (0.0, 'hsa_miR_5698'), (-0.0, 'hsa_miR_5697'), (0.0, 'hsa_miR_5695'), (-0.0, 'hsa_miR_5694'), (0.0, 'hsa_miR_5693'), (0.0, 'hsa_miR_5692c'), (0.0, 'hsa_miR_5692a'), (0.0, 'hsa_miR_5691'), (-0.0, 'hsa_miR_5690'), (0.0, 'hsa_miR_5689'), (0.0, 'hsa_miR_5688'), (0.0, 'hsa_miR_5687'), (0.0, 'hsa_miR_5685'), (0.0, 'hsa_miR_5683'), (0.0, 'hsa_miR_5681b'), (0.0, 'hsa_miR_5681a'), (0.0, 'hsa_miR_568'), (0.0, 'hsa_miR_567'), (0.0, 'hsa_miR_564'), (0.0, 'hsa_miR_561_3p'), (0.0, 'hsa_miR_5591_5p'), (0.0, 'hsa_miR_5591_3p'), (0.0, 'hsa_miR_5590_5p'), (0.0, 
'hsa_miR_5590_3p'), (0.0, 'hsa_miR_559'), (-0.0, 'hsa_miR_5589_5p'), (0.0, 'hsa_miR_5589_3p'), (0.0, 'hsa_miR_5588_5p'), (-0.0, 'hsa_miR_5588_3p'), (0.0, 'hsa_miR_5587_5p'), (0.0, 'hsa_miR_5587_3p'), (-0.0, 'hsa_miR_5586_3p'), (0.0, 'hsa_miR_5585_5p'), (0.0, 'hsa_miR_5585_3p'), (0.0, 'hsa_miR_5584_3p'), (0.0, 'hsa_miR_5583_5p'), (0.0, 'hsa_miR_5583_3p'), (0.0, 'hsa_miR_5582_5p'), (-0.0, 'hsa_miR_5582_3p'), (-0.0, 'hsa_miR_5581_5p'), (0.0, 'hsa_miR_5581_3p'), (-0.0, 'hsa_miR_5580_5p'), (-0.0, 'hsa_miR_5580_3p'), (0.0, 'hsa_miR_558'), (0.0, 'hsa_miR_5579_5p'), (-0.0, 'hsa_miR_5572'), (-0.0, 'hsa_miR_5571_5p'), (0.0, 'hsa_miR_5571_3p'), (0.0, 'hsa_miR_556_3p'), (0.0, 'hsa_miR_555'), (0.0, 'hsa_miR_554'), (0.0, 'hsa_miR_551b_5p'), (0.0, 'hsa_miR_551b_3p'), (-0.0, 'hsa_miR_551a'), (0.0, 'hsa_miR_550b_3p'), (-0.0, 'hsa_miR_550b_2_5p'), (0.0, 'hsa_miR_550a_5p'), (-0.0, 'hsa_miR_550a_3p'), (0.0, 'hsa_miR_550a_3_5p'), (0.0, 'hsa_miR_549a'), (0.0, 'hsa_miR_548y'), (0.0, 'hsa_miR_548x_5p'), (-0.0, 'hsa_miR_548x_3p'), (-0.0, 'hsa_miR_548w'), (-0.0, 'hsa_miR_548u'), (0.0, 'hsa_miR_548t_5p'), (0.0, 'hsa_miR_548s'), (0.0, 'hsa_miR_548o_3p'), (0.0, 'hsa_miR_548m'), (0.0, 'hsa_miR_548l'), (-0.0, 'hsa_miR_548k'), (-0.0, 'hsa_miR_548j_5p'), (0.0, 'hsa_miR_548j_3p'), (0.0, 'hsa_miR_548h_5p'), (0.0, 'hsa_miR_548g_3p'), (0.0, 'hsa_miR_548f_5p'), (0.0, 'hsa_miR_548f_3p'), (0.0, 'hsa_miR_548d_5p'), (-0.0, 'hsa_miR_548d_3p'), (0.0, 'hsa_miR_548c_5p'), (0.0, 'hsa_miR_548c_3p'), (0.0, 'hsa_miR_548ba'), (0.0, 'hsa_miR_548b_5p'), (0.0, 'hsa_miR_548az_5p'), (-0.0, 'hsa_miR_548ax'), (-0.0, 'hsa_miR_548aw'), (-0.0, 'hsa_miR_548av_5p'), (0.0, 'hsa_miR_548au_5p'), (-0.0, 'hsa_miR_548au_3p'), (0.0, 'hsa_miR_548at_5p'), (0.0, 'hsa_miR_548at_3p'), (0.0, 'hsa_miR_548as_5p'), (0.0, 'hsa_miR_548as_3p'), (0.0, 'hsa_miR_548ar_5p'), (0.0, 'hsa_miR_548ar_3p'), (0.0, 'hsa_miR_548aq_5p'), (0.0, 'hsa_miR_548aq_3p'), (0.0, 'hsa_miR_548ap_5p'), (0.0, 'hsa_miR_548ao_5p'), (-0.0, 'hsa_miR_548ao_3p'), (0.0, 'hsa_miR_548an'), (0.0, 'hsa_miR_548am_3p'), (0.0, 'hsa_miR_548al'), (0.0, 'hsa_miR_548ak'), (0.0, 'hsa_miR_548aj_3p'), (0.0, 'hsa_miR_548ai'), (0.0, 'hsa_miR_548ah_5p'), (0.0, 'hsa_miR_548ah_3p'), (0.0, 'hsa_miR_548ag'), (0.0, 'hsa_miR_548ac'), (-0.0, 'hsa_miR_548ab'), (-0.0, 'hsa_miR_548a_5p'), (0.0, 'hsa_miR_548a_3p'), (0.0, 'hsa_miR_545_3p'), (0.0, 'hsa_miR_544b'), (0.0, 'hsa_miR_544a'), (-0.0, 'hsa_miR_542_5p'), (-0.0, 'hsa_miR_541_5p'), (0.0, 'hsa_miR_541_3p'), (-0.0, 'hsa_miR_539_5p'), (-0.0, 'hsa_miR_539_3p'), (0.0, 'hsa_miR_527'), (0.0, 'hsa_miR_526b_5p'), (0.0, 'hsa_miR_526b_3p'), (0.0, 'hsa_miR_526a'), (-0.0, 'hsa_miR_525_5p'), (0.0, 'hsa_miR_525_3p'), (-0.0, 'hsa_miR_524_5p'), (0.0, 'hsa_miR_524_3p'), (0.0, 'hsa_miR_523_5p'), (0.0, 'hsa_miR_523_3p'), (-0.0, 'hsa_miR_521'), (0.0, 'hsa_miR_520h'), (0.0, 'hsa_miR_520g_5p'), (0.0, 'hsa_miR_520g_3p'), (0.0, 'hsa_miR_520f_5p'), (0.0, 'hsa_miR_520f_3p'), (0.0, 'hsa_miR_520e'), (0.0, 'hsa_miR_520d_5p'), (0.0, 'hsa_miR_520d_3p'), (0.0, 'hsa_miR_520c_5p'), (0.0, 'hsa_miR_520c_3p'), (-0.0, 'hsa_miR_520b'), (0.0, 'hsa_miR_520a_5p'), (0.0, 'hsa_miR_520a_3p'), (0.0, 'hsa_miR_519e_5p'), (0.0, 'hsa_miR_519e_3p'), (0.0, 'hsa_miR_519d_5p'), (0.0, 'hsa_miR_519d_3p'), (0.0, 'hsa_miR_519c_3p'), (0.0, 'hsa_miR_519b_5p'), (0.0, 'hsa_miR_519b_3p'), (-0.0, 'hsa_miR_519a_3p'), (0.0, 'hsa_miR_5197_5p'), (0.0, 'hsa_miR_5196_5p'), (0.0, 'hsa_miR_5196_3p'), (0.0, 'hsa_miR_5195_5p'), (0.0, 'hsa_miR_5195_3p'), (0.0, 'hsa_miR_5194'), (-0.0, 'hsa_miR_5193'), (0.0, 'hsa_miR_5192'), (0.0, 'hsa_miR_5191'), (0.0, 
'hsa_miR_5190'), (0.0, 'hsa_miR_518f_5p'), (0.0, 'hsa_miR_518f_3p'), (0.0, 'hsa_miR_518e_5p'), (-0.0, 'hsa_miR_518e_3p'), (0.0, 'hsa_miR_518d_3p'), (-0.0, 'hsa_miR_518c_5p'), (0.0, 'hsa_miR_518c_3p'), (0.0, 'hsa_miR_518a_5p'), (0.0, 'hsa_miR_518a_3p'), (0.0, 'hsa_miR_5189_5p'), (0.0, 'hsa_miR_5189_3p'), (0.0, 'hsa_miR_5188'), (-0.0, 'hsa_miR_5187_5p'), (0.0, 'hsa_miR_5187_3p'), (0.0, 'hsa_miR_5186'), (0.0, 'hsa_miR_517c_3p'), (-0.0, 'hsa_miR_517b_3p'), (-0.0, 'hsa_miR_517a_3p'), (-0.0, 'hsa_miR_517_5p'), (0.0, 'hsa_miR_516b_5p'), (-0.0, 'hsa_miR_516a_5p'), (0.0, 'hsa_miR_515_5p'), (0.0, 'hsa_miR_515_3p'), (0.0, 'hsa_miR_514b_5p'), (0.0, 'hsa_miR_514b_3p'), (-0.0, 'hsa_miR_513c_3p'), (0.0, 'hsa_miR_513b_5p'), (0.0, 'hsa_miR_513b_3p'), (-0.0, 'hsa_miR_513a_5p'), (0.0, 'hsa_miR_513a_3p'), (0.0, 'hsa_miR_512_5p'), (0.0, 'hsa_miR_512_3p'), (-0.0, 'hsa_miR_511_3p'), (0.0, 'hsa_miR_510_5p'), (0.0, 'hsa_miR_5100'), (0.0, 'hsa_miR_509_5p'), (0.0, 'hsa_miR_509_3_5p'), (-0.0, 'hsa_miR_5094'), (0.0, 'hsa_miR_5093'), (0.0, 'hsa_miR_5092'), (0.0, 'hsa_miR_5091'), (0.0, 'hsa_miR_5089_5p'), (0.0, 'hsa_miR_5089_3p'), (0.0, 'hsa_miR_5088_5p'), (0.0, 'hsa_miR_5088_3p'), (0.0, 'hsa_miR_5087'), (0.0, 'hsa_miR_507'), (0.0, 'hsa_miR_506_5p'), (0.0, 'hsa_miR_505_5p'), (0.0, 'hsa_miR_504_5p'), (0.0, 'hsa_miR_504_3p'), (0.0, 'hsa_miR_5047'), (-0.0, 'hsa_miR_503_5p'), (0.0, 'hsa_miR_503_3p'), (0.0, 'hsa_miR_501_5p'), (0.0, 'hsa_miR_501_3p'), (0.0, 'hsa_miR_5011_5p'), (0.0, 'hsa_miR_5011_3p'), (0.0, 'hsa_miR_5010_5p'), (-0.0, 'hsa_miR_5010_3p'), (0.0, 'hsa_miR_500b_5p'), (0.0, 'hsa_miR_500b_3p'), (0.0, 'hsa_miR_500a_5p'), (-0.0, 'hsa_miR_500a_3p'), (-0.0, 'hsa_miR_5009_5p'), (0.0, 'hsa_miR_5009_3p'), (-0.0, 'hsa_miR_5008_5p'), (-0.0, 'hsa_miR_5008_3p'), (0.0, 'hsa_miR_5007_5p'), (0.0, 'hsa_miR_5007_3p'), (0.0, 'hsa_miR_5006_5p'), (0.0, 'hsa_miR_5004_5p'), (0.0, 'hsa_miR_5004_3p'), (0.0, 'hsa_miR_5003_5p'), (0.0, 'hsa_miR_5003_3p'), (0.0, 'hsa_miR_5002_5p'), (-0.0, 'hsa_miR_5002_3p'), (0.0, 'hsa_miR_5001_5p'), (0.0, 'hsa_miR_5001_3p'), (0.0, 'hsa_miR_5000_5p'), (-0.0, 'hsa_miR_5000_3p'), (0.0, 'hsa_miR_499b_5p'), (0.0, 'hsa_miR_499b_3p'), (0.0, 'hsa_miR_499a_3p'), (0.0, 'hsa_miR_4999_3p'), (-0.0, 'hsa_miR_498'), (-0.0, 'hsa_miR_497_5p'), (-0.0, 'hsa_miR_497_3p'), (-0.0, 'hsa_miR_495_5p'), (0.0, 'hsa_miR_494_5p'), (0.0, 'hsa_miR_494_3p'), (0.0, 'hsa_miR_492'), (0.0, 'hsa_miR_491_3p'), (-0.0, 'hsa_miR_490_5p'), (0.0, 'hsa_miR_489_5p'), (-0.0, 'hsa_miR_488_5p'), (-0.0, 'hsa_miR_488_3p'), (0.0, 'hsa_miR_487b_5p'), (0.0, 'hsa_miR_487a_5p'), (0.0, 'hsa_miR_487a_3p'), (-0.0, 'hsa_miR_486_3p'), (0.0, 'hsa_miR_485_5p'), (-0.0, 'hsa_miR_485_3p'), (0.0, 'hsa_miR_4804_5p'), (0.0, 'hsa_miR_4804_3p'), (0.0, 'hsa_miR_4803'), (0.0, 'hsa_miR_4802_3p'), (0.0, 'hsa_miR_4800_5p'), (-0.0, 'hsa_miR_4800_3p'), (-0.0, 'hsa_miR_4799_5p'), (0.0, 'hsa_miR_4799_3p'), (0.0, 'hsa_miR_4798_5p'), (0.0, 'hsa_miR_4798_3p'), (0.0, 'hsa_miR_4797_5p'), (0.0, 'hsa_miR_4796_3p'), (0.0, 'hsa_miR_4795_5p'), (0.0, 'hsa_miR_4794'), (0.0, 'hsa_miR_4793_5p'), (-0.0, 'hsa_miR_4793_3p'), (0.0, 'hsa_miR_4792'), (0.0, 'hsa_miR_4790_5p'), (0.0, 'hsa_miR_4790_3p'), (0.0, 'hsa_miR_4789_5p'), (0.0, 'hsa_miR_4789_3p'), (0.0, 'hsa_miR_4788'), (0.0, 'hsa_miR_4787_5p'), (0.0, 'hsa_miR_4786_5p'), (0.0, 'hsa_miR_4786_3p'), (0.0, 'hsa_miR_4785'), (-0.0, 'hsa_miR_4784'), (0.0, 'hsa_miR_4783_5p'), (0.0, 'hsa_miR_4783_3p'), (0.0, 'hsa_miR_4782_3p'), (0.0, 'hsa_miR_4781_5p'), (0.0, 'hsa_miR_4781_3p'), (0.0, 'hsa_miR_4780'), (0.0, 'hsa_miR_4779'), (-0.0, 'hsa_miR_4778_5p'), (0.0, 
'hsa_miR_4778_3p'), (-0.0, 'hsa_miR_4777_5p'), (0.0, 'hsa_miR_4776_5p'), (0.0, 'hsa_miR_4776_3p'), (0.0, 'hsa_miR_4774_5p'), (0.0, 'hsa_miR_4774_3p'), (0.0, 'hsa_miR_4773'), (0.0, 'hsa_miR_4772_5p'), (0.0, 'hsa_miR_4772_3p'), (0.0, 'hsa_miR_4771'), (0.0, 'hsa_miR_4769_5p'), (-0.0, 'hsa_miR_4769_3p'), (0.0, 'hsa_miR_4768_3p'), (0.0, 'hsa_miR_4767'), (0.0, 'hsa_miR_4766_5p'), (0.0, 'hsa_miR_4766_3p'), (0.0, 'hsa_miR_4765'), (0.0, 'hsa_miR_4764_5p'), (-0.0, 'hsa_miR_4764_3p'), (0.0, 'hsa_miR_4763_5p'), (0.0, 'hsa_miR_4763_3p'), (0.0, 'hsa_miR_4762_5p'), (0.0, 'hsa_miR_4762_3p'), (0.0, 'hsa_miR_4761_5p'), (0.0, 'hsa_miR_4761_3p'), (0.0, 'hsa_miR_4760_5p'), (0.0, 'hsa_miR_4760_3p'), (0.0, 'hsa_miR_4759'), (-0.0, 'hsa_miR_4758_5p'), (0.0, 'hsa_miR_4758_3p'), (0.0, 'hsa_miR_4757_3p'), (0.0, 'hsa_miR_4756_5p'), (0.0, 'hsa_miR_4756_3p'), (-0.0, 'hsa_miR_4755_5p'), (0.0, 'hsa_miR_4755_3p'), (-0.0, 'hsa_miR_4754'), (-0.0, 'hsa_miR_4753_3p'), (-0.0, 'hsa_miR_4752'), (-0.0, 'hsa_miR_4751'), (-0.0, 'hsa_miR_4750_5p'), (0.0, 'hsa_miR_4750_3p'), (0.0, 'hsa_miR_4749_5p'), (-0.0, 'hsa_miR_4749_3p'), (0.0, 'hsa_miR_4747_5p'), (0.0, 'hsa_miR_4747_3p'), (0.0, 'hsa_miR_4746_3p'), (0.0, 'hsa_miR_4744'), (-0.0, 'hsa_miR_4743_5p'), (0.0, 'hsa_miR_4742_5p'), (-0.0, 'hsa_miR_4741'), (0.0, 'hsa_miR_4740_5p'), (0.0, 'hsa_miR_4740_3p'), (0.0, 'hsa_miR_4739'), (-0.0, 'hsa_miR_4738_5p'), (-0.0, 'hsa_miR_4738_3p'), (0.0, 'hsa_miR_4737'), (-0.0, 'hsa_miR_4736'), (-0.0, 'hsa_miR_4735_5p'), (0.0, 'hsa_miR_4735_3p'), (0.0, 'hsa_miR_4734'), (-0.0, 'hsa_miR_4733_5p'), (0.0, 'hsa_miR_4732_5p'), (-0.0, 'hsa_miR_4731_5p'), (0.0, 'hsa_miR_4731_3p'), (0.0, 'hsa_miR_4730'), (0.0, 'hsa_miR_4729'), (0.0, 'hsa_miR_4728_5p'), (0.0, 'hsa_miR_4727_5p'), (-0.0, 'hsa_miR_4727_3p'), (-0.0, 'hsa_miR_4726_5p'), (0.0, 'hsa_miR_4726_3p'), (0.0, 'hsa_miR_4725_5p'), (0.0, 'hsa_miR_4725_3p'), (0.0, 'hsa_miR_4724_5p'), (0.0, 'hsa_miR_4724_3p'), (0.0, 'hsa_miR_4723_5p'), (0.0, 'hsa_miR_4723_3p'), (-0.0, 'hsa_miR_4722_5p'), (0.0, 'hsa_miR_4722_3p'), (0.0, 'hsa_miR_4721'), (0.0, 'hsa_miR_4720_5p'), (0.0, 'hsa_miR_4720_3p'), (0.0, 'hsa_miR_4719'), (0.0, 'hsa_miR_4718'), (0.0, 'hsa_miR_4717_5p'), (0.0, 'hsa_miR_4717_3p'), (0.0, 'hsa_miR_4716_5p'), (0.0, 'hsa_miR_4716_3p'), (-0.0, 'hsa_miR_4715_5p'), (0.0, 'hsa_miR_4715_3p'), (-0.0, 'hsa_miR_4714_5p'), (0.0, 'hsa_miR_4714_3p'), (0.0, 'hsa_miR_4713_5p'), (0.0, 'hsa_miR_4713_3p'), (0.0, 'hsa_miR_4712_5p'), (0.0, 'hsa_miR_4712_3p'), (0.0, 'hsa_miR_4711_5p'), (0.0, 'hsa_miR_4711_3p'), (0.0, 'hsa_miR_4710'), (-0.0, 'hsa_miR_4709_5p'), (0.0, 'hsa_miR_4708_5p'), (0.0, 'hsa_miR_4708_3p'), (0.0, 'hsa_miR_4707_5p'), (0.0, 'hsa_miR_4707_3p'), (0.0, 'hsa_miR_4706'), (0.0, 'hsa_miR_4705'), (0.0, 'hsa_miR_4704_5p'), (0.0, 'hsa_miR_4704_3p'), (0.0, 'hsa_miR_4703_5p'), (0.0, 'hsa_miR_4703_3p'), (-0.0, 'hsa_miR_4701_5p'), (0.0, 'hsa_miR_4701_3p'), (0.0, 'hsa_miR_4700_5p'), (-0.0, 'hsa_miR_4700_3p'), (0.0, 'hsa_miR_4699_5p'), (-0.0, 'hsa_miR_4699_3p'), (0.0, 'hsa_miR_4698'), (0.0, 'hsa_miR_4697_5p'), (0.0, 'hsa_miR_4697_3p'), (0.0, 'hsa_miR_4696'), (0.0, 'hsa_miR_4695_5p'), (-0.0, 'hsa_miR_4695_3p'), (0.0, 'hsa_miR_4694_5p'), (0.0, 'hsa_miR_4694_3p'), (0.0, 'hsa_miR_4693_5p'), (0.0, 'hsa_miR_4693_3p'), (0.0, 'hsa_miR_4692'), (0.0, 'hsa_miR_4691_5p'), (-0.0, 'hsa_miR_4691_3p'), (0.0, 'hsa_miR_4690_5p'), (-0.0, 'hsa_miR_4689'), (0.0, 'hsa_miR_4688'), (0.0, 'hsa_miR_4687_5p'), (0.0, 'hsa_miR_4686'), (0.0, 'hsa_miR_4685_5p'), (-0.0, 'hsa_miR_4685_3p'), (-0.0, 'hsa_miR_4684_5p'), (0.0, 'hsa_miR_4684_3p'), (0.0, 'hsa_miR_4682'), 
(0.0, 'hsa_miR_4681'), (0.0, 'hsa_miR_4680_5p'), (-0.0, 'hsa_miR_4680_3p'), (0.0, 'hsa_miR_4679'), (0.0, 'hsa_miR_4678'), (-0.0, 'hsa_miR_4677_5p'), (0.0, 'hsa_miR_4676_5p'), (0.0, 'hsa_miR_4675'), (-0.0, 'hsa_miR_4674'), (-0.0, 'hsa_miR_4673'), (0.0, 'hsa_miR_4672'), (0.0, 'hsa_miR_4671_5p'), (-0.0, 'hsa_miR_4671_3p'), (0.0, 'hsa_miR_4670_5p'), (0.0, 'hsa_miR_4670_3p'), (0.0, 'hsa_miR_4669'), (0.0, 'hsa_miR_4668_5p'), (0.0, 'hsa_miR_4668_3p'), (0.0, 'hsa_miR_4667_5p'), (0.0, 'hsa_miR_4667_3p'), (0.0, 'hsa_miR_4666b'), (-0.0, 'hsa_miR_4666a_5p'), (0.0, 'hsa_miR_4666a_3p'), (-0.0, 'hsa_miR_4665_5p'), (0.0, 'hsa_miR_4665_3p'), (0.0, 'hsa_miR_4664_5p'), (-0.0, 'hsa_miR_4664_3p'), (0.0, 'hsa_miR_4663'), (0.0, 'hsa_miR_4662b'), (0.0, 'hsa_miR_4662a_3p'), (0.0, 'hsa_miR_4661_5p'), (0.0, 'hsa_miR_4661_3p'), (-0.0, 'hsa_miR_4660'), (0.0, 'hsa_miR_466'), (0.0, 'hsa_miR_4659b_5p'), (0.0, 'hsa_miR_4659b_3p'), (0.0, 'hsa_miR_4659a_3p'), (0.0, 'hsa_miR_4656'), (0.0, 'hsa_miR_4655_5p'), (0.0, 'hsa_miR_4655_3p'), (0.0, 'hsa_miR_4654'), (0.0, 'hsa_miR_4653_5p'), (0.0, 'hsa_miR_4653_3p'), (-0.0, 'hsa_miR_4652_5p'), (-0.0, 'hsa_miR_4652_3p'), (-0.0, 'hsa_miR_4651'), (0.0, 'hsa_miR_4650_5p'), (0.0, 'hsa_miR_4650_3p'), (0.0, 'hsa_miR_4649_5p'), (0.0, 'hsa_miR_4649_3p'), (-0.0, 'hsa_miR_4648'), (-0.0, 'hsa_miR_4646_3p'), (0.0, 'hsa_miR_4645_5p'), (0.0, 'hsa_miR_4645_3p'), (0.0, 'hsa_miR_4644'), (0.0, 'hsa_miR_4643'), (0.0, 'hsa_miR_4642'), (0.0, 'hsa_miR_4641'), (-0.0, 'hsa_miR_4640_5p'), (0.0, 'hsa_miR_4640_3p'), (0.0, 'hsa_miR_4639_5p'), (0.0, 'hsa_miR_4639_3p'), (0.0, 'hsa_miR_4638_3p'), (0.0, 'hsa_miR_4637'), (0.0, 'hsa_miR_4636'), (0.0, 'hsa_miR_4635'), (0.0, 'hsa_miR_4633_5p'), (0.0, 'hsa_miR_4633_3p'), (0.0, 'hsa_miR_4632_5p'), (0.0, 'hsa_miR_454_5p'), (0.0, 'hsa_miR_454_3p'), (0.0, 'hsa_miR_4540'), (0.0, 'hsa_miR_4539'), (0.0, 'hsa_miR_4538'), (0.0, 'hsa_miR_4537'), (0.0, 'hsa_miR_4536_5p'), (0.0, 'hsa_miR_4535'), (0.0, 'hsa_miR_4534'), (-0.0, 'hsa_miR_4533'), (-0.0, 'hsa_miR_4532'), (0.0, 'hsa_miR_4531'), (-0.0, 'hsa_miR_4530'), (0.0, 'hsa_miR_4529_5p'), (0.0, 'hsa_miR_4528'), (0.0, 'hsa_miR_4527'), (0.0, 'hsa_miR_4526'), (0.0, 'hsa_miR_4525'), (0.0, 'hsa_miR_4524b_5p'), (0.0, 'hsa_miR_4524b_3p'), (0.0, 'hsa_miR_4524a_5p'), (0.0, 'hsa_miR_4524a_3p'), (-0.0, 'hsa_miR_4523'), (0.0, 'hsa_miR_4522'), (0.0, 'hsa_miR_4521'), (0.0, 'hsa_miR_4520b_3p'), (0.0, 'hsa_miR_4520a_5p'), (0.0, 'hsa_miR_4520a_3p'), (0.0, 'hsa_miR_451b'), (-0.0, 'hsa_miR_451a'), (0.0, 'hsa_miR_4519'), (0.0, 'hsa_miR_4518'), (0.0, 'hsa_miR_4516'), (0.0, 'hsa_miR_4514'), (0.0, 'hsa_miR_4513'), (0.0, 'hsa_miR_4512'), (0.0, 'hsa_miR_4511'), (0.0, 'hsa_miR_4510'), (0.0, 'hsa_miR_450b_5p'), (0.0, 'hsa_miR_450b_3p'), (0.0, 'hsa_miR_450a_1_3p'), (0.0, 'hsa_miR_4508'), (0.0, 'hsa_miR_4507'), (0.0, 'hsa_miR_4506'), (-0.0, 'hsa_miR_4505'), (0.0, 'hsa_miR_4504'), (0.0, 'hsa_miR_4503'), (0.0, 'hsa_miR_4502'), (-0.0, 'hsa_miR_4501'), (0.0, 'hsa_miR_4500'), (0.0, 'hsa_miR_449c_3p'), (0.0, 'hsa_miR_4499'), (0.0, 'hsa_miR_4498'), (0.0, 'hsa_miR_4497'), (0.0, 'hsa_miR_4496'), (0.0, 'hsa_miR_4495'), (0.0, 'hsa_miR_4494'), (0.0, 'hsa_miR_4493'), (0.0, 'hsa_miR_4492'), (0.0, 'hsa_miR_4490'), (0.0, 'hsa_miR_4489'), (0.0, 'hsa_miR_4488'), (-0.0, 'hsa_miR_4487'), (0.0, 'hsa_miR_4485'), (0.0, 'hsa_miR_4483'), (0.0, 'hsa_miR_4482_5p'), (-0.0, 'hsa_miR_4482_3p'), (0.0, 'hsa_miR_4480'), (-0.0, 'hsa_miR_448'), (-0.0, 'hsa_miR_4479'), (-0.0, 'hsa_miR_4478'), (0.0, 'hsa_miR_4477b'), (0.0, 'hsa_miR_4476'), (0.0, 'hsa_miR_4475'), (-0.0, 'hsa_miR_4474_5p'), (0.0, 
'hsa_miR_4471'), (0.0, 'hsa_miR_4470'), (0.0, 'hsa_miR_4467'), (-0.0, 'hsa_miR_4466'), (0.0, 'hsa_miR_4465'), (0.0, 'hsa_miR_4464'), (0.0, 'hsa_miR_4463'), (0.0, 'hsa_miR_4460'), (0.0, 'hsa_miR_4459'), (0.0, 'hsa_miR_4458'), (-0.0, 'hsa_miR_4457'), (0.0, 'hsa_miR_4456'), (0.0, 'hsa_miR_4454'), (-0.0, 'hsa_miR_4453'), (0.0, 'hsa_miR_4452'), (0.0, 'hsa_miR_4451'), (-0.0, 'hsa_miR_4450'), (-0.0, 'hsa_miR_4449'), (0.0, 'hsa_miR_4448'), (0.0, 'hsa_miR_4446_5p'), (-0.0, 'hsa_miR_4446_3p'), (0.0, 'hsa_miR_4445_5p'), (0.0, 'hsa_miR_4445_3p'), (0.0, 'hsa_miR_4444'), (0.0, 'hsa_miR_4443'), (0.0, 'hsa_miR_4442'), (0.0, 'hsa_miR_4441'), (0.0, 'hsa_miR_4440'), (0.0, 'hsa_miR_4439'), (0.0, 'hsa_miR_4438'), (0.0, 'hsa_miR_4437'), (-0.0, 'hsa_miR_4436b_5p'), (0.0, 'hsa_miR_4436b_3p'), (0.0, 'hsa_miR_4436a'), (0.0, 'hsa_miR_4435'), (0.0, 'hsa_miR_4434'), (0.0, 'hsa_miR_4433b_5p'), (0.0, 'hsa_miR_4433_5p'), (0.0, 'hsa_miR_4433_3p'), (0.0, 'hsa_miR_4432'), (0.0, 'hsa_miR_4430'), (0.0, 'hsa_miR_4429'), (0.0, 'hsa_miR_4428'), (-0.0, 'hsa_miR_4427'), (0.0, 'hsa_miR_4426'), (-0.0, 'hsa_miR_4425'), (0.0, 'hsa_miR_4424'), (0.0, 'hsa_miR_4422'), (-0.0, 'hsa_miR_4421'), (0.0, 'hsa_miR_4420'), (0.0, 'hsa_miR_4419b'), (0.0, 'hsa_miR_4418'), (0.0, 'hsa_miR_4417'), (0.0, 'hsa_miR_433_5p'), (-0.0, 'hsa_miR_432_3p'), (0.0, 'hsa_miR_4327'), (0.0, 'hsa_miR_4326'), (0.0, 'hsa_miR_4324'), (0.0, 'hsa_miR_4323'), (0.0, 'hsa_miR_4322'), (0.0, 'hsa_miR_4321'), (0.0, 'hsa_miR_4320'), (-0.0, 'hsa_miR_431_5p'), (0.0, 'hsa_miR_4315'), (0.0, 'hsa_miR_4314'), (0.0, 'hsa_miR_4313'), (0.0, 'hsa_miR_4312'), (0.0, 'hsa_miR_4310'), (0.0, 'hsa_miR_4306'), (0.0, 'hsa_miR_4302'), (0.0, 'hsa_miR_4301'), (0.0, 'hsa_miR_4300'), (0.0, 'hsa_miR_4296'), (0.0, 'hsa_miR_4295'), (0.0, 'hsa_miR_4292'), (0.0, 'hsa_miR_4290'), (0.0, 'hsa_miR_4289'), (-0.0, 'hsa_miR_4286'), (0.0, 'hsa_miR_4284'), (0.0, 'hsa_miR_4281'), (0.0, 'hsa_miR_4280'), (0.0, 'hsa_miR_4279'), (0.0, 'hsa_miR_4278'), (0.0, 'hsa_miR_4277'), (0.0, 'hsa_miR_4270'), (0.0, 'hsa_miR_4268'), (0.0, 'hsa_miR_4267'), (0.0, 'hsa_miR_4265'), (0.0, 'hsa_miR_4263'), (0.0, 'hsa_miR_4261'), (0.0, 'hsa_miR_4260'), (0.0, 'hsa_miR_4259'), (0.0, 'hsa_miR_4258'), (0.0, 'hsa_miR_4257'), (0.0, 'hsa_miR_4256'), (0.0, 'hsa_miR_4254'), (0.0, 'hsa_miR_422a'), (0.0, 'hsa_miR_421'), (0.0, 'hsa_miR_412_5p'), (0.0, 'hsa_miR_412_3p'), (0.0, 'hsa_miR_411_5p'), (0.0, 'hsa_miR_411_3p'), (0.0, 'hsa_miR_409_5p'), (0.0, 'hsa_miR_3978'), (0.0, 'hsa_miR_3972'), (0.0, 'hsa_miR_3960'), (0.0, 'hsa_miR_3945'), (0.0, 'hsa_miR_3944_5p'), (-0.0, 'hsa_miR_3944_3p'), (0.0, 'hsa_miR_3943'), (0.0, 'hsa_miR_3942_3p'), (0.0, 'hsa_miR_3941'), (-0.0, 'hsa_miR_3940_3p'), (0.0, 'hsa_miR_3939'), (0.0, 'hsa_miR_3938'), (0.0, 'hsa_miR_3937'), (0.0, 'hsa_miR_3936'), (0.0, 'hsa_miR_3935'), (0.0, 'hsa_miR_3934_3p'), (0.0, 'hsa_miR_3929'), (0.0, 'hsa_miR_3928_5p'), (0.0, 'hsa_miR_3927_5p'), (0.0, 'hsa_miR_3927_3p'), (0.0, 'hsa_miR_3925_5p'), (0.0, 'hsa_miR_3925_3p'), (0.0, 'hsa_miR_3924'), (0.0, 'hsa_miR_3923'), (-0.0, 'hsa_miR_3922_5p'), (0.0, 'hsa_miR_3921'), (0.0, 'hsa_miR_3919'), (-0.0, 'hsa_miR_3918'), (0.0, 'hsa_miR_3916'), (0.0, 'hsa_miR_3915'), (0.0, 'hsa_miR_3914'), (-0.0, 'hsa_miR_3912_5p'), (0.0, 'hsa_miR_3911'), (-0.0, 'hsa_miR_3910'), (0.0, 'hsa_miR_3908'), (-0.0, 'hsa_miR_3907'), (0.0, 'hsa_miR_384'), (0.0, 'hsa_miR_383_3p'), (-0.0, 'hsa_miR_381_5p'), (0.0, 'hsa_miR_381_3p'), (0.0, 'hsa_miR_380_5p'), (-0.0, 'hsa_miR_380_3p'), (0.0, 'hsa_miR_379_5p'), (0.0, 'hsa_miR_379_3p'), (0.0, 'hsa_miR_378j'), (-0.0, 'hsa_miR_378i'), (0.0, 
'hsa_miR_378h'), (0.0, 'hsa_miR_378g'), (0.0, 'hsa_miR_378f'), (0.0, 'hsa_miR_378e'), (0.0, 'hsa_miR_378d'), (0.0, 'hsa_miR_378c'), (0.0, 'hsa_miR_378b'), (0.0, 'hsa_miR_377_5p'), (0.0, 'hsa_miR_377_3p'), (-0.0, 'hsa_miR_376c_5p'), (-0.0, 'hsa_miR_376b_5p'), (-0.0, 'hsa_miR_376b_3p'), (-0.0, 'hsa_miR_376a_5p'), (-0.0, 'hsa_miR_376a_3p'), (0.0, 'hsa_miR_376a_2_5p'), (0.0, 'hsa_miR_374c_5p'), (0.0, 'hsa_miR_374c_3p'), (0.0, 'hsa_miR_373_5p'), (0.0, 'hsa_miR_373_3p'), (0.0, 'hsa_miR_372_5p'), (-0.0, 'hsa_miR_372_3p'), (-0.0, 'hsa_miR_371b_5p'), (0.0, 'hsa_miR_371b_3p'), (0.0, 'hsa_miR_371a_5p'), (-0.0, 'hsa_miR_371a_3p'), (0.0, 'hsa_miR_3714'), (0.0, 'hsa_miR_369_3p'), (0.0, 'hsa_miR_3692_5p'), (0.0, 'hsa_miR_3692_3p'), (0.0, 'hsa_miR_3690'), (-0.0, 'hsa_miR_3689f'), (0.0, 'hsa_miR_3689e'), (0.0, 'hsa_miR_3689d'), (0.0, 'hsa_miR_3689c'), (0.0, 'hsa_miR_3689b_5p'), (0.0, 'hsa_miR_3689b_3p'), (0.0, 'hsa_miR_3689a_5p'), (0.0, 'hsa_miR_3688_5p'), (0.0, 'hsa_miR_3688_3p'), (0.0, 'hsa_miR_3686'), (0.0, 'hsa_miR_3685'), (0.0, 'hsa_miR_3684'), (0.0, 'hsa_miR_3683'), (0.0, 'hsa_miR_3682_5p'), (0.0, 'hsa_miR_3682_3p'), (0.0, 'hsa_miR_3681_5p'), (0.0, 'hsa_miR_3681_3p'), (0.0, 'hsa_miR_3680_5p'), (0.0, 'hsa_miR_367_5p'), (0.0, 'hsa_miR_367_3p'), (-0.0, 'hsa_miR_3679_5p'), (-0.0, 'hsa_miR_3679_3p'), (-0.0, 'hsa_miR_3678_3p'), (-0.0, 'hsa_miR_3677_5p'), (-0.0, 'hsa_miR_3675_5p'), (0.0, 'hsa_miR_3675_3p'), (0.0, 'hsa_miR_3674'), (0.0, 'hsa_miR_3672'), (0.0, 'hsa_miR_3671'), (0.0, 'hsa_miR_3668'), (0.0, 'hsa_miR_3667_5p'), (0.0, 'hsa_miR_3666'), (0.0, 'hsa_miR_3665'), (0.0, 'hsa_miR_3664_5p'), (-0.0, 'hsa_miR_3664_3p'), (0.0, 'hsa_miR_3663_5p'), (0.0, 'hsa_miR_3663_3p'), (-0.0, 'hsa_miR_3661'), (0.0, 'hsa_miR_3660'), (-0.0, 'hsa_miR_365b_5p'), (0.0, 'hsa_miR_365b_3p'), (0.0, 'hsa_miR_365a_3p'), (-0.0, 'hsa_miR_3659'), (0.0, 'hsa_miR_3658'), (-0.0, 'hsa_miR_3657'), (0.0, 'hsa_miR_3656'), (0.0, 'hsa_miR_3655'), (0.0, 'hsa_miR_3654'), (0.0, 'hsa_miR_3653'), (-0.0, 'hsa_miR_3651'), (0.0, 'hsa_miR_3650'), (0.0, 'hsa_miR_3649'), (-0.0, 'hsa_miR_3646'), (-0.0, 'hsa_miR_363_5p'), (-0.0, 'hsa_miR_362_5p'), (0.0, 'hsa_miR_3622b_5p'), (-0.0, 'hsa_miR_3622b_3p'), (0.0, 'hsa_miR_3622a_5p'), (0.0, 'hsa_miR_3622a_3p'), (0.0, 'hsa_miR_3621'), (0.0, 'hsa_miR_361_3p'), (0.0, 'hsa_miR_3619_5p'), (0.0, 'hsa_miR_3619_3p'), (0.0, 'hsa_miR_3618'), (0.0, 'hsa_miR_3617_5p'), (0.0, 'hsa_miR_3616_5p'), (-0.0, 'hsa_miR_3614_5p'), (0.0, 'hsa_miR_3614_3p'), (0.0, 'hsa_miR_3612'), (-0.0, 'hsa_miR_3609'), (0.0, 'hsa_miR_3607_3p'), (-0.0, 'hsa_miR_3606_5p'), (0.0, 'hsa_miR_3606_3p'), (0.0, 'hsa_miR_3591_3p'), (0.0, 'hsa_miR_3529_5p'), (0.0, 'hsa_miR_3529_3p'), (0.0, 'hsa_miR_346'), (-0.0, 'hsa_miR_345_3p'), (0.0, 'hsa_miR_33b_5p'), (0.0, 'hsa_miR_33b_3p'), (-0.0, 'hsa_miR_33a_3p'), (0.0, 'hsa_miR_337_5p'), (-0.0, 'hsa_miR_337_3p'), (0.0, 'hsa_miR_330_5p'), (-0.0, 'hsa_miR_330_3p'), (0.0, 'hsa_miR_32_3p'), (0.0, 'hsa_miR_329_5p'), (-0.0, 'hsa_miR_329_3p'), (-0.0, 'hsa_miR_328_5p'), (0.0, 'hsa_miR_325'), (0.0, 'hsa_miR_324_5p'), (0.0, 'hsa_miR_324_3p'), (-0.0, 'hsa_miR_323b_5p'), (0.0, 'hsa_miR_323b_3p'), (0.0, 'hsa_miR_323a_5p'), (0.0, 'hsa_miR_320e'), (0.0, 'hsa_miR_320d'), (0.0, 'hsa_miR_3202'), (0.0, 'hsa_miR_3201'), (0.0, 'hsa_miR_3200_5p'), (-0.0, 'hsa_miR_3198'), (0.0, 'hsa_miR_3197'), (-0.0, 'hsa_miR_3196'), (-0.0, 'hsa_miR_3195'), (-0.0, 'hsa_miR_3194_5p'), (0.0, 'hsa_miR_3194_3p'), (0.0, 'hsa_miR_3192_5p'), (0.0, 'hsa_miR_3192_3p'), (0.0, 'hsa_miR_3191_5p'), (0.0, 'hsa_miR_3190_5p'), (0.0, 'hsa_miR_3190_3p'), (0.0, 
'hsa_miR_3189_5p'), (0.0, 'hsa_miR_3189_3p'), (0.0, 'hsa_miR_3187_5p'), (-0.0, 'hsa_miR_3187_3p'), (0.0, 'hsa_miR_3186_5p'), (0.0, 'hsa_miR_3186_3p'), (0.0, 'hsa_miR_3185'), (0.0, 'hsa_miR_3184_5p'), (0.0, 'hsa_miR_3184_3p'), (-0.0, 'hsa_miR_3183'), (-0.0, 'hsa_miR_3182'), (0.0, 'hsa_miR_3181'), (0.0, 'hsa_miR_3180'), (0.0, 'hsa_miR_3178'), (0.0, 'hsa_miR_3177_5p'), (-0.0, 'hsa_miR_3177_3p'), (0.0, 'hsa_miR_3176'), (0.0, 'hsa_miR_3175'), (-0.0, 'hsa_miR_3174'), (0.0, 'hsa_miR_3173_5p'), (0.0, 'hsa_miR_3173_3p'), (0.0, 'hsa_miR_3171'), (0.0, 'hsa_miR_3170'), (0.0, 'hsa_miR_3169'), (0.0, 'hsa_miR_3168'), (0.0, 'hsa_miR_3167'), (0.0, 'hsa_miR_3166'), (0.0, 'hsa_miR_3165'), (-0.0, 'hsa_miR_3163'), (0.0, 'hsa_miR_3162_5p'), (0.0, 'hsa_miR_3162_3p'), (-0.0, 'hsa_miR_3161'), (0.0, 'hsa_miR_3160_5p'), (0.0, 'hsa_miR_3159'), (0.0, 'hsa_miR_3158_5p'), (0.0, 'hsa_miR_3157_5p'), (-0.0, 'hsa_miR_3157_3p'), (0.0, 'hsa_miR_3156_3p'), (0.0, 'hsa_miR_3155b'), (0.0, 'hsa_miR_3155a'), (0.0, 'hsa_miR_3153'), (0.0, 'hsa_miR_3152_3p'), (0.0, 'hsa_miR_3151_5p'), (0.0, 'hsa_miR_3151_3p'), (0.0, 'hsa_miR_3150b_5p'), (0.0, 'hsa_miR_3150b_3p'), (0.0, 'hsa_miR_3150a_3p'), (0.0, 'hsa_miR_3149'), (0.0, 'hsa_miR_3148'), (0.0, 'hsa_miR_3147'), (0.0, 'hsa_miR_3146'), (0.0, 'hsa_miR_3145_5p'), (0.0, 'hsa_miR_3145_3p'), (-0.0, 'hsa_miR_3144_5p'), (-0.0, 'hsa_miR_3144_3p'), (0.0, 'hsa_miR_3143'), (0.0, 'hsa_miR_3142'), (0.0, 'hsa_miR_3141'), (0.0, 'hsa_miR_3140_5p'), (0.0, 'hsa_miR_3139'), (0.0, 'hsa_miR_3138'), (0.0, 'hsa_miR_3136_3p'), (0.0, 'hsa_miR_3135b'), (0.0, 'hsa_miR_3135a'), (0.0, 'hsa_miR_3134'), (-0.0, 'hsa_miR_3132'), (0.0, 'hsa_miR_3131'), (-0.0, 'hsa_miR_3130_5p'), (-0.0, 'hsa_miR_3130_3p'), (0.0, 'hsa_miR_3129_5p'), (0.0, 'hsa_miR_3129_3p'), (0.0, 'hsa_miR_3128'), (0.0, 'hsa_miR_3127_5p'), (0.0, 'hsa_miR_3126_5p'), (-0.0, 'hsa_miR_3126_3p'), (-0.0, 'hsa_miR_3125'), (0.0, 'hsa_miR_3124_5p'), (0.0, 'hsa_miR_3124_3p'), (0.0, 'hsa_miR_3122'), (0.0, 'hsa_miR_3121_5p'), (0.0, 'hsa_miR_3121_3p'), (0.0, 'hsa_miR_3120_5p'), (0.0, 'hsa_miR_3120_3p'), (0.0, 'hsa_miR_3119'), (0.0, 'hsa_miR_3117_5p'), (0.0, 'hsa_miR_3116'), (-0.0, 'hsa_miR_3115'), (-0.0, 'hsa_miR_30c_1_3p'), (-0.0, 'hsa_miR_3074_3p'), (0.0, 'hsa_miR_3064_3p'), (0.0, 'hsa_miR_302d_5p'), (-0.0, 'hsa_miR_302d_3p'), (0.0, 'hsa_miR_302c_5p'), (0.0, 'hsa_miR_302c_3p'), (0.0, 'hsa_miR_302b_5p'), (-0.0, 'hsa_miR_302b_3p'), (0.0, 'hsa_miR_302a_5p'), (0.0, 'hsa_miR_302a_3p'), (0.0, 'hsa_miR_301b'), (0.0, 'hsa_miR_301a_5p'), (-0.0, 'hsa_miR_301a_3p'), (0.0, 'hsa_miR_29c_5p'), (0.0, 'hsa_miR_29b_3p'), (0.0, 'hsa_miR_29b_1_5p'), (0.0, 'hsa_miR_299_3p'), (-0.0, 'hsa_miR_296_3p'), (0.0, 'hsa_miR_2861'), (0.0, 'hsa_miR_27a_5p'), (0.0, 'hsa_miR_26a_2_3p'), (-0.0, 'hsa_miR_26a_1_3p'), (-0.0, 'hsa_miR_2682_5p'), (0.0, 'hsa_miR_2682_3p'), (-0.0, 'hsa_miR_2681_5p'), (-0.0, 'hsa_miR_2681_3p'), (0.0, 'hsa_miR_25_5p'), (0.0, 'hsa_miR_25_3p'), (0.0, 'hsa_miR_24_2_5p'), (0.0, 'hsa_miR_2467_5p'), (0.0, 'hsa_miR_2467_3p'), (0.0, 'hsa_miR_23c'), (-0.0, 'hsa_miR_23a_5p'), (0.0, 'hsa_miR_2392'), (-0.0, 'hsa_miR_2278'), (0.0, 'hsa_miR_2277_3p'), (-0.0, 'hsa_miR_2276_5p'), (-0.0, 'hsa_miR_2276_3p'), (0.0, 'hsa_miR_223_5p'), (-0.0, 'hsa_miR_223_3p'), (-0.0, 'hsa_miR_219b_5p'), (-0.0, 'hsa_miR_219b_3p'), (-0.0, 'hsa_miR_219a_5p'), (0.0, 'hsa_miR_219a_1_3p'), (-0.0, 'hsa_miR_218_2_3p'), (0.0, 'hsa_miR_216b_3p'), (0.0, 'hsa_miR_216a_5p'), (0.0, 'hsa_miR_216a_3p'), (-0.0, 'hsa_miR_214_3p'), (-0.0, 'hsa_miR_212_3p'), (0.0, 'hsa_miR_2117'), (-0.0, 'hsa_miR_2116_5p'), (0.0, 'hsa_miR_2115_5p'), 
(0.0, 'hsa_miR_2115_3p'), (-0.0, 'hsa_miR_2114_5p'), (0.0, 'hsa_miR_2114_3p'), (0.0, 'hsa_miR_2113'), (0.0, 'hsa_miR_2110'), (0.0, 'hsa_miR_20a_5p'), (-0.0, 'hsa_miR_20a_3p'), (0.0, 'hsa_miR_208b_5p'), (0.0, 'hsa_miR_208b_3p'), (0.0, 'hsa_miR_208a_5p'), (0.0, 'hsa_miR_208a_3p'), (0.0, 'hsa_miR_2052'), (0.0, 'hsa_miR_203b_5p'), (0.0, 'hsa_miR_19b_2_5p'), (0.0, 'hsa_miR_19b_1_5p'), (-0.0, 'hsa_miR_19a_5p'), (0.0, 'hsa_miR_198'), (0.0, 'hsa_miR_197_5p'), (0.0, 'hsa_miR_1973'), (0.0, 'hsa_miR_1972'), (0.0, 'hsa_miR_195_3p'), (-0.0, 'hsa_miR_193b_5p'), (-0.0, 'hsa_miR_193a_3p'), (0.0, 'hsa_miR_191_5p'), (-0.0, 'hsa_miR_191_3p'), (0.0, 'hsa_miR_1915_5p'), (0.0, 'hsa_miR_1914_5p'), (0.0, 'hsa_miR_1914_3p'), (0.0, 'hsa_miR_1913'), (-0.0, 'hsa_miR_1912'), (0.0, 'hsa_miR_1911_5p'), (0.0, 'hsa_miR_1911_3p'), (0.0, 'hsa_miR_1910_5p'), (0.0, 'hsa_miR_1910_3p'), (0.0, 'hsa_miR_190b'), (0.0, 'hsa_miR_190a_3p'), (0.0, 'hsa_miR_1909_5p'), (0.0, 'hsa_miR_1909_3p'), (0.0, 'hsa_miR_1908_5p'), (-0.0, 'hsa_miR_1908_3p'), (0.0, 'hsa_miR_18a_5p'), (0.0, 'hsa_miR_188_5p'), (0.0, 'hsa_miR_187_5p'), (-0.0, 'hsa_miR_187_3p'), (-0.0, 'hsa_miR_186_5p'), (-0.0, 'hsa_miR_186_3p'), (0.0, 'hsa_miR_185_3p'), (0.0, 'hsa_miR_184'), (-0.0, 'hsa_miR_183_5p'), (0.0, 'hsa_miR_183_3p'), (0.0, 'hsa_miR_182_3p'), (0.0, 'hsa_miR_1827'), (0.0, 'hsa_miR_1825'), (-0.0, 'hsa_miR_181d_3p'), (0.0, 'hsa_miR_16_1_3p'), (0.0, 'hsa_miR_15b_5p'), (0.0, 'hsa_miR_15b_3p'), (0.0, 'hsa_miR_15a_3p'), (0.0, 'hsa_miR_1587'), (-0.0, 'hsa_miR_155_3p'), (0.0, 'hsa_miR_154_5p'), (-0.0, 'hsa_miR_154_3p'), (0.0, 'hsa_miR_153_5p'), (0.0, 'hsa_miR_153_3p'), (0.0, 'hsa_miR_1539'), (0.0, 'hsa_miR_1538'), (0.0, 'hsa_miR_1537_5p'), (0.0, 'hsa_miR_1537_3p'), (0.0, 'hsa_miR_151a_5p'), (0.0, 'hsa_miR_148b_5p'), (0.0, 'hsa_miR_147b'), (0.0, 'hsa_miR_147a'), (0.0, 'hsa_miR_1471'), (0.0, 'hsa_miR_1469'), (0.0, 'hsa_miR_1468_3p'), (0.0, 'hsa_miR_138_2_3p'), (0.0, 'hsa_miR_137'), (-0.0, 'hsa_miR_134_3p'), (0.0, 'hsa_miR_1343_5p'), (0.0, 'hsa_miR_1343_3p'), (0.0, 'hsa_miR_1323'), (0.0, 'hsa_miR_1322'), (0.0, 'hsa_miR_1321'), (0.0, 'hsa_miR_1306_3p'), (0.0, 'hsa_miR_1305'), (0.0, 'hsa_miR_1304_5p'), (0.0, 'hsa_miR_1303'), (-0.0, 'hsa_miR_1301_5p'), (-0.0, 'hsa_miR_129_1_3p'), (0.0, 'hsa_miR_1299'), (0.0, 'hsa_miR_1298_5p'), (0.0, 'hsa_miR_1297'), (-0.0, 'hsa_miR_1296_5p'), (0.0, 'hsa_miR_1296_3p'), (0.0, 'hsa_miR_1295b_5p'), (0.0, 'hsa_miR_1295b_3p'), (0.0, 'hsa_miR_1295a'), (0.0, 'hsa_miR_1292_3p'), (0.0, 'hsa_miR_1291'), (0.0, 'hsa_miR_1290'), (0.0, 'hsa_miR_128_2_5p'), (0.0, 'hsa_miR_1289'), (0.0, 'hsa_miR_1288_5p'), (0.0, 'hsa_miR_1288_3p'), (0.0, 'hsa_miR_1286'), (0.0, 'hsa_miR_1285_3p'), (-0.0, 'hsa_miR_1284'), (-0.0, 'hsa_miR_1283'), (0.0, 'hsa_miR_1282'), (0.0, 'hsa_miR_1281'), (0.0, 'hsa_miR_127_3p'), (0.0, 'hsa_miR_1278'), (-0.0, 'hsa_miR_1277_5p'), (0.0, 'hsa_miR_1276'), (0.0, 'hsa_miR_1275'), (0.0, 'hsa_miR_1273h_5p'), (0.0, 'hsa_miR_1273h_3p'), (0.0, 'hsa_miR_1273g_5p'), (0.0, 'hsa_miR_1273c'), (0.0, 'hsa_miR_1273a'), (0.0, 'hsa_miR_1272'), (0.0, 'hsa_miR_1271_5p'), (0.0, 'hsa_miR_1269b'), (0.0, 'hsa_miR_1268b'), (0.0, 'hsa_miR_1268a'), (0.0, 'hsa_miR_1267'), (0.0, 'hsa_miR_1266_3p'), (0.0, 'hsa_miR_1265'), (0.0, 'hsa_miR_1264'), (0.0, 'hsa_miR_1263'), (0.0, 'hsa_miR_1261'), (0.0, 'hsa_miR_1260b'), (0.0, 'hsa_miR_1260a'), (-0.0, 'hsa_miR_1258'), (0.0, 'hsa_miR_1257'), (0.0, 'hsa_miR_1256'), (-0.0, 'hsa_miR_1255a'), (-0.0, 'hsa_miR_1254'), (0.0, 'hsa_miR_1253'), (0.0, 'hsa_miR_1252_5p'), (0.0, 'hsa_miR_1252_3p'), (0.0, 'hsa_miR_1250_5p'), (0.0, 
'hsa_miR_1250_3p'), (0.0, 'hsa_miR_124_5p'), (0.0, 'hsa_miR_124_3p'), (-0.0, 'hsa_miR_1249'), (-0.0, 'hsa_miR_1246'), (0.0, 'hsa_miR_1245b_5p'), (0.0, 'hsa_miR_1245b_3p'), (-0.0, 'hsa_miR_1245a'), (0.0, 'hsa_miR_1238_5p'), (-0.0, 'hsa_miR_1238_3p'), (0.0, 'hsa_miR_1237_5p'), (-0.0, 'hsa_miR_1237_3p'), (0.0, 'hsa_miR_1236_5p'), (0.0, 'hsa_miR_1236_3p'), (-0.0, 'hsa_miR_1234_3p'), (0.0, 'hsa_miR_1231'), (-0.0, 'hsa_miR_1229_3p'), (-0.0, 'hsa_miR_1228_5p'), (-0.0, 'hsa_miR_1228_3p'), (0.0, 'hsa_miR_1227_5p'), (0.0, 'hsa_miR_1227_3p'), (-0.0, 'hsa_miR_1226_5p'), (0.0, 'hsa_miR_1225_5p'), (0.0, 'hsa_miR_1225_3p'), (0.0, 'hsa_miR_1224_3p'), (0.0, 'hsa_miR_1207_5p'), (0.0, 'hsa_miR_1207_3p'), (0.0, 'hsa_miR_1206'), (0.0, 'hsa_miR_1205'), (0.0, 'hsa_miR_1204'), (0.0, 'hsa_miR_1203'), (0.0, 'hsa_miR_1200'), (0.0, 'hsa_miR_1199_5p'), (0.0, 'hsa_miR_1199_3p'), (-0.0, 'hsa_miR_1197'), (0.0, 'hsa_miR_1193'), (0.0, 'hsa_miR_1185_5p'), (0.0, 'hsa_miR_1185_2_3p'), (-0.0, 'hsa_miR_1185_1_3p'), (0.0, 'hsa_miR_1184'), (0.0, 'hsa_miR_1183'), (-0.0, 'hsa_miR_1182'), (0.0, 'hsa_miR_1181'), (-0.0, 'hsa_miR_1180_5p'), (0.0, 'hsa_miR_1178_5p'), (0.0, 'hsa_miR_1178_3p'), (0.0, 'hsa_miR_106a_3p'), (-0.0, 'hsa_miR_105_3p'), (0.0, 'hsa_miR_103b'), (0.0, 'hsa_miR_103a_2_5p'), (0.0, 'hsa_let_7f_2_3p'), (0.0, 'hsa_let_7d_3p'), (-0.0001, 'hsa_miR_7705'), (-0.0001, 'hsa_miR_7156_5p'), (-0.0001, 'hsa_miR_7106_5p'), (-0.0001, 'hsa_miR_6891_5p'), (-0.0001, 'hsa_miR_627_3p'), (-0.0001, 'hsa_miR_5699_5p'), (-0.0001, 'hsa_miR_543'), (-0.0001, 'hsa_miR_495_3p'), (-0.0001, 'hsa_miR_4687_3p'), (-0.0001, 'hsa_miR_4677_3p'), (-0.0001, 'hsa_miR_431_3p'), (-0.0001, 'hsa_miR_409_3p'), (-0.0001, 'hsa_miR_3942_5p'), (-0.0001, 'hsa_miR_3137'), (-0.0001, 'hsa_miR_30b_5p'), (-0.0001, 'hsa_miR_3074_5p'), (-0.0001, 'hsa_miR_144_5p'), (-0.0001, 'hsa_miR_134_5p'), (-0.0001, 'hsa_miR_1304_3p'), (-0.0001, 'hsa_miR_1277_3p'), (-0.0003, 'hsa_miR_522_3p')]
# Random Forest Classification report
print(classification_report(y_true, rf_pred))
print(accuracy_score(y_true, rf_pred, normalize=True, sample_weight=None))
              precision    recall  f1-score   support

        blca       0.82      0.97      0.89       132
        brca       0.93      1.00      0.96       373
        chol       1.00      0.83      0.91        12
        coad       0.92      0.99      0.95       154
        esca       0.97      0.50      0.66        66
        hnsc       0.98      0.98      0.98       157
        kich       0.97      0.93      0.95        30
        kirc       0.99      0.98      0.99       178
        lich       1.00      0.98      0.99       120
        luad       0.96      0.91      0.93       173
          ov       0.99      0.97      0.98       144
        paad       0.97      0.78      0.87        50
        prad       0.99      0.99      0.99       170
        skcm       0.99      1.00      1.00       123
        stad       0.94      0.90      0.92       146
        thca       0.99      0.99      0.99       169
        ucec       0.94      0.96      0.95       179

   micro avg       0.95      0.95      0.95      2376
   macro avg       0.96      0.92      0.94      2376
weighted avg       0.96      0.95      0.95      2376

0.9537037037037037
def getClassAccuracy(rf_cm):
    # cancers: list of the 17 class names in confusion-matrix order (assumed defined elsewhere in the notebook)
    for i in range(len(rf_cm)):
        print("\n" + str(i) + ": " + cancers[i])
        # TP: diagonal entry for class i
        tp = rf_cm[i][i]
        print("TP: " + str(rf_cm[i][i]))
        # FP: column i, samples predicted as this cancer but belonging to another class
        fp = 0
        for row in range(len(rf_cm)):
            if row != i:
                fp += rf_cm[row][i]
        print("FP: " + str(fp))
        # FN: row i, samples of this cancer predicted as another class
        fn = 0
        for col in range(len(rf_cm[0])):
            if col != i:
                fn += rf_cm[i][col]
        print("FN: " + str(fn))
        # TN: everything outside row i and column i
        tn = 0
        for row in range(len(rf_cm)):
            if row != i:
                for col in range(len(rf_cm[0])):
                    if col != i:
                        tn += rf_cm[row][col]
        print("TN: " + str(tn))
        accuracy = (tp + tn) / float(tp + tn + fp + fn)
        print("Accuracy: " + str(accuracy))
# getClassAccuracy(rf_cm)
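For reference, the same per-class TP/FP/FN/TN bookkeeping can be written in a few numpy operations (a sketch over the rf_cm computed above):
# Sketch: vectorized per-class accuracy from the confusion matrix (rows = true, columns = predicted)
cm = numpy.asarray(rf_cm, dtype=float)
tp = numpy.diag(cm)                # correctly predicted per class
fp = cm.sum(axis=0) - tp           # predicted as the class but actually another class
fn = cm.sum(axis=1) - tp           # belonging to the class but predicted as another class
tn = cm.sum() - tp - fp - fn       # everything else
print((tp + tn) / (tp + tn + fp + fn))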
# KNN
# n_neighbors rule of thumb... num_features^(1/2)
knnmodel = KNeighborsClassifier(n_neighbors=3)
knn = knnmodel.fit(train, r_train_labels)
knn_pred = knn.predict(test)
# 3 n_neighbors got accuracy of 83%
# 49 neighbors got accuracy of 75%
# 25 neighbors got 79%
# 10 neighbors got 81%
# Model Accuracy, how often is the classifier correct
print("Accuracy: ", metrics.accuracy_score(r_test_labels, knn_pred))
knn_cm = confusion_matrix(r_test_labels, knn_pred,)
y_true = pd.Series(r_test_labels)
knn_pred = pd.Series(knn_pred)
pd.crosstab(y_true, knn_pred, rownames=['True'], colnames=['Predicted'], margins=True)
('Accuracy: ', 0.8320707070707071)
Predicted | blca | brca | chol | coad | esca | hnsc | kich | kirc | lich | luad | ov | paad | prad | skcm | stad | thca | ucec | All |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
True | ||||||||||||||||||
blca | 100 | 15 | 0 | 3 | 13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 |
brca | 10 | 358 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 373 |
chol | 1 | 1 | 9 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
coad | 4 | 3 | 1 | 145 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 154 |
esca | 31 | 3 | 0 | 6 | 25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 66 |
hnsc | 2 | 0 | 0 | 0 | 2 | 136 | 0 | 1 | 2 | 9 | 0 | 0 | 2 | 0 | 3 | 0 | 0 | 157 |
kich | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 |
kirc | 1 | 2 | 0 | 0 | 0 | 1 | 2 | 170 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 178 |
lich | 1 | 1 | 0 | 0 | 0 | 4 | 0 | 2 | 92 | 7 | 0 | 1 | 3 | 0 | 5 | 3 | 1 | 120 |
luad | 3 | 7 | 0 | 1 | 0 | 8 | 0 | 2 | 4 | 114 | 1 | 11 | 3 | 1 | 7 | 6 | 5 | 173 |
ov | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 138 | 0 | 0 | 0 | 0 | 0 | 4 | 144 |
paad | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 2 | 1 | 11 | 0 | 27 | 2 | 0 | 5 | 0 | 0 | 50 |
prad | 0 | 1 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 158 | 0 | 3 | 0 | 3 | 170 |
skcm | 6 | 4 | 0 | 0 | 0 | 4 | 0 | 5 | 5 | 3 | 0 | 0 | 0 | 93 | 1 | 2 | 0 | 123 |
stad | 6 | 0 | 0 | 6 | 0 | 5 | 0 | 0 | 8 | 10 | 0 | 2 | 13 | 0 | 96 | 0 | 0 | 146 |
thca | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 2 | 10 | 1 | 0 | 2 | 0 | 0 | 150 | 0 | 169 |
ucec | 2 | 12 | 0 | 0 | 0 | 4 | 1 | 10 | 0 | 5 | 0 | 0 | 3 | 2 | 1 | 0 | 139 | 179 |
All | 167 | 411 | 10 | 162 | 43 | 167 | 30 | 196 | 114 | 172 | 140 | 42 | 186 | 96 | 122 | 161 | 157 | 2376 |
# KNN classification report
print(classification_report(y_true, knn_pred))
print(accuracy_score(y_true, knn_pred, normalize=True, sample_weight=None))
              precision    recall  f1-score   support

        blca       0.60      0.76      0.67       132
        brca       0.87      0.96      0.91       373
        chol       0.90      0.75      0.82        12
        coad       0.90      0.94      0.92       154
        esca       0.58      0.38      0.46        66
        hnsc       0.81      0.87      0.84       157
        kich       0.90      0.90      0.90        30
        kirc       0.87      0.96      0.91       178
        lich       0.81      0.77      0.79       120
        luad       0.66      0.66      0.66       173
          ov       0.99      0.96      0.97       144
        paad       0.64      0.54      0.59        50
        prad       0.85      0.93      0.89       170
        skcm       0.97      0.76      0.85       123
        stad       0.79      0.66      0.72       146
        thca       0.93      0.89      0.91       169
        ucec       0.89      0.78      0.83       179

   micro avg       0.83      0.83      0.83      2376
   macro avg       0.82      0.79      0.80      2376
weighted avg       0.83      0.83      0.83      2376

0.8320707070707071
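The k values quoted in the comments above were compared by hand; a small sweep over n_neighbors (a sketch reusing the scaled train/test split, with illustrative k values) makes that comparison reproducible:
# Sketch: compare a few values of n_neighbors on the same split
for k in [3, 10, 25, 49]:
    knn_k = KNeighborsClassifier(n_neighbors=k).fit(train, r_train_labels)
    print(k, metrics.accuracy_score(r_test_labels, knn_k.predict(test)))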
## SVM
svmmodel = svm.SVC(gamma='scale', decision_function_shape='ovo')
svmfit = svmmodel.fit(train, r_train_labels)   # avoid shadowing the sklearn svm module
svm_pred = svmfit.predict(test)
# Model Accuracy, how often is the classifier correct
print("Accuracy: ", metrics.accuracy_score(r_test_labels, svm_pred))
svm_cm = confusion_matrix(r_test_labels, svm_pred)
y_true = pd.Series(r_test_labels)
svm_pred = pd.Series(svm_pred)
pd.crosstab(y_true, svm_pred, rownames=['True'], colnames=['Predicted'], margins=True)
('Accuracy: ', 0.15698653198653198)
Predicted | brca | All |
---|---|---|
True | ||
blca | 132 | 132 |
brca | 373 | 373 |
chol | 12 | 12 |
coad | 154 | 154 |
esca | 66 | 66 |
hnsc | 157 | 157 |
kich | 30 | 30 |
kirc | 178 | 178 |
lich | 120 | 120 |
luad | 173 | 173 |
ov | 144 | 144 |
paad | 50 | 50 |
prad | 170 | 170 |
skcm | 123 | 123 |
stad | 146 | 146 |
thca | 169 | 169 |
ucec | 179 | 179 |
All | 2376 | 2376 |
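The default RBF SVC collapses onto the majority class (brca) here, giving 15.7% accuracy. This was not pursued further, but a quick diagnostic (a sketch, assuming the same scaled arrays) would be to try a linear kernel or a larger C:
# Sketch: two quick SVM variants to check whether the collapse is a kernel/regularization issue
for clf in [svm.SVC(kernel='linear', C=1.0), svm.SVC(gamma='scale', C=10.0)]:
    fitted = clf.fit(train, r_train_labels)
    print(clf.kernel, clf.C, metrics.accuracy_score(r_test_labels, fitted.predict(test)))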
# Plot the expression profile of one test sample across the miRNA features
values = test[100]
features = numpy.arange(len(test[0]))   # one point per miRNA feature
plt.plot(features, values)
[<matplotlib.lines.Line2D at 0x12ae67990>]
# Top 10 most important features, ranked by random forest
importances = rfmodel.feature_importances_
indices = numpy.argsort(importances)[::-1]
#for f in range(train.shape[1]):
for f in range(0,10):
print("%d. feature %s (%f)" % (f + 1 , data.columns[indices[f]], importances[indices[f]]))
1. feature hsa_miR_205_5p (0.011021)
2. feature hsa_miR_135a_5p (0.010891)
3. feature hsa_miR_194_5p (0.010295)
4. feature hsa_miR_192_5p (0.010200)
5. feature hsa_miR_375 (0.009484)
6. feature hsa_miR_205_3p (0.009425)
7. feature hsa_miR_944 (0.008933)
8. feature hsa_miR_200c_5p (0.008926)
9. feature hsa_miR_122_5p (0.008564)
10. feature hsa_miR_194_3p (0.008348)
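A possible bar-chart view of the same ranking (a sketch reusing importances and indices from above):
# Sketch: bar chart of the top 10 random forest feature importances
top = indices[:10]
plt.bar(range(10), importances[top])
plt.xticks(range(10), data.columns[top], rotation=90)
plt.ylabel('importance')
plt.tight_layout()
plt.show()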
The output file 400epoch50learn contains dictionaries with the test accuracy at each learning rate and the per-epoch training histories. Learning rates were generated with numpy.geomspace for logarithmically spaced values.
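For example, five logarithmically spaced rates between the same endpoints:
print(numpy.geomspace(0.01, 0.000001, num=5))   # 0.01, 0.001, 0.0001, 1e-05, 1e-06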
#### CAREFUL
#### You are about to load in new variables from the cluster
#### To use for further visualization
filename='db/400epoch50learn'
with open(filename, 'rb') as fp:
    learnloss = pickle.load(fp)
    histories = pickle.load(fp)
## First plot accuracy vs learning rate to decide on a good range
def visualizeLearnLossRange(learnloss):
    plt.title('model test accuracy')
    plt.xscale('log')
    plt.ylabel('accuracy')
    plt.xlabel('learning rate')
    plt.scatter(list(learnloss.keys()), list(learnloss.values()))
    plt.gca().invert_xaxis()
    # plt.axvspan(2.8e-04, 2e-05, color='yellow', alpha=0.5)
    plt.savefig("mirna/accuracy_learningrate_range.pdf")
    plt.show()
visualizeLearnLossRange(learnloss)
## Look at rates and decide lower/upper bound
max_acc = max(learnloss.values())  # maximum test accuracy
max_lr = [k for k, v in learnloss.items() if v == max_acc]  # all learning rates that reach the maximum
print("Max test accuracy: ")
print(max_acc, max_lr)
# print(ll)
# print("Mean Accuracy: " + str(numpy.mean(histories[ll]['acc'])))
# print("Max Accuracy: " + str(max(histories[ll]['acc'])))
orderedkeys = []
for ll in learnloss:
    orderedkeys.append(ll)
orderedkeys = sorted(orderedkeys)
# get numbers for plotting
# good rates are those with test accuracy above 0.77
counter = 0
goodrates = []
for ll in orderedkeys:
    # if ll < 3e-04 and ll > 2.5e-05:
    if learnloss[ll] > 0.77:
        counter += 1
        goodrates.append(ll)
        print("Rate: " + str(ll) + "\tAccuracy: " + str(learnloss[ll]))
print(counter)
filename='goodrates'
with open(filename, 'wb') as fp:
    pickle.dump(goodrates, fp)
Max test accuracy:
(0.7946127946127947, [5.1794746792312125e-05])
Rate: 2.4420530945486548e-05    Accuracy: 0.7727272727272727
Rate: 5.1794746792312125e-05    Accuracy: 0.7946127946127947
Rate: 6.250551925273976e-05     Accuracy: 0.773989898989899
Rate: 9.102981779915228e-05     Accuracy: 0.7815656565656566
Rate: 0.00013257113655901095    Accuracy: 0.7849326599326599
Rate: 0.00023299518105153718    Accuracy: 0.7781986531986532
6
def visualizeEpochs(learnloss, histories):
    fig = plt.figure(figsize=(10, 3))
    counter = 0
    nrRows = 4
    nrCols = 3
    for lr in goodrates:   # goodrates selected above (test accuracy > 0.77)
        # generate subplots
        ax = fig.add_subplot(nrRows, nrCols, counter + 1)
        ax.plot(histories[lr]['acc'])
        ax.plot(histories[lr]['val_acc'])
        plt.title('learning rate: ' + '{:.3g}'.format(lr))
        ax.set_ylabel('accuracy')
        ax.set_xlabel('epoch')
        ax.legend(['train', 'test'], loc='upper left')
        counter += 1
    fig.set_figheight(12)
    fig.set_figwidth(15)
    plt.tight_layout()
    plt.savefig("mirna/6-accuracy_epochs.pdf")
    plt.show()
visualizeEpochs(learnloss, histories)
# Learning rate: 0.01 to 0.000001
# encoded labels are one-hot encoded
# Test labels are treated with ravel
learnloss = {}
histories = {}
def learnLoss(learningRate, epochs, train, encoded_train, test, encoded_test, test_labels):
    model = tf.keras.Sequential()
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(17, activation='softmax'))
    model.compile(optimizer=tf.train.RMSPropOptimizer(learningRate),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.fit(train, encoded_train, validation_data=(test, encoded_test), epochs=epochs, batch_size=32)
    # evaluate on the held-out test split
    pred_y = model.predict_classes(test)
    nnyhat = confusion_matrix(test_types, pred_y)
    accuracy = metrics.accuracy_score(test_labels, pred_y)
    print("Accuracy: ", accuracy)
    learnloss[learningRate] = accuracy
    histories[learningRate] = model.history.history
learningRates = numpy.geomspace(0.01, 0.000001, num=50)
print(learningRates)
for lr in learningRates:
    learnLoss(lr, 500, train, encoded_train, test, encoded_test, r_test_types)
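Each entry of histories is the Keras History.history dict for the corresponding learning rate; with this TensorFlow version the keys are 'acc', 'val_acc', 'loss' and 'val_loss', each holding one value per epoch, which is what the plotting functions above rely on. A quick way to inspect one entry (a sketch):
# Sketch: inspect the stored training history for one learning rate
lr0 = sorted(histories)[0]
print(sorted(histories[lr0].keys()))
print(len(histories[lr0]['val_acc']), max(histories[lr0]['val_acc']))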
### CAREFUL
### You are about to DUMP and replace the current saved variables
filename='500epoch50learn'
with open(filename, 'wb') as fp:
    pickle.dump(learnloss, fp)
    pickle.dump(histories, fp)
filename='500epoch50learn'
with open(filename, 'rb') as fp:
    learnloss = pickle.load(fp)
    histories = pickle.load(fp)
Batch sizes were first explored from 100 to 2000, and the search was then narrowed down to smaller batch sizes starting from 1.
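The batch-size sweep itself was run on the cluster (presumably via nn_loop_miRNA.py); below is a minimal sketch of what such a sweep amounts to, reusing the learnLoss-style model. The learning rate, epoch count and batch range here are assumptions inferred from the earlier sweep and from the file name 200epoch-10batch loaded next.
# Sketch of the batch-size sweep (assumed setup; the actual run happened on the cluster)
batch_acc = {}
batch_hist = {}
def batchLoss(batchSize, epochs=200, learningRate=5.1794746792312125e-05):
    model = tf.keras.Sequential()
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(17, activation='softmax'))
    model.compile(optimizer=tf.train.RMSPropOptimizer(learningRate),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.fit(train, encoded_train, validation_data=(test, encoded_test),
              epochs=epochs, batch_size=batchSize)
    batch_acc[batchSize] = metrics.accuracy_score(r_test_types, model.predict_classes(test))
    batch_hist[batchSize] = model.history.history
for b in range(1, 300, 10):   # 1, 11, 21, ..., 291 -- matches the keys in the loaded results
    batchLoss(b)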
filename='200epoch-10batch'
with open(filename, 'rb') as fp:
    batch_acc = pickle.load(fp)
    batch_hist = pickle.load(fp)
orderedkeys = []
for b in batch_acc:
    orderedkeys.append(b)
orderedkeys = sorted(orderedkeys)
counter = 0
goodrates = []
for b in orderedkeys:
    # if learnloss[ll] > 0.77:
    #     counter += 1
    #     goodrates.append(ll)
    print("Batch: " + str(b) + "\tAccuracy: " + str(batch_acc[b]))
print(counter)
Batch: 1    Accuracy: 0.7373737373737373
Batch: 11   Accuracy: 0.7672558922558923
Batch: 21   Accuracy: 0.7243265993265994
Batch: 31   Accuracy: 0.742003367003367
Batch: 41   Accuracy: 0.7617845117845118
Batch: 51   Accuracy: 0.7441077441077442
Batch: 61   Accuracy: 0.7310606060606061
Batch: 71   Accuracy: 0.7184343434343434
Batch: 81   Accuracy: 0.7268518518518519
Batch: 91   Accuracy: 0.7264309764309764
Batch: 101  Accuracy: 0.7251683501683501
Batch: 111  Accuracy: 0.7344276094276094
Batch: 121  Accuracy: 0.7117003367003367
Batch: 131  Accuracy: 0.7293771043771043
Batch: 141  Accuracy: 0.7024410774410774
Batch: 151  Accuracy: 0.70496632996633
Batch: 161  Accuracy: 0.7175925925925926
Batch: 171  Accuracy: 0.7184343434343434
Batch: 181  Accuracy: 0.6746632996632996
Batch: 191  Accuracy: 0.6898148148148148
Batch: 201  Accuracy: 0.6712962962962963
Batch: 211  Accuracy: 0.6708754208754208
Batch: 221  Accuracy: 0.6944444444444444
Batch: 231  Accuracy: 0.6553030303030303
Batch: 241  Accuracy: 0.6632996632996633
Batch: 251  Accuracy: 0.6498316498316499
Batch: 261  Accuracy: 0.6561447811447811
Batch: 271  Accuracy: 0.6485690235690236
Batch: 281  Accuracy: 0.6590909090909091
Batch: 291  Accuracy: 0.6599326599326599
0
## Now plot accuracy vs batch size to decide on a good range
def visualizeMetricAcc(dic):
    plt.title('model test accuracy')
    plt.xscale('log')
    plt.ylabel('accuracy')
    plt.xlabel('batch')
    plt.scatter(list(dic.keys()), list(dic.values()))
    plt.gca().invert_xaxis()
    # plt.axvspan(2.8e-04, 2e-05, color='yellow', alpha=0.5)
    # plt.savefig("accuracy_learningrate_range.pdf")
    plt.show()
visualizeMetricAcc(batch_acc)
## Look at batch sizes and decide lower/upper bound
max_acc = max(batch_acc.values())  # maximum test accuracy
max_lr = [k for k, v in batch_acc.items() if v == max_acc]  # all batch sizes that reach the maximum
print("Max test accuracy: ")
print(max_acc, max_lr)
Max test accuracy: (0.7672558922558923, [11])
def visualizeEpochsGeneral(learnloss, histories):
    fig = plt.figure(figsize=(10, 3))
    counter = 0
    nrRows = 2
    nrCols = 3
    for lr in learnloss:
        # the original condition `lr < 100 and lr < 50` reduces to `lr < 50`
        if lr < 50:
            ax = fig.add_subplot(nrRows, nrCols, counter + 1)
            ax.plot(histories[lr]['acc'])
            ax.plot(histories[lr]['val_acc'])
            # called with batch_acc/batch_hist below, so `lr` is really the batch size here
            plt.title('learning rate: ' + '{:.3g}'.format(lr))
            ax.set_ylabel('accuracy')
            ax.set_xlabel('epoch')
            ax.legend(['train', 'test'], loc='upper left')
            counter += 1
    fig.set_figheight(8)
    fig.set_figwidth(15)
    plt.tight_layout()
    # plt.savefig("accuracy_epochs.pdf")
    plt.show()
visualizeEpochsGeneral(batch_acc, batch_hist)
model = tf.keras.Sequential()
print(train.shape)
#model.add(layers.InputLayer(input_tensor=train, input_shape=train.shape))
model.add(layers.Dense(128, activation='sigmoid'))
#model.add(layers.Dense(128, activation='sigmoid'))
model.add(layers.Dense(128, activation='sigmoid'))
model.add(layers.Dense(17, activation='softmax'))
model.compile(optimizer=tf.train.RMSPropOptimizer(5.1794746792312125e-05),
loss='categorical_crossentropy',
metrics=['accuracy'])
(5574, 2403)
model.fit(train, encoded_train, validation_data=(test, encoded_test), epochs=200, batch_size=32)
Train on 5574 samples, validate on 2376 samples Epoch 1/200 5574/5574 [==============================] - 4s 736us/step - loss: 2.8503 - acc: 0.0673 - val_loss: 2.7450 - val_acc: 0.1570 Epoch 2/200 5574/5574 [==============================] - 2s 383us/step - loss: 2.6926 - acc: 0.1496 - val_loss: 2.6553 - val_acc: 0.1570 Epoch 3/200 5574/5574 [==============================] - 2s 288us/step - loss: 2.6412 - acc: 0.1498 - val_loss: 2.6215 - val_acc: 0.1591 Epoch 4/200 5574/5574 [==============================] - 1s 213us/step - loss: 2.6082 - acc: 0.1552 - val_loss: 2.5906 - val_acc: 0.1742 Epoch 5/200 5574/5574 [==============================] - 1s 248us/step - loss: 2.5755 - acc: 0.1760 - val_loss: 2.5571 - val_acc: 0.1848 Epoch 6/200 5574/5574 [==============================] - 3s 465us/step - loss: 2.5404 - acc: 0.2015 - val_loss: 2.5210 - val_acc: 0.2348 Epoch 7/200 5574/5574 [==============================] - 2s 335us/step - loss: 2.5024 - acc: 0.2408 - val_loss: 2.4831 - val_acc: 0.2723 Epoch 8/200 5574/5574 [==============================] - 1s 260us/step - loss: 2.4621 - acc: 0.2722 - val_loss: 2.4441 - val_acc: 0.3056 Epoch 9/200 5574/5574 [==============================] - 3s 475us/step - loss: 2.4195 - acc: 0.3256 - val_loss: 2.4002 - val_acc: 0.3245 Epoch 10/200 5574/5574 [==============================] - 2s 349us/step - loss: 2.3751 - acc: 0.3470 - val_loss: 2.3568 - val_acc: 0.3552 Epoch 11/200 5574/5574 [==============================] - 1s 207us/step - loss: 2.3301 - acc: 0.3789 - val_loss: 2.3132 - val_acc: 0.3716 Epoch 12/200 5574/5574 [==============================] - 1s 212us/step - loss: 2.2842 - acc: 0.4013 - val_loss: 2.2688 - val_acc: 0.4007 Epoch 13/200 5574/5574 [==============================] - 1s 244us/step - loss: 2.2370 - acc: 0.4255 - val_loss: 2.2238 - val_acc: 0.4175 Epoch 14/200 5574/5574 [==============================] - 1s 215us/step - loss: 2.1898 - acc: 0.4392 - val_loss: 2.1778 - val_acc: 0.4327 Epoch 15/200 5574/5574 [==============================] - 1s 212us/step - loss: 2.1434 - acc: 0.4582 - val_loss: 2.1337 - val_acc: 0.4419 Epoch 16/200 5574/5574 [==============================] - 1s 192us/step - loss: 2.0974 - acc: 0.4781 - val_loss: 2.0913 - val_acc: 0.4423 Epoch 17/200 5574/5574 [==============================] - 1s 226us/step - loss: 2.0521 - acc: 0.4770 - val_loss: 2.0472 - val_acc: 0.4579 Epoch 18/200 5574/5574 [==============================] - 1s 216us/step - loss: 2.0066 - acc: 0.4916 - val_loss: 2.0049 - val_acc: 0.4760 Epoch 19/200 5574/5574 [==============================] - 1s 235us/step - loss: 1.9626 - acc: 0.5039 - val_loss: 1.9618 - val_acc: 0.4870 Epoch 20/200 5574/5574 [==============================] - 2s 282us/step - loss: 1.9182 - acc: 0.5099 - val_loss: 1.9205 - val_acc: 0.5147 Epoch 21/200 5574/5574 [==============================] - 2s 376us/step - loss: 1.8755 - acc: 0.5305 - val_loss: 1.8798 - val_acc: 0.5177 Epoch 22/200 5574/5574 [==============================] - 1s 219us/step - loss: 1.8322 - acc: 0.5355 - val_loss: 1.8389 - val_acc: 0.5265 Epoch 23/200 5574/5574 [==============================] - 1s 198us/step - loss: 1.7903 - acc: 0.5499 - val_loss: 1.8005 - val_acc: 0.5236 Epoch 24/200 5574/5574 [==============================] - 1s 227us/step - loss: 1.7498 - acc: 0.5527 - val_loss: 1.7623 - val_acc: 0.5417 Epoch 25/200 5574/5574 [==============================] - 1s 222us/step - loss: 1.7095 - acc: 0.5694 - val_loss: 1.7245 - val_acc: 0.5497 Epoch 26/200 5574/5574 [==============================] - 1s 
229us/step - loss: 1.6705 - acc: 0.5743 - val_loss: 1.6877 - val_acc: 0.5640 Epoch 27/200 5574/5574 [==============================] - 2s 311us/step - loss: 1.6320 - acc: 0.5849 - val_loss: 1.6520 - val_acc: 0.5715 Epoch 28/200 5574/5574 [==============================] - 1s 244us/step - loss: 1.5946 - acc: 0.5940 - val_loss: 1.6166 - val_acc: 0.5850 Epoch 29/200 5574/5574 [==============================] - 1s 211us/step - loss: 1.5576 - acc: 0.6039 - val_loss: 1.5817 - val_acc: 0.5964 Epoch 30/200 5574/5574 [==============================] - 1s 249us/step - loss: 1.5216 - acc: 0.6173 - val_loss: 1.5490 - val_acc: 0.6065 Epoch 31/200 5574/5574 [==============================] - 1s 238us/step - loss: 1.4872 - acc: 0.6240 - val_loss: 1.5162 - val_acc: 0.6162 Epoch 32/200 5574/5574 [==============================] - 1s 224us/step - loss: 1.4533 - acc: 0.6381 - val_loss: 1.4856 - val_acc: 0.6153 Epoch 33/200 5574/5574 [==============================] - 1s 253us/step - loss: 1.4204 - acc: 0.6457 - val_loss: 1.4526 - val_acc: 0.6359 Epoch 34/200 5574/5574 [==============================] - 2s 283us/step - loss: 1.3879 - acc: 0.6577 - val_loss: 1.4217 - val_acc: 0.6448 Epoch 35/200 5574/5574 [==============================] - 2s 272us/step - loss: 1.3566 - acc: 0.6647 - val_loss: 1.3912 - val_acc: 0.6540 Epoch 36/200 5574/5574 [==============================] - 2s 273us/step - loss: 1.3258 - acc: 0.6726 - val_loss: 1.3622 - val_acc: 0.6599 Epoch 37/200 5574/5574 [==============================] - 2s 278us/step - loss: 1.2962 - acc: 0.6773 - val_loss: 1.3331 - val_acc: 0.6633 Epoch 38/200 5574/5574 [==============================] - 1s 235us/step - loss: 1.2666 - acc: 0.6825 - val_loss: 1.3047 - val_acc: 0.6776 Epoch 39/200 5574/5574 [==============================] - 1s 217us/step - loss: 1.2383 - acc: 0.6923 - val_loss: 1.2788 - val_acc: 0.6738 Epoch 40/200 5574/5574 [==============================] - 1s 261us/step - loss: 1.2108 - acc: 0.6959 - val_loss: 1.2524 - val_acc: 0.6835 Epoch 41/200 5574/5574 [==============================] - 1s 205us/step - loss: 1.1834 - acc: 0.7020 - val_loss: 1.2243 - val_acc: 0.6932 Epoch 42/200 5574/5574 [==============================] - 1s 199us/step - loss: 1.1566 - acc: 0.7104 - val_loss: 1.1988 - val_acc: 0.6970 Epoch 43/200 5574/5574 [==============================] - 2s 293us/step - loss: 1.1308 - acc: 0.7142 - val_loss: 1.1737 - val_acc: 0.7050 Epoch 44/200 5574/5574 [==============================] - 1s 256us/step - loss: 1.1056 - acc: 0.7214 - val_loss: 1.1495 - val_acc: 0.7050 Epoch 45/200 5574/5574 [==============================] - 1s 210us/step - loss: 1.0810 - acc: 0.7237 - val_loss: 1.1250 - val_acc: 0.7151 Epoch 46/200 5574/5574 [==============================] - 1s 224us/step - loss: 1.0571 - acc: 0.7309 - val_loss: 1.1024 - val_acc: 0.7205 Epoch 47/200 5574/5574 [==============================] - 1s 241us/step - loss: 1.0337 - acc: 0.7348 - val_loss: 1.0793 - val_acc: 0.7264 Epoch 48/200 5574/5574 [==============================] - 1s 189us/step - loss: 1.0113 - acc: 0.7382 - val_loss: 1.0581 - val_acc: 0.7285 Epoch 49/200 5574/5574 [==============================] - 1s 264us/step - loss: 0.9890 - acc: 0.7447 - val_loss: 1.0356 - val_acc: 0.7311 Epoch 50/200 5574/5574 [==============================] - 1s 265us/step - loss: 0.9671 - acc: 0.7490 - val_loss: 1.0134 - val_acc: 0.7391 Epoch 51/200 5574/5574 [==============================] - 3s 459us/step - loss: 0.9463 - acc: 0.7571 - val_loss: 0.9941 - val_acc: 0.7386 Epoch 52/200 5574/5574 
[==============================] - 3s 468us/step - loss: 0.9261 - acc: 0.7585 - val_loss: 0.9738 - val_acc: 0.7403 Epoch 53/200 5574/5574 [==============================] - 4s 719us/step - loss: 0.9062 - acc: 0.7609 - val_loss: 0.9537 - val_acc: 0.7483 Epoch 54/200 5574/5574 [==============================] - 3s 593us/step - loss: 0.8868 - acc: 0.7653 - val_loss: 0.9350 - val_acc: 0.7508 Epoch 55/200 5574/5574 [==============================] - 2s 335us/step - loss: 0.8679 - acc: 0.7702 - val_loss: 0.9164 - val_acc: 0.7551 Epoch 56/200 5574/5574 [==============================] - 5s 892us/step - loss: 0.8497 - acc: 0.7747 - val_loss: 0.8979 - val_acc: 0.7618 Epoch 57/200 5574/5574 [==============================] - 4s 704us/step - loss: 0.8319 - acc: 0.7770 - val_loss: 0.8807 - val_acc: 0.7635 Epoch 58/200 5574/5574 [==============================] - 2s 407us/step - loss: 0.8148 - acc: 0.7829 - val_loss: 0.8643 - val_acc: 0.7647 Epoch 59/200 5574/5574 [==============================] - 2s 289us/step - loss: 0.7982 - acc: 0.7851 - val_loss: 0.8476 - val_acc: 0.7656 Epoch 60/200 5574/5574 [==============================] - 3s 534us/step - loss: 0.7819 - acc: 0.7874 - val_loss: 0.8318 - val_acc: 0.7702 Epoch 61/200 5574/5574 [==============================] - 3s 460us/step - loss: 0.7664 - acc: 0.7912 - val_loss: 0.8157 - val_acc: 0.7710 Epoch 62/200 5574/5574 [==============================] - 2s 355us/step - loss: 0.7511 - acc: 0.7933 - val_loss: 0.8005 - val_acc: 0.7811 Epoch 63/200 5574/5574 [==============================] - 2s 396us/step - loss: 0.7362 - acc: 0.7973 - val_loss: 0.7859 - val_acc: 0.7799 Epoch 64/200 5574/5574 [==============================] - 2s 307us/step - loss: 0.7218 - acc: 0.8009 - val_loss: 0.7722 - val_acc: 0.7811 Epoch 65/200 5574/5574 [==============================] - 3s 452us/step - loss: 0.7079 - acc: 0.8041 - val_loss: 0.7579 - val_acc: 0.7854 Epoch 66/200 5574/5574 [==============================] - 2s 363us/step - loss: 0.6943 - acc: 0.8062 - val_loss: 0.7439 - val_acc: 0.7934 Epoch 67/200 5574/5574 [==============================] - 2s 282us/step - loss: 0.6809 - acc: 0.8114 - val_loss: 0.7311 - val_acc: 0.7925 Epoch 68/200 5574/5574 [==============================] - 2s 284us/step - loss: 0.6681 - acc: 0.8150 - val_loss: 0.7180 - val_acc: 0.8030 Epoch 69/200 5574/5574 [==============================] - 1s 257us/step - loss: 0.6560 - acc: 0.8181 - val_loss: 0.7062 - val_acc: 0.8085 Epoch 70/200 5574/5574 [==============================] - 1s 216us/step - loss: 0.6439 - acc: 0.8208 - val_loss: 0.6935 - val_acc: 0.8199 Epoch 71/200 5574/5574 [==============================] - 1s 192us/step - loss: 0.6324 - acc: 0.8249 - val_loss: 0.6817 - val_acc: 0.8224 Epoch 72/200 5574/5574 [==============================] - 1s 201us/step - loss: 0.6209 - acc: 0.8301 - val_loss: 0.6706 - val_acc: 0.8211 Epoch 73/200 5574/5574 [==============================] - 1s 201us/step - loss: 0.6099 - acc: 0.8319 - val_loss: 0.6593 - val_acc: 0.8266 Epoch 74/200 5574/5574 [==============================] - 2s 270us/step - loss: 0.5991 - acc: 0.8376 - val_loss: 0.6496 - val_acc: 0.8253 Epoch 75/200 5574/5574 [==============================] - 2s 284us/step - loss: 0.5888 - acc: 0.8394 - val_loss: 0.6386 - val_acc: 0.8295 Epoch 76/200 5574/5574 [==============================] - 2s 334us/step - loss: 0.5783 - acc: 0.8448 - val_loss: 0.6276 - val_acc: 0.8418 Epoch 77/200 5574/5574 [==============================] - 2s 383us/step - loss: 0.5683 - acc: 0.8495 - val_loss: 0.6187 - 
val_acc: 0.8354 Epoch 78/200 5574/5574 [==============================] - 2s 330us/step - loss: 0.5587 - acc: 0.8518 - val_loss: 0.6080 - val_acc: 0.8430 Epoch 79/200 5574/5574 [==============================] - 2s 314us/step - loss: 0.5492 - acc: 0.8567 - val_loss: 0.5987 - val_acc: 0.8451 Epoch 80/200 5574/5574 [==============================] - 1s 236us/step - loss: 0.5399 - acc: 0.8599 - val_loss: 0.5901 - val_acc: 0.8464 Epoch 81/200 5574/5574 [==============================] - 2s 296us/step - loss: 0.5308 - acc: 0.8656 - val_loss: 0.5808 - val_acc: 0.8464 Epoch 82/200 5574/5574 [==============================] - 1s 267us/step - loss: 0.5222 - acc: 0.8671 - val_loss: 0.5720 - val_acc: 0.8485 Epoch 83/200 5574/5574 [==============================] - 3s 457us/step - loss: 0.5132 - acc: 0.8705 - val_loss: 0.5623 - val_acc: 0.8565 Epoch 84/200 5574/5574 [==============================] - 2s 310us/step - loss: 0.5050 - acc: 0.8735 - val_loss: 0.5545 - val_acc: 0.8540 Epoch 85/200 5574/5574 [==============================] - 2s 408us/step - loss: 0.4969 - acc: 0.8742 - val_loss: 0.5469 - val_acc: 0.8573 Epoch 86/200 5574/5574 [==============================] - 2s 280us/step - loss: 0.4889 - acc: 0.8760 - val_loss: 0.5387 - val_acc: 0.8594 Epoch 87/200 5574/5574 [==============================] - 2s 286us/step - loss: 0.4811 - acc: 0.8793 - val_loss: 0.5309 - val_acc: 0.8615 Epoch 88/200 5574/5574 [==============================] - 1s 269us/step - loss: 0.4735 - acc: 0.8812 - val_loss: 0.5242 - val_acc: 0.8598 Epoch 89/200 5574/5574 [==============================] - 2s 355us/step - loss: 0.4660 - acc: 0.8837 - val_loss: 0.5157 - val_acc: 0.8666 Epoch 90/200 5574/5574 [==============================] - 2s 277us/step - loss: 0.4584 - acc: 0.8863 - val_loss: 0.5096 - val_acc: 0.8649 Epoch 91/200 5574/5574 [==============================] - 1s 265us/step - loss: 0.4515 - acc: 0.8861 - val_loss: 0.5014 - val_acc: 0.8721 Epoch 92/200 5574/5574 [==============================] - 2s 271us/step - loss: 0.4446 - acc: 0.8900 - val_loss: 0.4944 - val_acc: 0.8725 Epoch 93/200 5574/5574 [==============================] - 2s 349us/step - loss: 0.4377 - acc: 0.8902 - val_loss: 0.4872 - val_acc: 0.8754 Epoch 94/200 5574/5574 [==============================] - 2s 277us/step - loss: 0.4309 - acc: 0.8931 - val_loss: 0.4811 - val_acc: 0.8779 Epoch 95/200 5574/5574 [==============================] - 2s 276us/step - loss: 0.4243 - acc: 0.8958 - val_loss: 0.4757 - val_acc: 0.8758 Epoch 96/200 5574/5574 [==============================] - 1s 192us/step - loss: 0.4180 - acc: 0.8952 - val_loss: 0.4682 - val_acc: 0.8792 Epoch 97/200 5574/5574 [==============================] - 1s 193us/step - loss: 0.4117 - acc: 0.8979 - val_loss: 0.4634 - val_acc: 0.8775 Epoch 98/200 5574/5574 [==============================] - 1s 225us/step - loss: 0.4055 - acc: 0.8983 - val_loss: 0.4562 - val_acc: 0.8855 Epoch 99/200 5574/5574 [==============================] - 1s 180us/step - loss: 0.3996 - acc: 0.9011 - val_loss: 0.4505 - val_acc: 0.8826 Epoch 100/200 5574/5574 [==============================] - 1s 201us/step - loss: 0.3937 - acc: 0.9003 - val_loss: 0.4453 - val_acc: 0.8834 Epoch 101/200 5574/5574 [==============================] - 1s 260us/step - loss: 0.3878 - acc: 0.9024 - val_loss: 0.4390 - val_acc: 0.8893 Epoch 102/200 5574/5574 [==============================] - 1s 216us/step - loss: 0.3823 - acc: 0.9058 - val_loss: 0.4334 - val_acc: 0.8906 Epoch 103/200 5574/5574 [==============================] - 1s 235us/step - loss: 
0.3769 - acc: 0.9064 - val_loss: 0.4291 - val_acc: 0.8889 Epoch 104/200 5574/5574 [==============================] - 1s 192us/step - loss: 0.3718 - acc: 0.9069 - val_loss: 0.4244 - val_acc: 0.8914 Epoch 105/200 5574/5574 [==============================] - 1s 250us/step - loss: 0.3666 - acc: 0.9089 - val_loss: 0.4195 - val_acc: 0.8914 Epoch 106/200 5574/5574 [==============================] - 1s 211us/step - loss: 0.3615 - acc: 0.9099 - val_loss: 0.4132 - val_acc: 0.8935 Epoch 107/200 5574/5574 [==============================] - 1s 250us/step - loss: 0.3566 - acc: 0.9116 - val_loss: 0.4088 - val_acc: 0.8952 Epoch 108/200 5574/5574 [==============================] - 1s 197us/step - loss: 0.3517 - acc: 0.9128 - val_loss: 0.4043 - val_acc: 0.8931 Epoch 109/200 5574/5574 [==============================] - 1s 188us/step - loss: 0.3468 - acc: 0.9139 - val_loss: 0.3996 - val_acc: 0.8960 Epoch 110/200 5574/5574 [==============================] - 2s 342us/step - loss: 0.3420 - acc: 0.9144 - val_loss: 0.3951 - val_acc: 0.8973 Epoch 111/200 5574/5574 [==============================] - 2s 362us/step - loss: 0.3374 - acc: 0.9160 - val_loss: 0.3917 - val_acc: 0.8948 Epoch 112/200 5574/5574 [==============================] - 2s 325us/step - loss: 0.3330 - acc: 0.9173 - val_loss: 0.3864 - val_acc: 0.9007 Epoch 113/200 5574/5574 [==============================] - 1s 246us/step - loss: 0.3286 - acc: 0.9191 - val_loss: 0.3824 - val_acc: 0.8986 Epoch 114/200 5574/5574 [==============================] - 1s 222us/step - loss: 0.3242 - acc: 0.9194 - val_loss: 0.3783 - val_acc: 0.8986 Epoch 115/200 5574/5574 [==============================] - 2s 373us/step - loss: 0.3199 - acc: 0.9209 - val_loss: 0.3747 - val_acc: 0.9011 Epoch 116/200 5574/5574 [==============================] - 2s 301us/step - loss: 0.3157 - acc: 0.9216 - val_loss: 0.3699 - val_acc: 0.9057 Epoch 117/200 5574/5574 [==============================] - 1s 260us/step - loss: 0.3116 - acc: 0.9238 - val_loss: 0.3668 - val_acc: 0.9032 Epoch 118/200 5574/5574 [==============================] - 2s 288us/step - loss: 0.3077 - acc: 0.9247 - val_loss: 0.3629 - val_acc: 0.9040 Epoch 119/200 5574/5574 [==============================] - 1s 192us/step - loss: 0.3039 - acc: 0.9241 - val_loss: 0.3594 - val_acc: 0.9036 Epoch 120/200 5574/5574 [==============================] - 1s 221us/step - loss: 0.2998 - acc: 0.9268 - val_loss: 0.3562 - val_acc: 0.9045 Epoch 121/200 5574/5574 [==============================] - 1s 200us/step - loss: 0.2963 - acc: 0.9257 - val_loss: 0.3521 - val_acc: 0.9070 Epoch 122/200 5574/5574 [==============================] - 1s 184us/step - loss: 0.2924 - acc: 0.9277 - val_loss: 0.3486 - val_acc: 0.9082 Epoch 123/200 5574/5574 [==============================] - 1s 183us/step - loss: 0.2889 - acc: 0.9290 - val_loss: 0.3455 - val_acc: 0.9091 Epoch 124/200 5574/5574 [==============================] - 1s 183us/step - loss: 0.2852 - acc: 0.9309 - val_loss: 0.3422 - val_acc: 0.9095 Epoch 125/200 5574/5574 [==============================] - 1s 190us/step - loss: 0.2818 - acc: 0.9299 - val_loss: 0.3392 - val_acc: 0.9091 Epoch 126/200 5574/5574 [==============================] - 1s 190us/step - loss: 0.2784 - acc: 0.9315 - val_loss: 0.3354 - val_acc: 0.9104 Epoch 127/200 5574/5574 [==============================] - 1s 193us/step - loss: 0.2749 - acc: 0.9324 - val_loss: 0.3334 - val_acc: 0.9095 Epoch 128/200 5574/5574 [==============================] - 1s 203us/step - loss: 0.2717 - acc: 0.9333 - val_loss: 0.3304 - val_acc: 0.9099 Epoch 129/200 
5574/5574 [==============================] - 1s 206us/step - loss: 0.2686 - acc: 0.9336 - val_loss: 0.3279 - val_acc: 0.9099 Epoch 130/200 5574/5574 [==============================] - 1s 183us/step - loss: 0.2654 - acc: 0.9351 - val_loss: 0.3250 - val_acc: 0.9104 Epoch 131/200 5574/5574 [==============================] - 1s 181us/step - loss: 0.2622 - acc: 0.9354 - val_loss: 0.3224 - val_acc: 0.9120 Epoch 132/200 5574/5574 [==============================] - 1s 182us/step - loss: 0.2592 - acc: 0.9351 - val_loss: 0.3193 - val_acc: 0.9112 Epoch 133/200 5574/5574 [==============================] - 1s 187us/step - loss: 0.2563 - acc: 0.9365 - val_loss: 0.3167 - val_acc: 0.9120 Epoch 134/200 5574/5574 [==============================] - 1s 182us/step - loss: 0.2534 - acc: 0.9374 - val_loss: 0.3144 - val_acc: 0.9116 Epoch 135/200 5574/5574 [==============================] - 1s 179us/step - loss: 0.2505 - acc: 0.9370 - val_loss: 0.3112 - val_acc: 0.9158 Epoch 136/200 5574/5574 [==============================] - 1s 180us/step - loss: 0.2477 - acc: 0.9374 - val_loss: 0.3092 - val_acc: 0.9162 Epoch 137/200 5574/5574 [==============================] - 1s 181us/step - loss: 0.2449 - acc: 0.9388 - val_loss: 0.3064 - val_acc: 0.9162 Epoch 138/200 5574/5574 [==============================] - 1s 180us/step - loss: 0.2423 - acc: 0.9390 - val_loss: 0.3045 - val_acc: 0.9184 Epoch 139/200 5574/5574 [==============================] - 1s 200us/step - loss: 0.2395 - acc: 0.9390 - val_loss: 0.3022 - val_acc: 0.9179 Epoch 140/200 5574/5574 [==============================] - 1s 202us/step - loss: 0.2368 - acc: 0.9401 - val_loss: 0.2997 - val_acc: 0.9184 Epoch 141/200 5574/5574 [==============================] - 1s 239us/step - loss: 0.2344 - acc: 0.9403 - val_loss: 0.2972 - val_acc: 0.9205 Epoch 142/200 5574/5574 [==============================] - 1s 263us/step - loss: 0.2320 - acc: 0.9406 - val_loss: 0.2957 - val_acc: 0.9192 Epoch 143/200 5574/5574 [==============================] - 2s 292us/step - loss: 0.2295 - acc: 0.9415 - val_loss: 0.2941 - val_acc: 0.9179 Epoch 144/200 5574/5574 [==============================] - 2s 301us/step - loss: 0.2271 - acc: 0.9412 - val_loss: 0.2923 - val_acc: 0.9179 Epoch 145/200 5574/5574 [==============================] - 2s 279us/step - loss: 0.2248 - acc: 0.9421 - val_loss: 0.2899 - val_acc: 0.9196 Epoch 146/200 5574/5574 [==============================] - 2s 297us/step - loss: 0.2225 - acc: 0.9419 - val_loss: 0.2873 - val_acc: 0.9226 Epoch 147/200 5574/5574 [==============================] - 2s 315us/step - loss: 0.2202 - acc: 0.9419 - val_loss: 0.2855 - val_acc: 0.9217: 1s - loss: Epoch 148/200 5574/5574 [==============================] - 1s 198us/step - loss: 0.2176 - acc: 0.9428 - val_loss: 0.2845 - val_acc: 0.9209 Epoch 149/200 5574/5574 [==============================] - 1s 207us/step - loss: 0.2157 - acc: 0.9428 - val_loss: 0.2825 - val_acc: 0.9213 Epoch 150/200 5574/5574 [==============================] - 1s 225us/step - loss: 0.2135 - acc: 0.9429 - val_loss: 0.2806 - val_acc: 0.9200 Epoch 151/200 5574/5574 [==============================] - 2s 275us/step - loss: 0.2113 - acc: 0.9429 - val_loss: 0.2785 - val_acc: 0.9226 Epoch 152/200 5574/5574 [==============================] - 2s 310us/step - loss: 0.2093 - acc: 0.9440 - val_loss: 0.2770 - val_acc: 0.9221 Epoch 153/200 5574/5574 [==============================] - 1s 208us/step - loss: 0.2072 - acc: 0.9440 - val_loss: 0.2746 - val_acc: 0.9230 Epoch 154/200 5574/5574 [==============================] - 1s 186us/step - loss: 
0.2051 - acc: 0.9444 - val_loss: 0.2738 - val_acc: 0.9226 Epoch 155/200 5574/5574 [==============================] - 1s 215us/step - loss: 0.2031 - acc: 0.9447 - val_loss: 0.2717 - val_acc: 0.9230 Epoch 156/200 5574/5574 [==============================] - 2s 309us/step - loss: 0.2010 - acc: 0.9447 - val_loss: 0.2707 - val_acc: 0.9230 Epoch 157/200 5574/5574 [==============================] - 2s 298us/step - loss: 0.1992 - acc: 0.9455 - val_loss: 0.2696 - val_acc: 0.9230 Epoch 158/200 5574/5574 [==============================] - 2s 321us/step - loss: 0.1973 - acc: 0.9455 - val_loss: 0.2679 - val_acc: 0.9226 Epoch 159/200 5574/5574 [==============================] - 2s 321us/step - loss: 0.1953 - acc: 0.9455 - val_loss: 0.2663 - val_acc: 0.9238 Epoch 160/200 5574/5574 [==============================] - 2s 291us/step - loss: 0.1935 - acc: 0.9473 - val_loss: 0.2645 - val_acc: 0.9238 Epoch 161/200 5574/5574 [==============================] - 2s 320us/step - loss: 0.1914 - acc: 0.9482 - val_loss: 0.2627 - val_acc: 0.9242 Epoch 162/200 5574/5574 [==============================] - 2s 275us/step - loss: 0.1899 - acc: 0.9480 - val_loss: 0.2611 - val_acc: 0.9238 Epoch 163/200 5574/5574 [==============================] - 1s 229us/step - loss: 0.1880 - acc: 0.9490 - val_loss: 0.2599 - val_acc: 0.9242 Epoch 164/200 5574/5574 [==============================] - 1s 180us/step - loss: 0.1863 - acc: 0.9494 - val_loss: 0.2586 - val_acc: 0.9242 Epoch 165/200 5574/5574 [==============================] - 1s 186us/step - loss: 0.1845 - acc: 0.9494 - val_loss: 0.2578 - val_acc: 0.9242 Epoch 166/200 5574/5574 [==============================] - 1s 249us/step - loss: 0.1828 - acc: 0.9494 - val_loss: 0.2564 - val_acc: 0.9238 Epoch 167/200 5574/5574 [==============================] - 1s 259us/step - loss: 0.1811 - acc: 0.9499 - val_loss: 0.2550 - val_acc: 0.9247 Epoch 168/200 5574/5574 [==============================] - 1s 249us/step - loss: 0.1796 - acc: 0.9507 - val_loss: 0.2537 - val_acc: 0.9238 Epoch 169/200 5574/5574 [==============================] - 1s 254us/step - loss: 0.1777 - acc: 0.9510 - val_loss: 0.2526 - val_acc: 0.9247 Epoch 170/200 5574/5574 [==============================] - 1s 186us/step - loss: 0.1763 - acc: 0.9516 - val_loss: 0.2517 - val_acc: 0.9247 Epoch 171/200 5574/5574 [==============================] - 1s 183us/step - loss: 0.1746 - acc: 0.9519 - val_loss: 0.2510 - val_acc: 0.9251 Epoch 172/200 5574/5574 [==============================] - 1s 221us/step - loss: 0.1732 - acc: 0.9512 - val_loss: 0.2482 - val_acc: 0.9247 Epoch 173/200 5574/5574 [==============================] - 2s 308us/step - loss: 0.1714 - acc: 0.9526 - val_loss: 0.2480 - val_acc: 0.9259 Epoch 174/200 5574/5574 [==============================] - 2s 284us/step - loss: 0.1700 - acc: 0.9516 - val_loss: 0.2467 - val_acc: 0.9255 Epoch 175/200 5574/5574 [==============================] - 1s 196us/step - loss: 0.1684 - acc: 0.9528 - val_loss: 0.2454 - val_acc: 0.9259 Epoch 176/200 5574/5574 [==============================] - 1s 196us/step - loss: 0.1670 - acc: 0.9530 - val_loss: 0.2452 - val_acc: 0.9263 Epoch 177/200 5574/5574 [==============================] - 1s 203us/step - loss: 0.1656 - acc: 0.9543 - val_loss: 0.2437 - val_acc: 0.9251 Epoch 178/200 5574/5574 [==============================] - 2s 299us/step - loss: 0.1641 - acc: 0.9532 - val_loss: 0.2421 - val_acc: 0.9255 Epoch 179/200 5574/5574 [==============================] - 2s 311us/step - loss: 0.1627 - acc: 0.9548 - val_loss: 0.2412 - val_acc: 0.9255 Epoch 180/200 
5574/5574 [==============================] - 2s 287us/step - loss: 0.1612 - acc: 0.9562 - val_loss: 0.2410 - val_acc: 0.9255 Epoch 181/200 5574/5574 [==============================] - 1s 194us/step - loss: 0.1598 - acc: 0.9543 - val_loss: 0.2397 - val_acc: 0.9263 Epoch 182/200 5574/5574 [==============================] - 1s 190us/step - loss: 0.1584 - acc: 0.9553 - val_loss: 0.2380 - val_acc: 0.9259 Epoch 183/200 5574/5574 [==============================] - 2s 312us/step - loss: 0.1572 - acc: 0.9562 - val_loss: 0.2378 - val_acc: 0.9268 Epoch 184/200 5574/5574 [==============================] - 2s 282us/step - loss: 0.1558 - acc: 0.9555 - val_loss: 0.2375 - val_acc: 0.9259 Epoch 185/200 5574/5574 [==============================] - 2s 273us/step - loss: 0.1545 - acc: 0.9566 - val_loss: 0.2360 - val_acc: 0.9268 Epoch 186/200 5574/5574 [==============================] - 2s 307us/step - loss: 0.1531 - acc: 0.9566 - val_loss: 0.2350 - val_acc: 0.9263 Epoch 187/200 5574/5574 [==============================] - 2s 307us/step - loss: 0.1519 - acc: 0.9569 - val_loss: 0.2345 - val_acc: 0.9259 Epoch 188/200 5574/5574 [==============================] - 2s 298us/step - loss: 0.1507 - acc: 0.9571 - val_loss: 0.2336 - val_acc: 0.9263 Epoch 189/200 5574/5574 [==============================] - 2s 282us/step - loss: 0.1493 - acc: 0.9577 - val_loss: 0.2327 - val_acc: 0.9263 Epoch 190/200 5574/5574 [==============================] - 2s 289us/step - loss: 0.1481 - acc: 0.9578 - val_loss: 0.2315 - val_acc: 0.9263 Epoch 191/200 5574/5574 [==============================] - 1s 241us/step - loss: 0.1468 - acc: 0.9580 - val_loss: 0.2315 - val_acc: 0.9259 Epoch 192/200 5574/5574 [==============================] - 1s 219us/step - loss: 0.1456 - acc: 0.9584 - val_loss: 0.2292 - val_acc: 0.9276 Epoch 193/200 5574/5574 [==============================] - 2s 308us/step - loss: 0.1444 - acc: 0.9589 - val_loss: 0.2293 - val_acc: 0.9263 Epoch 194/200 5574/5574 [==============================] - 1s 203us/step - loss: 0.1433 - acc: 0.9589 - val_loss: 0.2280 - val_acc: 0.9268 Epoch 195/200 5574/5574 [==============================] - 1s 264us/step - loss: 0.1420 - acc: 0.9595 - val_loss: 0.2273 - val_acc: 0.9285 Epoch 196/200 5574/5574 [==============================] - 1s 211us/step - loss: 0.1409 - acc: 0.9600 - val_loss: 0.2267 - val_acc: 0.9285 Epoch 197/200 5574/5574 [==============================] - 1s 242us/step - loss: 0.1398 - acc: 0.9598 - val_loss: 0.2256 - val_acc: 0.9289 Epoch 198/200 5574/5574 [==============================] - 1s 210us/step - loss: 0.1386 - acc: 0.9609 - val_loss: 0.2254 - val_acc: 0.9293 Epoch 199/200 5574/5574 [==============================] - 1s 205us/step - loss: 0.1375 - acc: 0.9611 - val_loss: 0.2243 - val_acc: 0.9289 Epoch 200/200 5574/5574 [==============================] - 1s 191us/step - loss: 0.1363 - acc: 0.9618 - val_loss: 0.2241 - val_acc: 0.9285
<tensorflow.python.keras.callbacks.History at 0x14d1a9c10>
nn_pred = model.predict_classes(test)
nnyhat = confusion_matrix(test_types, nn_pred)
print(nnyhat)
print("Accuracy: ", metrics.accuracy_score(r_test_types, nn_pred))
# for i in range(len(r_test_labels)):
# print("X=%s, Predicted=%s" % (r_test_labels[i], ynew[i]))
y_true = pd.Series(r_test_labels)
y_pred = pd.Series(nn_pred)
pd.crosstab(y_true, y_pred, rownames=['True'], colnames=['Predicted'], margins=True)
[[139 0 1 0 0 0 0 0 0 0 0 0 0 0 4 0 0] [ 0 123 0 0 1 7 1 0 0 0 0 0 0 0 0 0 0] [ 0 4 369 0 0 0 0 0 0 0 0 0 0 0 0 0 0] [ 0 2 0 10 0 0 0 0 0 0 0 0 0 0 0 0 0] [ 0 2 0 0 145 5 0 0 0 0 0 0 2 0 0 0 0] [ 0 24 0 0 6 33 1 0 0 0 0 0 2 0 0 0 0] [ 0 3 0 0 0 5 144 0 0 0 0 0 3 1 1 0 0] [ 0 0 0 0 0 0 0 30 0 0 0 0 0 0 0 0 0] [ 0 0 2 0 0 0 0 0 176 0 0 0 0 0 0 0 0] [ 0 0 0 1 0 0 0 0 0 116 1 0 1 0 0 1 0] [ 0 3 4 0 1 1 0 0 0 0 159 0 3 0 2 0 0] [ 0 0 2 0 0 0 0 0 0 0 0 168 0 0 0 0 0] [ 0 0 0 0 7 7 2 0 0 0 1 0 126 0 0 3 0] [ 0 0 0 0 0 0 0 1 0 0 3 0 0 165 0 0 0] [ 0 1 3 0 0 0 0 0 0 0 1 0 2 0 172 0 0] [ 0 0 0 0 0 0 1 0 1 0 0 0 6 0 0 42 0] [ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 122]] ('Accuracy: ', 0.9423400673400674)
Predicted | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | All |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
True | ||||||||||||||||||
blca | 0 | 123 | 0 | 0 | 1 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 |
brca | 0 | 4 | 369 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 373 |
chol | 0 | 2 | 0 | 10 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
coad | 0 | 2 | 0 | 0 | 145 | 5 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 154 |
esca | 0 | 24 | 0 | 0 | 6 | 33 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 66 |
hnsc | 0 | 3 | 0 | 0 | 0 | 5 | 144 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 1 | 0 | 0 | 157 |
kich | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 |
kirc | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 178 |
lich | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 116 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 120 |
luad | 0 | 3 | 4 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 159 | 0 | 3 | 0 | 2 | 0 | 0 | 173 |
ov | 139 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 0 | 144 |
paad | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 6 | 0 | 0 | 42 | 0 | 50 |
prad | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0 | 0 | 0 | 0 | 0 | 170 |
skcm | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 122 | 123 |
stad | 0 | 0 | 0 | 0 | 7 | 7 | 2 | 0 | 0 | 0 | 1 | 0 | 126 | 0 | 0 | 3 | 0 | 146 |
thca | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 | 0 | 0 | 165 | 0 | 0 | 0 | 169 |
ucec | 0 | 1 | 3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 | 0 | 172 | 0 | 0 | 179 |
All | 139 | 162 | 381 | 11 | 160 | 58 | 149 | 31 | 177 | 116 | 165 | 168 | 145 | 166 | 179 | 47 | 122 | 2376 |
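The crosstab above pairs string labels on the rows with the integer class indices returned by predict_classes on the columns, so the diagonal is hard to follow. Below is a minimal sketch (not part of the original notebook) of one way to relabel the predictions first, assuming the index-to-label correspondence can be rebuilt from the np_types and np_labels arrays loaded earlier; the idx_to_label lookup is a hypothetical helper.
# Hypothetical lookup from numeric class index to tumor-type label
idx_to_label = dict(zip(np_types.ravel(), np_labels.ravel()))
# Relabel the integer predictions so rows and columns share the same vocabulary
nn_pred_named = pd.Series(nn_pred).map(idx_to_label)
pd.crosstab(pd.Series(r_test_labels), nn_pred_named, rownames=['True'], colnames=['Predicted'], margins=True)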
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_15 (Dense)             (None, 128)               307712
_________________________________________________________________
dense_16 (Dense)             (None, 128)               16512
_________________________________________________________________
dense_17 (Dense)             (None, 17)                2193
=================================================================
Total params: 326,417
Trainable params: 326,417
Non-trainable params: 0
_________________________________________________________________
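The parameter counts in the summary follow directly from the dense layer shapes: each layer has inputs * units weights plus units biases. A quick arithmetic check, assuming nothing beyond the shapes shown above:
# Dense layer params = inputs * units + units (bias)
print(2403 * 128 + 128)       # dense_15: 307712
print(128 * 128 + 128)        # dense_16: 16512
print(128 * 17 + 17)          # dense_17: 2193
print(307712 + 16512 + 2193)  # total:    326417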
print(classification_report(test_types, nn_pred))
print(accuracy_score(test_types, nn_pred, normalize=True, sample_weight=None))
              precision    recall  f1-score   support

           0       0.99      0.97      0.98       144
           1       0.78      0.94      0.86       132
           2       0.96      1.00      0.98       373
           3       1.00      0.83      0.91        12
           4       0.90      0.94      0.92       154
           5       0.66      0.53      0.59        66
           6       0.96      0.95      0.96       157
           7       1.00      0.97      0.98        30
           8       0.98      0.99      0.99       178
           9       1.00      0.97      0.99       120
          10       0.97      0.93      0.95       173
          11       1.00      0.98      0.99       170
          12       0.89      0.87      0.88       146
          13       1.00      0.99      0.99       169
          14       0.97      0.96      0.96       179
          15       0.93      0.86      0.90        50
          16       0.99      0.98      0.99       123

   micro avg       0.95      0.95      0.95      2376
   macro avg       0.94      0.92      0.93      2376
weighted avg       0.95      0.95      0.95      2376

0.9482323232323232
print(classification_report(test_types, nn_pred))
print(accuracy_score(test_types, nn_pred, normalize=True, sample_weight=None))
              precision    recall  f1-score   support

           0       1.00      0.97      0.98       144
           1       0.76      0.93      0.84       132
           2       0.97      0.99      0.98       373
           3       0.91      0.83      0.87        12
           4       0.91      0.94      0.92       154
           5       0.57      0.50      0.53        66
           6       0.97      0.92      0.94       157
           7       0.97      1.00      0.98        30
           8       0.99      0.99      0.99       178
           9       1.00      0.97      0.98       120
          10       0.96      0.92      0.94       173
          11       1.00      0.99      0.99       170
          12       0.87      0.86      0.87       146
          13       0.99      0.98      0.99       169
          14       0.96      0.96      0.96       179
          15       0.89      0.84      0.87        50
          16       1.00      0.99      1.00       123

   micro avg       0.94      0.94      0.94      2376
   macro avg       0.92      0.92      0.92      2376
weighted avg       0.94      0.94      0.94      2376

0.9423400673400674
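The 0.9423 accuracy reported here can be cross-checked against the confusion matrix printed earlier: accuracy is simply the sum of the diagonal (correct predictions) divided by the number of test samples. A small sanity check using the nnyhat matrix already in memory; float() guards against Python 2 integer division:
# Correct predictions lie on the diagonal of the confusion matrix
correct = numpy.trace(nnyhat)      # 2239 for the matrix printed above
total = nnyhat.sum()               # 2376 test samples
print(correct / float(total))      # ~0.9423, matching accuracy_score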
model = tf.keras.Sequential()
print(train.shape)
#model.add(layers.InputLayer(input_tensor=train, input_shape=train.shape))
model.add(layers.Dense(128, activation='sigmoid', input_dim=2403))
#model.add(layers.Dense(128, activation='sigmoid'))
#model.add(layers.Dense(128, activation='sigmoid'))
model.add(layers.Dense(128, activation='sigmoid'))
model.add(layers.Dense(17, activation='softmax'))
model.compile(optimizer=tf.train.RMSPropOptimizer(5.1794746792312125e-05),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
(5574, 2403)
model.fit(train, encoded_train, validation_data=(test, encoded_test), epochs=200, batch_size=32)
Train on 5574 samples, validate on 2376 samples
Epoch 1/200
5574/5574 [==============================] - 2s 311us/step - loss: 2.9108 - acc: 0.0646 - val_loss: 2.7692 - val_acc: 0.1578
Epoch 2/200
5574/5574 [==============================] - 1s 206us/step - loss: 2.6696 - acc: 0.1502 - val_loss: 2.5982 - val_acc: 0.1574
...
Epoch 100/200
5574/5574 [==============================] - 1s 165us/step - loss: 0.8924 - acc: 0.7278 - val_loss: 0.9241 - val_acc: 0.7151
...
Epoch 199/200
5574/5574 [==============================] - 1s 205us/step - loss: 0.7091 - acc: 0.7844 - val_loss: 0.7595 - val_acc: 0.7580
Epoch 200/200
5574/5574 [==============================] - 1s 207us/step - loss: 0.7071 - acc: 0.7824 - val_loss: 0.7602 - val_acc: 0.7601
<tensorflow.python.keras.callbacks.History at 0x10e314bd0>
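Both the training and validation losses are still falling when this run stops at epoch 200, so the fixed epoch count is somewhat arbitrary. One option, shown as a sketch below and not part of the original runs, is to let Keras stop once the validation loss plateaus via the EarlyStopping callback; restore_best_weights requires a reasonably recent TensorFlow/Keras, and since the "validation" data here is the test split, a separate validation set would be cleaner for this purpose.
# Sketch: stop once val_loss has not improved for 10 epochs
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
model.fit(train, encoded_train, validation_data=(test, encoded_test), epochs=500, batch_size=32, callbacks=[early_stop])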
# 3 sigmoid (+ input) and 1 output: 0.7441077441077442
nn_pred = model.predict_classes(test)
nnyhat = confusion_matrix(test_types, nn_pred)
print(nnyhat)
print("Accuracy: ", metrics.accuracy_score(r_test_types, nn_pred))
# for i in range(len(r_test_labels)):
# print("X=%s, Predicted=%s" % (r_test_labels[i], ynew[i]))
y_true = pd.Series(r_test_labels)
y_pred = pd.Series(nn_pred)
pd.crosstab(y_true, y_pred, rownames=['True'], colnames=['Predicted'], margins=True)
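predict_classes is specific to Sequential models and was removed in later TensorFlow releases; the equivalent, which also works for functional models, is to take the argmax of the predicted softmax probabilities. A sketch using the variables already defined in this notebook:
# Equivalent to model.predict_classes(test): most probable class per sample
probs = model.predict(test)             # shape (n_samples, 17) softmax outputs
nn_pred = numpy.argmax(probs, axis=1)   # integer class indices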
## Try cross validation
# Load libraries
import numpy as np
from tensorflow.keras import models
from tensorflow.keras import layers
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import cross_val_score
from sklearn.datasets import make_classification
# Create function returning a compiled network
def create_network():
    model = tf.keras.Sequential()
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(128, activation='sigmoid'))
    model.add(layers.Dense(17, activation='softmax'))
    model.compile(optimizer=tf.train.RMSPropOptimizer(5.1794746792312125e-05),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    # Return compiled network
    return model
# Wrap Keras model so it can be used by scikit-learn
neural_network = KerasClassifier(build_fn=create_network,
                                 epochs=100,
                                 batch_size=32,
                                 verbose=0)
# Evaluate neural network using three-fold cross-validation
cross_val_score(neural_network, train, encoded_train, cv=3)
array([0.65285253, 0.66469322, 0.66953714])
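The three fold scores sit around 0.66, noticeably below the ~0.76 validation accuracy of the single split above, which is expected since each fold trains for only 100 epochs on two thirds of the training data. A short follow-up sketch, assuming the same neural_network, train, and encoded_train objects, to report the result as a mean and spread:
# Summarize cross-validation as mean +/- standard deviation
scores = cross_val_score(neural_network, train, encoded_train, cv=3)
print("CV accuracy: %.4f +/- %.4f" % (scores.mean(), scores.std()))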