I am trying to make a tf model that can train from a dataset like this:
f1 | f2 | ... | f30 | r1 |
---|---|---|---|---|
0.5 | 0.66 | ... | 0.11 | 1 |
0.3 | 0.7 | ... | 0.3 | 0 |
and return a percentage of how confident it is that an entry is 1 in r1. I saw a model used for classifying MNIST pictures from this notebook: https://github.com/luisroque/probabilistic-deep-learning/blob/main/workshops/Primer%20on%20Probabilistic%20Deep%20Learning%20-%20Modeling%20Aleatoric%20Uncertainty.ipynb and tried to adapt it for my own use. This is what I have so far:
import tensorflow as tf
import tensorflow_probability as tfp
import pandas as pd
from keras import Sequential
from keras.layers import Dense
from keras.optimizers import RMSprop
import tensorflow_datasets as tfds
tfd = tfp.distributions
tfpl = tfp.layers
def load_data(name):
    """Load the fraud CSV and return a shuffled 80/20 train/test split.

    Parameters
    ----------
    name : str
        Path to a CSV file that contains an ``is_fraud`` label column
        alongside the feature columns.

    Returns
    -------
    tuple
        ``(x_train, y_train, x_test, y_test, input_shape)`` where the x
        values are DataFrames without the label column, the y values are
        numpy arrays of labels, and ``input_shape`` is the feature count.
    """
    dataset = pd.read_csv(name)
    input_shape = len(dataset.columns) - 1

    # BUG FIX: the original returned the entire dataset as BOTH the train
    # and the test split, so the reported "test" accuracy was just training
    # accuracy. Shuffle deterministically and hold out 20% for evaluation.
    shuffled = dataset.sample(frac=1.0, random_state=0).reset_index(drop=True)
    split_at = int(len(shuffled) * 0.8)
    train, test = shuffled.iloc[:split_at], shuffled.iloc[split_at:]

    x_train = train.loc[:, train.columns != 'is_fraud']
    y_train = train['is_fraud'].values
    x_test = test.loc[:, test.columns != 'is_fraud']
    y_test = test['is_fraud'].values
    return x_train, y_train, x_test, y_test, input_shape
def get_probabilistic_model(input_shape, loss, optimizer, metrics):
    """Build and compile a small probabilistic two-class classifier.

    The network maps ``input_shape`` features to the parameters of a
    two-class ``OneHotCategorical`` distribution, so calling the model on a
    batch yields a tfp distribution whose class probabilities express the
    model's confidence that each row is fraudulent.

    Parameters
    ----------
    input_shape : int
        Number of input features per example.
    loss : callable
        Loss taking ``(y_true, distribution)`` — e.g. the negative
        log-likelihood defined alongside this function.
    optimizer : keras optimizer instance.
    metrics : list
        Metric names/objects forwarded to ``compile``.

    Returns
    -------
    A compiled keras ``Sequential`` model.
    """
    model = Sequential([
        Dense(30, input_dim=input_shape, activation='relu'),
        # Emit exactly as many values as the distribution needs as parameters.
        Dense(units=tfpl.OneHotCategorical.params_size(2)),
        # The final "layer" is a distribution; ``mode`` is used whenever the
        # output must be converted to a plain tensor (e.g. for metrics).
        tfpl.OneHotCategorical(event_size=2,
                               convert_to_tensor_fn=tfd.Distribution.mode),
    ])
    model.compile(loss=loss,
                  optimizer=optimizer,
                  metrics=metrics,
                  # NOTE(review): needed on some TF 2.x versions so a custom
                  # loss can receive the distribution object instead of a
                  # tensor; deprecated in newer TF — confirm before upgrading.
                  experimental_run_tf_function=False)
    return model
def nll(y_true, y_pred):
    """Negative log-likelihood of the labels under the predicted distribution.

    ``y_pred`` is the tfp distribution emitted by the model's final layer;
    minimizing this pushes the distribution to assign high probability to
    the observed one-hot labels.
    """
    log_likelihood = y_pred.log_prob(y_true)
    return -log_likelihood
x_train, y_train, x_test, y_test, input_shape = load_data('hashed_txns.csv')

tf.random.set_seed(0)  # reproducible weight initialization / shuffling
probabilistic_model = get_probabilistic_model(
    input_shape=input_shape,
    loss=nll,
    optimizer=RMSprop(),
    metrics=['accuracy'])

# Labels are one-hot encoded to match the OneHotCategorical output head.
# BUG FIX: 5 epochs is far too few for this tabular dataset to converge
# (accuracy stayed at chance level); 300 epochs lets the loss settle.
probabilistic_model.fit(x_train, tf.keras.utils.to_categorical(y_train),
                        epochs=300, verbose=0)

test_accuracy = probabilistic_model.evaluate(
    x_test, tf.keras.utils.to_categorical(y_test), verbose=0)[1]
print(f'Accuracy on test set: {test_accuracy}')
However, because the components used in that model are probably not intended for binary probability classification, I only got 50% accuracy with mine. I tried using binary cross-entropy as the loss function, like I did in my normal classification code, but it raised an error about there being no gradients for any of the variables. Any recommendations for improvement? The dataset I used is here if needed:
profileType,state,nearScams,category,is_fraud,acct_num-1,acct_num-2,acct_num-3,acct_num-4,acct_num-5,acct_num-6,acct_num-7,acct_num-8,acct_num-9,acct_num-10,acct_num-11,acct_num-12,acct_num-13,acct_num-14,acct_num-15,acct_num-16,acct_num-17,acct_num-18,acct_num-19,acct_num-20
0.0,0.0,0.14285714285714285,0.0,1.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.0,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.020833333333333332,0.14285714285714285,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.020833333333333332,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.04166666666666667,0.14285714285714285,0.3076923076923077,1.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.04166666666666667,0.14285714285714285,0.3076923076923077,0.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.0625,0.14285714285714285,0.38461538461538464,1.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.0625,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.08333333333333333,0.14285714285714285,0.4615384615384616,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.08333333333333333,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.2857142857142857,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.2857142857142857,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.12499999999999999,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.0,0.12499999999999999,0.14285714285714285,0.6923076923076923,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.0,0.14583333333333331,0.2857142857142857,0.0,1.0,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.2857142857142857,0.3076923076923077,0.0,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.16666666666666666,0.14285714285714285,0.38461538461538464,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.16666666666666666,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.18749999999999997,0.14285714285714285,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.18749999999999997,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.20833333333333331,0.2857142857142857,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.0,0.20833333333333331,0.2857142857142857,0.7692307692307694,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.38461538461538464,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.6923076923076923,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.24999999999999997,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.24999999999999997,0.14285714285714285,0.6153846153846154,0.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.2708333333333333,0.14285714285714285,0.3076923076923077,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.14583333333333331,0.0,0.6923076923076923,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.2916666666666667,0.5714285714285714,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2916666666666667,0.5714285714285714,0.6153846153846154,0.0,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.38461538461538464,1.0,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.38461538461538464,0.0,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3125,0.14285714285714285,0.4615384615384616,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3125,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.0,0.14285714285714285,0.38461538461538464,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.0,0.14285714285714285,0.4615384615384616,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.24999999999999997,0.14285714285714285,0.23076923076923078,1.0,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.24999999999999997,0.14285714285714285,0.6923076923076923,0.0,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.18749999999999997,0.14285714285714285,0.8461538461538463,1.0,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.18749999999999997,0.14285714285714285,0.9230769230769231,0.0,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.2857142857142857,0.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3333333333333333,0.14285714285714285,0.4615384615384616,1.0,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3333333333333333,0.14285714285714285,0.8461538461538463,0.0,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3333333333333333,0.14285714285714285,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3333333333333333,0.14285714285714285,0.6153846153846154,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.08333333333333333,0.14285714285714285,0.7692307692307694,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0
0.0,0.08333333333333333,0.14285714285714285,0.7692307692307694,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0
0.0,0.18749999999999997,0.14285714285714285,0.23076923076923078,1.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.18749999999999997,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.9230769230769231,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.6153846153846154,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.14285714285714285,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.14285714285714285,0.4615384615384616,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.38461538461538464,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.2857142857142857,0.3076923076923077,1.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.2857142857142857,0.23076923076923078,0.0,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.375,0.14285714285714285,0.3076923076923077,1.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.375,0.14285714285714285,0.3076923076923077,0.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.2857142857142857,0.5384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.2857142857142857,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3958333333333333,0.14285714285714285,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3958333333333333,0.14285714285714285,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4166666666666667,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5
0.0,0.4166666666666667,0.14285714285714285,0.7692307692307694,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5
0.0,0.08333333333333333,0.14285714285714285,0.07692307692307693,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.08333333333333333,0.14285714285714285,0.7692307692307694,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5
0.0,0.375,0.14285714285714285,1.0000000000000002,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.375,0.14285714285714285,0.38461538461538464,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.4375,0.14285714285714285,0.4615384615384616,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4375,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4583333333333333,0.14285714285714285,0.07692307692307693,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.4583333333333333,0.14285714285714285,0.6153846153846154,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.0,0.12499999999999999,0.14285714285714285,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.12499999999999999,0.14285714285714285,0.6153846153846154,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0
0.0,0.24999999999999997,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.24999999999999997,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4791666666666667,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4791666666666667,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3125,0.14285714285714285,0.3076923076923077,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.0,0.3125,0.14285714285714285,0.4615384615384616,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.2708333333333333,0.14285714285714285,0.4615384615384616,0.0,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.49999999999999994,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.49999999999999994,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.49999999999999994,0.14285714285714285,0.5384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.49999999999999994,0.14285714285714285,0.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.2857142857142857,0.3076923076923077,1.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.3541666666666667,0.2857142857142857,0.5384615384615385,0.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.14285714285714285,0.7692307692307694,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.22916666666666666,0.14285714285714285,0.6923076923076923,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.23076923076923078,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.14583333333333331,0.14285714285714285,0.38461538461538464,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.14285714285714285,0.38461538461538464,1.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.10416666666666667,0.14285714285714285,0.38461538461538464,0.0,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.0,0.4583333333333333,0.14285714285714285,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5
0.0,0.4583333333333333,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5
0.09090909090909091,0.5208333333333333,0.14285714285714285,0.07692307692307693,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5208333333333333,0.14285714285714285,0.23076923076923078,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.24999999999999997,0.14285714285714285,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5416666666666666,0.42857142857142855,0.5384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5416666666666666,0.42857142857142855,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.2857142857142857,0.07692307692307693,1.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.2857142857142857,0.0,0.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.49999999999999994,0.2857142857142857,0.0,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.49999999999999994,0.2857142857142857,0.4615384615384616,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3333333333333333,0.42857142857142855,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3333333333333333,0.42857142857142855,0.8461538461538463,0.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.8571428571428571,0.9230769230769231,1.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.8571428571428571,0.8461538461538463,0.0,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5624999999999999,0.14285714285714285,0.23076923076923078,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5624999999999999,0.14285714285714285,0.9230769230769231,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.8571428571428571,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5
0.09090909090909091,0.22916666666666666,0.8571428571428571,0.07692307692307693,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5
0.09090909090909091,0.0625,0.14285714285714285,0.0,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.0625,0.14285714285714285,0.07692307692307693,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.08333333333333333,0.14285714285714285,0.5384615384615385,1.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.08333333333333333,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3125,0.14285714285714285,0.8461538461538463,1.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3125,0.14285714285714285,0.0,0.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.020833333333333332,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.09090909090909091,0.020833333333333332,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5
0.09090909090909091,0.5833333333333333,0.14285714285714285,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5833333333333333,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3333333333333333,0.2857142857142857,0.7692307692307694,1.0,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3333333333333333,0.2857142857142857,0.0,0.0,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.0625,0.14285714285714285,0.7692307692307694,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.0625,0.14285714285714285,0.38461538461538464,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.49999999999999994,0.14285714285714285,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.49999999999999994,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.6041666666666666,0.14285714285714285,0.15384615384615385,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.6041666666666666,0.14285714285714285,0.15384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.10416666666666667,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.10416666666666667,0.14285714285714285,0.38461538461538464,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.020833333333333332,0.2857142857142857,1.0000000000000002,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.09090909090909091,0.020833333333333332,0.2857142857142857,0.8461538461538463,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5
0.09090909090909091,0.5624999999999999,0.14285714285714285,0.6923076923076923,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.5624999999999999,0.14285714285714285,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.6041666666666666,0.0,0.3076923076923077,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3541666666666667,0.14285714285714285,0.6153846153846154,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.3541666666666667,0.14285714285714285,0.5384615384615385,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
0.09090909090909091,0.22916666666666666,0.42857142857142855,0.0,1.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5
0.09090909090909091,0.22916666666666666,0.42857142857142855,0.7692307692307694,0.0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.0,0.5
Update: it turned out I had simply forgotten that non-image training usually needs far more than 5 epochs. After increasing it to 300 epochs, the model works. I would still appreciate any feedback for improvement, though.