move to new cspot

philippe44
2023-03-25 16:48:41 -07:00
parent c712b78931
commit 008c36facf
2983 changed files with 465270 additions and 13569 deletions

@@ -0,0 +1,57 @@
#!/usr/bin/python
from __future__ import print_function
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import GRU
from keras.models import load_model
from keras import backend as K
import numpy as np
def printVector(f, vector, name):
    # Flatten the tensor and emit it as a C array in Q13 fixed point,
    # i.e. each weight is stored as round(8192*w) in an opus_int16.
    v = np.reshape(vector, (-1))
    #print('static const float ', name, '[', len(v), '] = \n', file=f)
    f.write('static const opus_int16 {}[{}] = {{\n '.format(name, len(v)))
    for i in range(0, len(v)):
        f.write('{}'.format(int(round(8192*v[i]))))
        if i != len(v)-1:
            f.write(',')
        else:
            break
        if i % 8 == 7:
            f.write("\n ")
        else:
            f.write(" ")
    #print(v, file=f)
    f.write('\n};\n\n')
    return

def binary_crossentrop2(y_true, y_pred):
    # Confidence-weighted binary cross-entropy; this is the custom loss the model
    # was trained with, so load_model needs it to deserialize the weights file.
    return K.mean(2*K.abs(y_true-0.5) * K.binary_crossentropy(y_pred, y_true), axis=-1)
model = load_model("weights.hdf5", custom_objects={'binary_crossentrop2': binary_crossentrop2})
weights = model.get_weights()
f = open('rnn_weights.c', 'w')
f.write('/*This file is automatically generated from a Keras model*/\n\n')
f.write('#ifdef HAVE_CONFIG_H\n#include "config.h"\n#endif\n\n#include "mlp.h"\n\n')
printVector(f, weights[0], 'layer0_weights')
printVector(f, weights[1], 'layer0_bias')
printVector(f, weights[2], 'layer1_weights')
printVector(f, weights[3], 'layer1_recur_weights')
printVector(f, weights[4], 'layer1_bias')
printVector(f, weights[5], 'layer2_weights')
printVector(f, weights[6], 'layer2_bias')
f.write('const DenseLayer layer0 = {\n layer0_bias,\n layer0_weights,\n 25, 16, 0\n};\n\n')
f.write('const GRULayer layer1 = {\n layer1_bias,\n layer1_weights,\n layer1_recur_weights,\n 16, 12\n};\n\n')
f.write('const DenseLayer layer2 = {\n layer2_bias,\n layer2_weights,\n 12, 2, 1\n};\n\n')
f.close()
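
For reference, a minimal sketch of the fixed-point convention printVector relies on above: each weight is scaled by 8192 (Q13) and rounded into an opus_int16, so the C side can recover it as q/8192.0. The weight values below are made up purely for illustration, not taken from the real model.

import numpy as np

w = np.array([0.37, -1.02, 0.0051])        # illustrative float weights only
q = np.round(8192 * w).astype(np.int16)    # what printVector writes into rnn_weights.c
print(q)                                   # [ 3031 -8356    42]
print(q / 8192.0)                          # dequantized values, close to the originals
# Note: a weight with magnitude >= 4.0 would overflow int16, since 8192*4 = 32768.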

@@ -0,0 +1,67 @@
#!/usr/bin/python
from __future__ import print_function
from keras.models import Sequential
from keras.models import Model
from keras.layers import Input
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import GRU
from keras.layers import SimpleRNN
from keras.layers import Dropout
from keras import losses
import h5py
from keras import backend as K
import numpy as np
def binary_crossentrop2(y_true, y_pred):
    return K.mean(2*K.abs(y_true-0.5) * K.binary_crossentropy(y_pred, y_true), axis=-1)
print('Build model...')
#model = Sequential()
#model.add(Dense(16, activation='tanh', input_shape=(None, 25)))
#model.add(GRU(12, dropout=0.0, recurrent_dropout=0.0, activation='tanh', recurrent_activation='sigmoid', return_sequences=True))
#model.add(Dense(2, activation='sigmoid'))
main_input = Input(shape=(None, 25), name='main_input')
x = Dense(16, activation='tanh')(main_input)
x = GRU(12, dropout=0.1, recurrent_dropout=0.1, activation='tanh', recurrent_activation='sigmoid', return_sequences=True)(x)
x = Dense(2, activation='sigmoid')(x)
model = Model(inputs=main_input, outputs=x)
batch_size = 64
print('Loading data...')
with h5py.File('features.h5', 'r') as hf:
    all_data = hf['features'][:]
print('done.')

window_size = 1500
nb_sequences = len(all_data) // window_size  # integer division; any trailing partial window is dropped
print(nb_sequences, ' sequences')
x_train = all_data[:nb_sequences*window_size, :-2]
x_train = np.reshape(x_train, (nb_sequences, window_size, 25))
y_train = np.copy(all_data[:nb_sequences*window_size, -2:])
y_train = np.reshape(y_train, (nb_sequences, window_size, 2))
all_data = 0  # release the raw feature array
x_train = x_train.astype('float32')
y_train = y_train.astype('float32')
print(len(x_train), 'train sequences. x shape =', x_train.shape, 'y shape = ', y_train.shape)
# try using different optimizers and different optimizer configs
model.compile(loss=binary_crossentrop2,
              optimizer='adam',
              metrics=['binary_accuracy'])

print('Train...')
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=200,
          validation_data=(x_train, y_train))
model.save("newweights.hdf5")
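
A minimal sketch of the reshaping done above, with random numbers standing in for features.h5 (which is produced outside this script); it assumes each row holds 25 input features followed by the 2 training targets, as the slicing above implies.

import numpy as np

window_size = 1500
all_data = np.random.rand(3 * window_size + 7, 27).astype('float32')  # stand-in for hf['features'][:]
nb_sequences = len(all_data) // window_size                           # trailing partial window is dropped
x = all_data[:nb_sequences * window_size, :-2].reshape(nb_sequences, window_size, 25)
y = all_data[:nb_sequences * window_size, -2:].reshape(nb_sequences, window_size, 2)
print(x.shape, y.shape)  # (3, 1500, 25) (3, 1500, 2)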