Newbie built a neural network with Keras to predict futures prices, but the error is high. Asking the experts here for advice on how to improve it.

import numpy as np
import pandas as pd
import keras
from keras.models import Sequential 
from keras.layers import Dense
from keras import optimizers
import matplotlib.pyplot as plt
def excel_one_line_to_list():
    """Read the train/test features and targets from Excel into global lists."""
    global X_train
    global Y_train
    global X_test
    global Y_test
    # NOTE: usecols=[0,...,7] selects 8 columns, while the model below is
    # built with input_dim=7; check that these two agree with your data.
    X = pd.read_excel('训练集X.xlsx', usecols=[0, 1, 2, 3, 4, 5, 6, 7],
                      names=None)
    X_train = X.values.tolist()
    Y = pd.read_excel('训练集Y.xlsx', usecols=[0],
                      names=None)
    Y_train = Y.values.tolist()
    Z = pd.read_excel('测试集X.xlsx', usecols=[0, 1, 2, 3, 4, 5, 6, 7],
                      names=None)
    X_test = Z.values.tolist()
    W = pd.read_excel('测试集Y.xlsx', usecols=[0],
                      names=None)
    Y_test = W.values.tolist()

if __name__ == '__main__':
    excel_one_line_to_list()

# convert the nested lists to numpy arrays for Keras
X_train = np.array(X_train)
Y_train = np.array(Y_train)
X_test = np.array(X_test)
Y_test = np.array(Y_test)
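One likely reason the loss stalls in the tens of thousands is scale: the raw features and the price-level targets (around 13,000 to 14,000) are fed in unscaled, which is what forces the tiny learning rate below. A minimal sketch of standardizing both splits first, assuming scikit-learn is available (the scaler and variable names are illustrative, not part of the original script):

from sklearn.preprocessing import StandardScaler

# fit the scalers on the training split only, then reuse them on the test split
x_scaler = StandardScaler().fit(X_train)
y_scaler = StandardScaler().fit(Y_train)
X_train_s = x_scaler.transform(X_train)
Y_train_s = y_scaler.transform(Y_train)
X_test_s = x_scaler.transform(X_test)

# after training on the scaled data, map predictions back to price units:
# Y_pred = y_scaler.inverse_transform(model.predict(X_test_s))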
# Three Dense layers with no activation functions: the stack collapses to a
# single linear map, i.e. this is linear regression with extra parameters.
# input_dim is only meaningful on the first layer; Keras ignores it afterwards.
model = Sequential()
model.add(Dense(input_dim=7, units=50))
model.add(Dense(units=7))
model.add(Dense(units=1))
# lr=1e-11 is an extremely small learning rate; with unscaled targets the
# weights barely move, which matches the nearly flat loss in the output below.
sgd = optimizers.SGD(lr=1e-11, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='mse', optimizer=sgd)
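For comparison, a common baseline is to give the hidden layers nonlinear activations and let Adam choose the step sizes; a sketch, kept separate from the script's model on purpose (alt_model is an illustrative name, and it assumes the standardized data from the scaling note above):

alt_model = Sequential()
alt_model.add(Dense(units=50, activation='relu', input_dim=7))
alt_model.add(Dense(units=7, activation='relu'))
alt_model.add(Dense(units=1))                    # linear output for regression
alt_model.compile(loss='mse', optimizer='adam')  # Adam defaults to lr=1e-3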
print('Training---------')
for step in range(300):  # 300 iterations
    # train_on_batch on the entire training set = one full-batch gradient step
    cost = model.train_on_batch(X_train, Y_train)
    if step % 50 == 0:
        print(cost)
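The manual loop above is full-batch gradient descent with hand-rolled logging; model.fit does the same job with mini-batches and per-epoch loss tracking. A sketch (the epoch and batch-size values are illustrative):

history = model.fit(X_train, Y_train,
                    epochs=300,      # mirrors the 300 steps above
                    batch_size=32,   # mini-batches instead of the full set
                    verbose=0)
print('final loss:', history.history['loss'][-1])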
print('\nTesting---------')
cost = model.evaluate(X_test, Y_test, batch_size=10)  # test-set MSE
print('Test cost:', cost)
W0, b0 = model.layers[0].get_weights()  # first layer's weights and biases
# W1, b1 = model.layers[1].get_weights()
# W2, b2 = model.layers[2].get_weights()
print('Weight=', W0, '\nbiases=', b0)
Y_pred = model.predict(X_test)  # predictions on the test set
print(Y_pred)
plt.scatter(Y_test, Y_pred)  # predicted vs. actual
# plotting the whole multi-column X_test against Y_pred draws one line per
# feature column; a single column such as X_test[:, 0] is probably intended
plt.plot(X_test, Y_pred)
plt.show()
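A predicted-versus-actual scatter with a y = x reference line usually reads more clearly than plotting the raw feature matrix; a small sketch:

plt.figure()
plt.scatter(Y_test, Y_pred)
lims = [Y_test.min(), Y_test.max()]
plt.plot(lims, lims, 'r--')  # points on this line are perfect predictions
plt.xlabel('actual')
plt.ylabel('predicted')
plt.show()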
print('test before save: ', model.predict(X_test[0:2]))
model.save('my_model.h5')   # HDF5 format; requires h5py (pip install h5py)
del model                   # delete the in-memory model

# load the model back from disk and confirm the predictions are unchanged
model = keras.models.load_model('my_model.h5')
print('test after load: ', model.predict(X_test[0:2]))
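If the goal is just to check that saving is lossless, comparing the two prediction arrays numerically is stricter than eyeballing the printed values; a one-line check on top of the code above:

assert np.allclose(model.predict(X_test[0:2]), Y_pred[0:2])  # reload matches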

The output is as follows:
Training---------
217635580.0
16840.266
16832.242
16829.719
16827.205
16824.766

Testing---------
27/27 [==============================] - 0s 6ms/step
Test cost: 7129.940700954861
Weight= [[ 1.59597933e-01 -1.63164452e-01 -6.74404949e-02 -1.50668785e-01
-7.16855228e-02 2.76058435e-01 3.66568640e-02 1.81173950e-01
2.34727278e-01 -1.51966503e-02 1.90960333e-01 1.70924038e-01
-1.45164192e-01 -1.81863993e-01 -2.59632647e-01 1.47559658e-01
1.22054346e-01 1.33561328e-01 2.75858909e-01 6.11867979e-02
-1.46601468e-01 3.40002552e-02 1.21036783e-01 -2.53365934e-01
-2.22889278e-02 2.98361003e-01 2.18154620e-02 3.14030170e-01
3.08404416e-01 -1.99642107e-01 1.17396496e-01 -2.86993623e-01
1.27667114e-01 1.92788790e-03 2.84913164e-02 -1.59505382e-01
-1.88906804e-01 2.59341657e-01 3.04084182e-01 -2.98908353e-01
-2.94419408e-01 -1.32994652e-01 -1.42567888e-01 7.57894367e-02
3.18280458e-01 -2.74127990e-01 1.43482804e-01 2.67924406e-02
5.11021316e-02 2.52739966e-01]
[-1.66780651e-01 3.07876259e-01 -2.76544631e-01 2.33634457e-01
-1.78904429e-01 -2.29675695e-01 3.04972410e-01 -1.92832649e-01
-3.02256435e-01 1.38059556e-01 -2.12680325e-01 -1.51886493e-01
9.22016501e-02 -2.83646375e-01 -3.00320327e-01 -2.06528515e-01
-2.40478650e-01 -2.85602391e-01 2.54638135e-01 -3.36858928e-02
-3.23215783e-01 1.68423444e-01 4.92839441e-02 -2.63119578e-01
-2.74162471e-01 -1.90333754e-01 1.24003828e-01 -1.98525712e-01
1.50023922e-01 2.61018276e-01 -2.23563433e-01 -3.13018143e-01
6.88578039e-02 -2.78955281e-01 2.06810683e-01 1.30589426e-01
-2.17648610e-01 -9.92771760e-02 -1.47404701e-01 2.34659910e-01
-1.36551604e-01 2.42027745e-01 2.68154293e-01 -2.63032496e-01
-2.34111473e-01 -1.64425164e-01 2.46726826e-01 3.33464980e-01
2.27206722e-01 9.81995761e-02]
[-6.70408458e-02 2.08520666e-01 -7.75955990e-02 6.91927522e-02
-1.53108239e-01 -1.41667947e-01 1.54191643e-01 2.60643512e-01
-8.13121200e-02 -2.15802357e-01 -2.05318049e-01 8.74011368e-02
-1.34235352e-01 -2.01000154e-01 1.00846887e-01 -2.05755420e-02
-1.02780834e-02 1.55998483e-01 -7.24828839e-02 2.35828646e-02
-2.65243828e-01 -1.25664353e-01 1.72558930e-02 -3.11459899e-01
1.55316874e-01 2.61702925e-01 -2.89170951e-01 -5.47949672e-02
-3.98722701e-02 1.94728404e-01 -8.64629894e-02 -3.00667465e-01
3.07185054e-01 1.26889898e-02 9.49738100e-02 -8.75065699e-02
2.59513050e-01 1.28450364e-01 2.76126325e-01 2.07807302e-01
7.32704019e-03 2.00960010e-01 -1.12939902e-01 1.77014083e-01
-1.11420169e-01 -3.12859416e-01 -3.37550044e-02 -1.70576110e-01
-3.02477032e-01 -2.92989701e-01]
[-1.09660029e-01 -1.81069091e-01 -7.94327781e-02 9.95974913e-02
2.87229121e-01 2.85690755e-01 2.99655557e-01 3.13406795e-01
-5.44350408e-02 5.66164106e-02 -2.83484638e-01 -1.33925155e-01
1.97724193e-01 1.82862267e-01 -6.35709241e-02 4.83132489e-02
2.63011932e-01 -3.41850407e-02 -2.82212675e-01 3.15599531e-01
-6.37678022e-05 -6.17437400e-02 2.58905888e-01 6.56804964e-02
-1.52315289e-01 -1.85559392e-01 1.35489359e-01 4.78170328e-02
9.94811114e-03 -2.91493475e-01 -8.33625812e-03 -1.82240471e-01
1.34082854e-01 1.01142839e-01 2.91566521e-01 -1.56018212e-01
-1.23699130e-02 7.75429383e-02 5.92802763e-02 3.03424627e-01
-1.75819993e-01 -2.08792821e-01 2.77032465e-01 2.53719747e-01
-2.22494826e-01 -2.60701537e-01 -3.07346493e-01 -7.64319599e-02
2.89467514e-01 -8.60424191e-02]
[-1.26385450e-01 -9.45294350e-02 1.24021634e-01 -1.56894356e-01
-1.76307470e-01 -7.78776081e-03 -4.68005873e-02 -2.39356399e-01
-2.24311367e-01 -2.71645844e-01 1.91212595e-01 1.77200716e-02
-8.72606859e-02 1.58455446e-01 6.70219883e-02 5.44556379e-02
-1.38896629e-01 -1.14846729e-01 8.11912417e-02 -8.31231289e-03
-3.74341048e-02 -1.64775401e-01 -1.82532340e-01 -3.09377670e-01
-2.52738893e-01 -2.84718722e-01 -1.23631194e-01 2.83754438e-01
1.42215475e-01 2.98020244e-03 3.08217496e-01 2.26806581e-01
-9.16632637e-02 3.19284916e-01 -1.45015642e-01 -1.06431708e-01
1.34517699e-01 -1.95531875e-01 -3.05214822e-01 -1.90642089e-01
6.76082894e-02 3.08624089e-01 -2.80148208e-01 7.30376393e-02
-2.85099477e-01 2.12775216e-01 -1.90101445e-01 -6.91573247e-02
-5.94927929e-02 -1.59442723e-01]
[-1.23906769e-01 -3.29780787e-01 2.68762112e-01 -3.23099911e-01
4.81704511e-02 3.14826369e-01 2.94110537e-01 2.94298381e-01
2.47400045e-01 2.70754009e-01 -2.46968105e-01 -1.85076997e-01
2.48071458e-02 -1.04475960e-01 2.92056054e-01 3.04980427e-01
2.01537296e-01 2.49997064e-01 1.37962535e-01 6.10233471e-02
1.60643503e-01 9.05257929e-03 2.45197982e-01 2.15029344e-01
4.63397056e-02 2.14143723e-01 2.82797337e-01 -2.86038876e-01
-1.23920098e-01 6.74028769e-02 -2.16121495e-01 3.17502394e-03
2.23689377e-01 -1.32720366e-01 -2.53937483e-01 1.83955461e-01
2.56248042e-02 -1.02779761e-01 1.83873437e-02 -2.00882554e-02
3.71782761e-03 -2.41676226e-01 -1.79727033e-01 -1.27795145e-01
2.20942304e-01 -2.70347059e-01 -1.16545968e-01 -2.51793444e-01
-6.62985966e-02 -1.22825086e-01]
[ 2.52447665e-01 2.41396502e-01 2.95232713e-01 -7.07795396e-02
1.22460179e-01 3.21973294e-01 -2.26764590e-01 5.30654453e-02
-2.64158487e-01 -1.67560995e-01 -1.39557391e-01 -6.21598065e-02
-1.56105310e-01 -1.69044703e-01 -9.92008671e-02 -2.60418683e-01
2.01619670e-01 2.00537130e-01 3.01766068e-01 9.98505726e-02
1.62128434e-01 1.64082184e-01 3.84744629e-02 4.55580070e-04
-1.87980741e-01 1.72524586e-01 2.02567533e-01 1.79847375e-01
2.74119806e-03 -2.77546436e-01 -3.10965478e-01 1.60256505e-01
2.91219503e-01 -9.80987921e-02 -1.97619557e-01 -2.56504953e-01
2.36543939e-01 -8.91020447e-02 -1.83409318e-01 1.31210774e-01
-7.19914287e-02 -1.66420981e-01 2.21108332e-01 2.40670830e-01
1.61334276e-02 5.69368266e-02 -8.93439278e-02 -2.44198784e-01
2.52900720e-01 1.88874081e-01]]
biases= [-1.5381387e-07 -7.3514889e-07 1.2183398e-07 -3.2101477e-07
-1.4650695e-07 -1.4695290e-07 -3.2917245e-07 -4.8284932e-07
1.8946234e-07 -2.0457924e-07 3.7356835e-07 -3.4910833e-08
3.4664760e-07 -4.6028853e-07 5.1097128e-08 1.6981201e-07
-2.2270484e-07 8.0620015e-07 2.9956064e-07 6.3730874e-07
-7.1930806e-08 -3.0350770e-07 -1.7092061e-07 5.5827945e-07
2.1870336e-07 -7.4191399e-07 -1.3861235e-07 1.0414294e-06
-1.6211536e-07 7.7069089e-08 -4.4825600e-07 2.6387267e-08
-5.1543225e-07 -2.7083811e-07 4.4753241e-07 -6.6388274e-08
7.2385882e-07 3.2414823e-07 -2.5756131e-07 -3.1792567e-07
3.4727802e-07 5.8008828e-07 -6.8781191e-07 2.4056214e-07
2.1064696e-07 3.6660620e-07 4.5628033e-07 6.7019124e-07
-1.7887301e-07 -1.1754273e-07]
[[13679.574 ]
[13767.229 ]
[13806.827 ]
[13760.908 ]
[13779.671 ]
[13685.294 ]
[13658.456 ]
[13595.613 ]
[13598.314 ]
[13791.237 ]
[13764.337 ]
[13654.178 ]
[13530.802 ]
[13643.936 ]
[13627.445 ]
[13608.184 ]
[13578.342 ]
[13601.504 ]
[13580.279 ]
[13589.182 ]
[13632.572 ]
[13600.038 ]
[13631.083 ]
[13679.852 ]
[13645.439 ]
[13727.245 ]
[13652.1875]]
test before save: [[13679.573 ]
[13767.2295]]
test after load: [[13679.573 ]
[13767.2295]]

