
I'm new to Python and neural networks. I have been trying to fix the error in the code below, but without success. How can I correct it?

Note that the code was downloaded from GitHub. It first simulates the Lorenz system to generate nn_input and nn_output, and then trains a neural network on those inputs and outputs.

import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow import keras
from matplotlib import rcParams
from scipy import integrate
from mpl_toolkits.mplot3d import Axes3D
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Flatten, MaxPool2D
from keras import optimizers
from keras.layers import Activation
from keras.utils.generic_utils import get_custom_objects
from keras import backend as K

## Simulate the Lorenz System

dt = 0.01
T = 8
t = np.arange(0,T+dt,dt)
beta = 8/3
sigma = 10
rho = 28

nn_input = np.zeros((100*(len(t)-1),3))
nn_output = np.zeros_like(nn_input)

fig,ax = plt.subplots(1,1,subplot_kw={'projection': '3d'})

def lorenz_deriv(x_y_z, t0, sigma=sigma, beta=beta, rho=rho):
    x, y, z = x_y_z
    return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

np.random.seed(123)
x0 = -15 + 30 * np.random.random((100, 3))
x_t = np.asarray([integrate.odeint(lorenz_deriv, x0_j, t)
                  for x0_j in x0])
for j in range(100):
    nn_input[j*(len(t)-1):(j+1)*(len(t)-1),:] = x_t[j,:-1,:]
    nn_output[j*(len(t)-1):(j+1)*(len(t)-1),:] = x_t[j,1:,:]
    x, y, z = x_t[j,:,:].T
    ax.plot(x, y, z,linewidth=1)
    ax.scatter(x0[j,0],x0[j,1],x0[j,2],color='r')

plt.plot(x, t)            
ax.view_init(18, -113)
plt.show()

## Neural Net

# Define activation functions
def logsig(x):
    return K.variable(np.divide(1,(1+np.exp(-K.eval(x)))))

def radbas(x):
    return K.variable(np.exp(-np.power(K.eval(x),2)))

def purelin(x):
    return x

#create model
model = Sequential()

#add model layers
model.add(Dense(3, activation=logsig))
model.add(Dense(3, activation=radbas))
model.add(Dense(3, activation=purelin))

sgd_optimizer = tf.keras.optimizers.SGD(
    learning_rate=0.01, momentum=0.9)
#sgd_optimizer = optimizers.SGD(momentum=0.9)
model.compile(optimizer=sgd_optimizer, loss='categorical_crossentropy')
#model.summary()
model.fit(nn_input, nn_output, epochs=30)
  • Check this question https://stackoverflow.com/questions/52357542/attributeerror-tensor-object-has-no-attribute-numpy; adding the line `tf.config.run_functions_eagerly(True)` made your code run. – DNy Nov 08 '21 at 14:17
  • Does this answer your question? [AttributeError: 'Tensor' object has no attribute 'numpy'](https://stackoverflow.com/questions/52357542/attributeerror-tensor-object-has-no-attribute-numpy) – Latra Nov 08 '21 at 14:39
  • There is no need to use numpy in your radbas function, you can use K.square instead. – Dr. Snoopy Nov 08 '21 at 17:18
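
Based on those comments, here is a minimal sketch of how the custom activations could be rewritten with Keras backend ops so they operate on symbolic tensors directly, instead of calling K.eval / numpy on them. It assumes the same model and training call as above; K.sigmoid, K.exp and K.square are existing Keras backend functions:

# Custom activations expressed with Keras backend ops (no K.eval / numpy)
from keras import backend as K

def logsig(x):
    # logistic sigmoid: 1 / (1 + exp(-x))
    return K.sigmoid(x)

def radbas(x):
    # radial basis: exp(-x^2), per the suggestion to use K.square
    return K.exp(-K.square(x))

def purelin(x):
    # linear (identity) activation
    return x

# Alternative workaround from the first comment: force eager execution so
# K.eval() on tensors works (may slow training considerably).
# import tensorflow as tf
# tf.config.run_functions_eagerly(True)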
