python · numpy · neural-network · relu

The truth value of an array with more than one element is ambiguous. Use a.any() or a.all() python numpy using ReLu function


import numpy as np

class NeuralNetwork():
    """Minimal single-layer network: 8 input features -> 5 outputs, ReLU activation."""

    def __init__(self):
        # Seed the RNG so weight initialisation (and therefore training) is reproducible.
        np.random.seed(1)

        # Weight matrix mapping 8 inputs to 5 output units, values drawn from [0, 1).
        self.synaptic_weights = np.random.random((8, 5))

    def rectified(self, x):
        """Element-wise ReLU activation.

        Uses np.maximum, which broadcasts over arrays. Python's builtin
        max(0, x) tries to evaluate the truth value of the whole array,
        raising "The truth value of an array with more than one element
        is ambiguous" — the error reported in the question.
        """
        return np.maximum(x, 0)

    def rectified_derivative(self, x):
        """Element-wise ReLU derivative: 1.0 where x > 0, else 0.0.

        Takes ``self`` (the original definition omitted it, making
        ``self.rectified_derivative(output)`` a TypeError) and returns a
        new array instead of mutating ``x`` in place, so the caller's
        ``output`` array is left untouched.
        """
        return (x > 0).astype(float)

    def train(self, training_inputs, training_outputs, training_iterations):
        """Repeatedly forward-propagate and adjust weights toward the targets.

        training_inputs: array of shape (n_samples, 8).
        training_outputs: array of shape (n_samples, 5).
        training_iterations: number of update passes to run.
        """
        for iteration in range(training_iterations):
            output = self.think(training_inputs)

            # Error signal between targets and current predictions.
            error = training_outputs - output

            # Scale the error by the activation derivative and project it
            # back onto the inputs to get the weight adjustments.
            adjustments = np.dot(training_inputs.T, error * self.rectified_derivative(output))

            self.synaptic_weights += adjustments

    def think(self, inputs):
        """Forward pass: ReLU(inputs @ synaptic_weights)."""
        inputs = inputs.astype(float)
        output = self.rectified(np.dot(inputs, self.synaptic_weights))
        return output

I'm not sure why I am receiving this error. Could someone please point me in the right direction? The error occurs on this line:

ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()

    return max(0, x)

Solution

  • If you are trying to clamp all the values to be non-negative, use numpy.clip as follows:

    x.clip(0)
    

    Python's builtin max does not operate element-wise on NumPy arrays: comparing an array against 0 yields a boolean array, and max then asks for that array's truth value, which is ambiguous for more than one element — hence the ValueError. numpy.maximum(x, 0) is the element-wise equivalent.