keras, python-3.5, keras-layer, relu

TypeError: relu() missing 1 required positional argument: 'x'


I am getting this error and I don't know why. Can anyone help me out?

import warnings
warnings.filterwarnings('ignore',category=FutureWarning)
import tensorflow as tf
import keras
from keras.layers.convolutional import Conv2D, AtrousConvolution2D
from keras.layers import Activation, Dense, Input, Conv2DTranspose, Flatten
from keras.layers import Dropout, Concatenate, BatchNormalization, Reshape
from keras.layers.advanced_activations import LeakyReLU
from keras.models import Model, model_from_json
from keras.optimizers import Adam
from keras.layers.convolutional import UpSampling2D
import keras.backend as K
from keras.activations import relu
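# InstanceNormalization is not part of core Keras; assuming it comes from keras-contrib
from keras_contrib.layers import InstanceNormalization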


def g_build_conv(layer_input, filter_size, kernel_size=4, strides=2, activation='leakyrelu',
                 dropout_rate=g_dropout, norm='inst', dilation=1):
    c = AtrousConvolution2D(filter_size, kernel_size=kernel_size, strides=strides,
                            atrous_rate=(dilation, dilation), padding='same')(layer_input)
    if activation == 'leakyrelu':
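        # line 102 in the traceback below: this is where the TypeError is raised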
        c = relu()(c)
    if dropout_rate:
        c = Dropout(dropout_rate)(c)
    if norm == 'inst':
        c = InstanceNormalization()(c)
    return c

Warning (from warnings module):
  File "C:\Users\xyz\AppData\Local\Programs\Python\Python35\lib\site-packages\keras\legacy\layers.py", line 762
    warnings.warn('The AtrousConvolution2D layer '
UserWarning: The AtrousConvolution2D layer has been deprecated. Use instead the Conv2D layer with the dilation_rate argument.

Traceback (most recent call last):
  File "D:\Image Outpaining\outpaint.py", line 146, in <module>
    GEN = build_generator()
  File "D:\Image Outpaining\outpaint.py", line 120, in build_generator
    g1 = g_build_conv(g_input, 64, 5, strides=1)
  File "D:\Image Outpaining\outpaint.py", line 102, in g_build_conv
    c = relu()(c)
TypeError: relu() missing 1 required positional argument: 'x'


Solution

  • keras.activations.relu is a plain function that takes the input tensor as its first argument (hence the error about the missing positional argument 'x'), not a layer class, so calling relu() with no arguments fails. To apply ReLU as a layer, do the following:

    from keras.layers import Activation
    
    if activation == 'leakyrelu':
        c = Activation("relu")(c)
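
  • Alternatively, since the branch is named 'leakyrelu' and you already import LeakyReLU, you may have actually meant the leaky variant. LeakyReLU is a layer, so it is instantiated first and then called on the tensor (the alpha=0.2 slope below is just an example value, not something from your code):

    from keras.layers.advanced_activations import LeakyReLU

    if activation == 'leakyrelu':
        # a layer instance is callable on a tensor, unlike the bare relu function
        c = LeakyReLU(alpha=0.2)(c)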
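
  • Unrelated to the crash, the UserWarning in your traceback points out that AtrousConvolution2D is deprecated and suggests Conv2D with the dilation_rate argument. A rough sketch of the equivalent call (note that Conv2D rejects strides > 1 combined with dilation_rate > 1, so keep strides=1 when dilating):

    from keras.layers import Conv2D

    # same convolution expressed with the non-deprecated API
    c = Conv2D(filter_size, kernel_size=kernel_size, strides=strides,
               dilation_rate=(dilation, dilation), padding='same')(layer_input)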