ValueError: Layer activation_1 was called with an input that isn't a symbolic tensor


Question


from keras.layers import AveragePooling2D
from keras.models import Sequential
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dropout
from keras.layers.core import Dense
from keras import backend as K

class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):

        x = (height, width, depth)
        output = -1

        # CONV => RELU => POOL
        x = (Conv2D(16, (3, 3), padding="same", input_shape=x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))

        x = (Conv2D(32, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))
        x = (BatchNormalization(axis=output)(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (AveragePooling2D(pool_size=(2, 2))(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(128, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(128, (3, 3))(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(2, 2))(x))

        # first (and only) set of FC => RELU layers
        x = (Flatten()(x))
        x = (Dense(128)(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization()(x))
        x = (Dropout(0.5)(x))

        # softmax classifier
        x = (Dense(classes)(x))
        x = (Activation(finalAct)(x))

        x.summary()

        # return the constructed network architecture

Why does this happen? When I run the code, it says the layer activation was called with an input that isn't a symbolic tensor. Please help me fix this issue.

ValueError: Layer activation_1 was called with an input that isn't a symbolic tensor. Received type: <class 'keras.layers.convolutional.Conv2D'>. Full input: [<keras.layers.convolutional.Conv2D object at 0x000002711AAEB588>]. All inputs to the layer should be tensors.

Answer 1

Score: 1

In the line x = (Conv2D(16, (3, 3), padding="same", input_shape=x)) you have given no input. Therefore, when execution reaches x = (Activation("relu")(x)), x is a layer and not a tensor, which produces the error above. As mentioned in the comments, you have to pass an input to the first layer.
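To illustrate the distinction, here is a minimal sketch (the 96x96x3 input shape is hypothetical, chosen only for the example):

from keras.layers import Input, Conv2D, Activation

layer = Conv2D(16, (3, 3), padding="same")        # a Layer object; it was never called
# Activation("relu")(layer)                       # -> ValueError: ... isn't a symbolic tensor

inputs = Input(shape=(96, 96, 3))                 # hypothetical input shape
t = Conv2D(16, (3, 3), padding="same")(inputs)    # calling the layer on a tensor
t = Activation("relu")(t)                         # works: t is now a symbolic tensor

The edited code is as below (note that I have used the tensorflow.keras library instead of keras):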

from tensorflow.compat.v1.keras.layers import AveragePooling2D
from tensorflow.compat.v1.keras.models import Sequential, Model
from tensorflow.compat.v1.keras.layers import Input, Dense, Dropout, Flatten, Activation, BatchNormalization, Conv2D, MaxPooling2D
from tensorflow.compat.v1.keras import backend as K

class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):
        x = (height, width, depth)
        output = -1

        # CONV => RELU => POOL
        inputs = Input(shape=x)
        x = Conv2D(16, (3, 3), padding="same", input_shape=x)(inputs)  # input_shape is redundant here; Input already defines the shape
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)

        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = BatchNormalization(axis=output)(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = AveragePooling2D(pool_size=(2, 2))(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = Conv2D(128, (3, 3))(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)

        # first (and only) set of FC => RELU layers
        x = Flatten()(x)
        x = Dense(128)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)

        # softmax classifier
        x = Dense(classes)(x)
        x = Activation(finalAct)(x)
        model = Model(inputs, x)
        model.summary()

a = SmallerVGGNet()
a.build(100, 100, 100, 10)
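If you intend to train the network afterwards, a common pattern is to have build() return the constructed model (as the comment in the original code suggested) and compile it. A minimal sketch, assuming build() is changed to end with return model:

model = SmallerVGGNet.build(96, 96, 3, classes=10)   # assumes build() ends with `return model`
model.compile(optimizer="adam",
              loss="categorical_crossentropy",
              metrics=["accuracy"])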

huangapple
  • Posted on 2020-01-06 16:35:01
  • Please keep the original link when reposting: https://go.coder-hub.com/59608911.html