```
/home/c410/anaconda3/lib/python3.6/site-packages/keras/layers/core.py:577: UserWarning: `output_shape` argument not specified for layer mean_subtraction and cannot be automatically inferred with the Theano backend. Defaulting to output shape `(None, 3, 227, 227)` (same as input shape). If the expected output shape is different, specify it via the `output_shape` argument.
  .format(self.name, input_shape))
(Subtensor{int64}.0, Elemwise{add,no_inplace}.0, Elemwise{add,no_inplace}.0, Subtensor{int64}.0)
(Subtensor{int64}.0, Elemwise{add,no_inplace}.0, Elemwise{add,no_inplace}.0, Subtensor{int64}.0)
(Subtensor{int64}.0, Elemwise{add,no_inplace}.0, Elemwise{add,no_inplace}.0, Subtensor{int64}.0)
(Subtensor{int64}.0, Elemwise{add,no_inplace}.0, Elemwise{add,no_inplace}.0, Subtensor{int64}.0)
ValueError                                Traceback (most recent call last)
<ipython-input-...> in <module>()
----> 1 alexnet = get_alexnet(input_size,nb_classes,mean_flag)
2
3 print(alexnet.summary())
~/Desktop/AlexNet/AlexNet-Experiments-Keras-master/Code/alexnet_base.py in get_alexnet(input_shape, nb_classes, mean_flag)
64
65 dense_1 = Flatten(name="flatten")(dense_1)
---> 66 dense_1 = Dense(4096, activation='relu',name='dense_1',init='he_normal')(dense_1)
67 dense_2 = Dropout(0.5)(dense_1)
68 dense_2 = Dense(4096, activation='relu',name='dense_2',init='he_normal')(dense_2)
~/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in __call__(self, x, mask)
541 '`layer.build(batch_input_shape)`')
542 if len(input_shapes) == 1:
--> 543 self.build(input_shapes[0])
544 else:
545 self.build(input_shapes)
~/anaconda3/lib/python3.6/site-packages/keras/layers/core.py in build(self, input_shape)
750 name='{}_W'.format(self.name),
751 regularizer=self.W_regularizer,
--> 752 constraint=self.W_constraint)
753 if self.bias:
754 self.b = self.add_weight((self.output_dim,),
~/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in add_weight(self, shape, initializer, name, trainable, regularizer, constraint)
413 '''
414 initializer = initializations.get(initializer)
--> 415 weight = initializer(shape, name=name)
416 if regularizer is not None:
417 self.add_loss(regularizer(weight))
~/anaconda3/lib/python3.6/site-packages/keras/initializations.py in he_normal(shape, name, dim_ordering)
66 fan_in, fan_out = get_fans(shape, dim_ordering=dim_ordering)
67 s = np.sqrt(2. / fan_in)
---> 68 return normal(shape, s, name=name)
69
70
~/anaconda3/lib/python3.6/site-packages/keras/initializations.py in normal(shape, scale, name)
35
36 def normal(shape, scale=0.05, name=None):
---> 37 return K.random_normal_variable(shape, 0.0, scale, name=name)
38
39
~/anaconda3/lib/python3.6/site-packages/keras/backend/theano_backend.py in random_normal_variable(shape, mean, scale, dtype, name)
181
182 def random_normal_variable(shape, mean, scale, dtype=None, name=None):
--> 183 return variable(np.random.normal(loc=0.0, scale=scale, size=shape),
184 dtype=dtype, name=name)
185
mtrand.pyx in mtrand.RandomState.normal()
ValueError: scale < 0
```
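For reference, the UserWarning at the top of the log itself suggests passing the shape explicitly. Below is a rough, hypothetical sketch only (the real mean_subtraction layer lives in the repository's alexnet_base.py, and the mean value here is a made-up placeholder) showing how a Keras 1.x Lambda layer can be given an explicit `output_shape` so the Theano backend does not have to infer it:

```python
# Illustrative sketch, not the repository's actual code.
from keras.layers import Input, Lambda

inputs = Input(shape=(3, 227, 227))  # channel-first shape, as seen in the log

# Placeholder body: the real mean-subtraction logic is in alexnet_base.py.
# Passing output_shape explicitly silences the "output_shape argument not
# specified ... Theano backend" UserWarning shown above.
mean_subtracted = Lambda(lambda x: x - 120.0,          # hypothetical mean value
                         output_shape=(3, 227, 227),   # same as the input shape
                         name='mean_subtraction')(inputs)
```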
My Keras is already at release 1.2 and keras.json is configured, but running the code produces the error above. Why does this happen?
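For what it's worth, the shapes in the log (e.g. `(None, 3, 227, 227)`) are channel-first and the warning mentions the Theano backend, so keras.json should be pointing Keras 1.2 at Theano with 'th' image dim ordering. A quick sanity check of what was actually loaded (my own sketch, not from the repository):

```python
# Print the backend settings that Keras 1.x read from ~/.keras/keras.json.
from keras import backend as K

print(K.backend())             # expected: 'theano' for this code base
print(K.image_dim_ordering())  # expected: 'th' for (channels, height, width) inputs
```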