0 votes
949 views
in Technique by (71.8m points)

python - AssertionError: Could not compute output Tensor

I am trying to build a model that takes multiple inputs and produces multiple outputs using the Keras functional API. I followed this to create the code.

import tensorflow as tf

def create_model_multiple():
    input1 = tf.keras.Input(shape=(13,), name = 'I1')
    input2 = tf.keras.Input(shape=(6,), name = 'I2')
    hidden1 = tf.keras.layers.Dense(units = 4, activation='relu')(input1)
    hidden2 = tf.keras.layers.Dense(units = 4, activation='relu')(input2)
    merge = tf.keras.layers.concatenate([hidden1, hidden2])
    hidden3 = tf.keras.layers.Dense(units = 3, activation='relu')(merge)
    output1 = tf.keras.layers.Dense(units = 2, activation='softmax', name ='O1')(hidden3)
    output2 = tf.keras.layers.Dense(units = 2, activation='softmax', name = 'O2')(hidden3)
    model = tf.keras.models.Model(inputs = [input1,input2], outputs = [output1,output2])
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model

My model.fit command looks like this:

history = model.fit({'I1':train_data, 'I2':new_train_data},
                    {'O1':train_labels, 'O2': new_target_label},
                    validation_data=(val_data,val_labels),
                    epochs=100,
                    verbose = 1)

The shapes of the input data are as follows:

train_data is (192, 13)
new_train_data is (192, 6)
train_labels and new_target_label are each (192,)

The code runs for a few steps and then raises this error:

Epoch 1/100
1/6 [====>.........................] - ETA: 0s - loss: 360.3317 - O1_loss: 127.8019 - O2_loss: 232.5298 - O1_accuracy: 0.3438 - O2_accuracy: 0.4062
---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
<ipython-input-29-db61ad0a9d8b> in <module>
      3                     validation_data=(val_data,val_labels),
      4                     epochs=100,
----> 5                     verbose = 1)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
     64   def _method_wrapper(self, *args, **kwargs):
     65     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
---> 66       return method(self, *args, **kwargs)
     67 
     68     # Running inside `run_distribute_coordinator` already.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
    870               workers=workers,
    871               use_multiprocessing=use_multiprocessing,
--> 872               return_dict=True)
    873           val_logs = {'val_' + name: val for name, val in val_logs.items()}
    874           epoch_logs.update(val_logs)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
     64   def _method_wrapper(self, *args, **kwargs):
     65     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
---> 66       return method(self, *args, **kwargs)
     67 
     68     # Running inside `run_distribute_coordinator` already.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in evaluate(self, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, return_dict)
   1079                 step_num=step):
   1080               callbacks.on_test_batch_begin(step)
-> 1081               tmp_logs = test_function(iterator)
   1082               # Catch OutOfRangeError for Datasets of unknown size.
   1083               # This blocks until the batch has finished executing.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
    578         xla_context.Exit()
    579     else:
--> 580       result = self._call(*args, **kwds)
    581 
    582     if tracing_count == self._get_tracing_count():

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
    616       # In this case we have not created variables on the first call. So we can
    617       # run the first trace but we should fail if variables are created.
--> 618       results = self._stateful_fn(*args, **kwds)
    619       if self._created_variables:
    620         raise ValueError("Creating variables on a non-first call to a function"

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in __call__(self, *args, **kwargs)
   2417     """Calls a graph function specialized to the inputs."""
   2418     with self._lock:
-> 2419       graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
   2420     return graph_function._filtered_call(args, kwargs)  # pylint: disable=protected-access
   2421 

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
   2772           and self.input_signature is None
   2773           and call_context_key in self._function_cache.missed):
-> 2774         return self._define_function_with_shape_relaxation(args, kwargs)
   2775 
   2776       self._function_cache.missed.add(call_context_key)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _define_function_with_shape_relaxation(self, args, kwargs)
   2704         relaxed_arg_shapes)
   2705     graph_function = self._create_graph_function(
-> 2706         args, kwargs, override_flat_arg_shapes=relaxed_arg_shapes)
   2707     self._function_cache.arg_relaxed[rank_only_cache_key] = graph_function
   2708 

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   2665             arg_names=arg_names,
   2666             override_flat_arg_shapes=override_flat_arg_shapes,
-> 2667             capture_by_value=self._capture_by_value),
   2668         self._function_attributes,
   2669         # Tell the ConcreteFunction to clean up its graph once it goes out of

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    979         _, original_func = tf_decorator.unwrap(python_func)
    980 
--> 981       func_outputs = python_func(*func_args, **func_kwargs)
    982 
    983       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
    439         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    440         # the function a weak reference to itself to avoid a reference cycle.
--> 441         return weak_wrapped_fn().__wrapped__(*args, **kwds)
    442     weak_wrapped_fn = weakref.ref(wrapped_fn)
    443 

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
    966           except Exception as e:  # pylint:disable=broad-except
    967             if hasattr(e, "ag_error_metadata"):
--> 968               raise e.ag_error_metadata.to_exception(e)
    969             else:
    970               raise

AssertionError: in user code:

    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py:941 test_function  *
        outputs = self.distribute_strategy.run(
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:951 run  **
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2290 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2649 _call_for_each_replica
        return fn(*args, **kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py:909 test_step  **
        y_pred = self(x, training=False)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\base_layer.py:927 __call__
        outputs = call_fn(cast_inputs, *args, **kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\network.py:719 call
        convert_kwargs_to_constants=base_layer_utils.call_context().saving)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\network.py:899 _run_internal_graph
        assert str(id(x)) in tensor_dict, 'Could not compute output ' + str(x)

    AssertionError: Could not compute output Tensor("O1_6/Identity:0", shape=(None, 2), dtype=float32)

The Jupyter notebook with the complete code is here.



1 Reply

0 votes
by (71.8m points)

You have to provide validation_data in the same format as your training data: two input arrays and two target arrays. You are passing only a single input and a single target, so during validation the model cannot compute its two outputs, which is exactly what the "Could not compute output Tensor" assertion is complaining about.
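Applied to your original fit call, a minimal sketch of the fix could look like this. The names new_val_data and new_val_labels are placeholders for the second validation input and the second set of validation labels, which you would need to prepare the same way as new_train_data and new_target_label; only the structure of validation_data matters here.

# Sketch only: `new_val_data` (shape (n, 6)) and `new_val_labels` (shape (n,))
# are assumed to exist alongside `val_data` (shape (n, 13)) and `val_labels`.
history = model.fit({'I1': train_data, 'I2': new_train_data},
                    {'O1': train_labels, 'O2': new_target_label},
                    validation_data=({'I1': val_data, 'I2': new_val_data},
                                     {'O1': val_labels, 'O2': new_val_labels}),
                    epochs=100,
                    verbose=1)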

Here is a complete, runnable dummy example:

import numpy as np
import tensorflow as tf

def create_model_multiple():
    input1 = tf.keras.Input(shape=(13,), name = 'I1')
    input2 = tf.keras.Input(shape=(6,), name = 'I2')
    hidden1 = tf.keras.layers.Dense(units = 4, activation='relu')(input1)
    hidden2 = tf.keras.layers.Dense(units = 4, activation='relu')(input2)
    merge = tf.keras.layers.concatenate([hidden1, hidden2])
    hidden3 = tf.keras.layers.Dense(units = 3, activation='relu')(merge)
    output1 = tf.keras.layers.Dense(units = 2, activation='softmax', name ='O1')(hidden3)
    output2 = tf.keras.layers.Dense(units = 2, activation='softmax', name = 'O2')(hidden3)
    model = tf.keras.models.Model(inputs = [input1,input2], outputs = [output1,output2])
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model


# training inputs: 190 samples with 13 and 6 features respectively
x1 = np.random.uniform(0,1, (190,13))
x2 = np.random.uniform(0,1, (190,6))
# validation inputs: 50 samples with the same feature counts
val_x1 = np.random.uniform(0,1, (50,13))
val_x2 = np.random.uniform(0,1, (50,6))

# integer class targets for the two outputs (sparse_categorical_crossentropy)
y1 = np.random.randint(0,2, 190)
y2 = np.random.randint(0,2, 190)
val_y1 = np.random.randint(0,2, 50)
val_y2 = np.random.randint(0,2, 50)


model = create_model_multiple()

history = model.fit({'I1':x1, 'I2':x2},
                    {'O1':y1, 'O2': y2},
                    validation_data=([val_x1,val_x2], [val_y1,val_y2]), # <=========
                    epochs=100,
                    verbose = 1)
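For completeness, evaluation and prediction follow the same multi-input convention. A small sketch using the arrays defined above (not part of the original answer):

# evaluate returns the total loss plus per-output losses and accuracies
results = model.evaluate([val_x1, val_x2], [val_y1, val_y2], verbose=0)

# predict returns one array of class probabilities per named output
pred_o1, pred_o2 = model.predict([val_x1, val_x2])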
