AssertionError: Could not compute output Tensor "keras_layer/cond/Identity:0", shape=(None, 768), dtype=float32

Problem description

I am following a tutorial that uses BERT to compute sentence embeddings. Here is the code:

import tensorflow as tf
import tensorflow_hub as hub
import bert
from tensorflow.keras.models import Model 
import math
from tensorflow.keras.layers import Input
import numpy as np


def get_masks(tokens,max_seq_length):
    """Mask for padding"""
    if len(tokens)>max_seq_length:
        raise IndexError("Token length more than max seq length!")
    return [1]*len(tokens) + [0] * (max_seq_length - len(tokens))


def get_segments(tokens,max_seq_length):
    """Segments: 0 for the first sequence,1 for the second"""
    if len(tokens)>max_seq_length:
        raise IndexError("Token length more than max seq length!")
    segments = []
    current_segment_id = 0
    for token in tokens:
        segments.append(current_segment_id)
        if token == "[SEP]":
            current_segment_id = 1
    return segments + [0] * (max_seq_length - len(tokens))


def get_ids(tokens,tokenizer,max_seq_length):
    """Token ids from Tokenizer vocab"""
    token_ids = tokenizer.convert_tokens_to_ids(tokens)
    input_ids = token_ids + [0] * (max_seq_length-len(token_ids))
    return input_ids

modelPath = "tfhub_modules/bert"
max_seq_length = 128 
input_word_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32, name="input_word_ids")
input_mask = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32, name="input_mask")
segment_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32, name="segment_ids")
bert_layer = hub.KerasLayer(modelPath, trainable=True)
pooled_output, sequence_output = bert_layer([input_word_ids, input_mask, segment_ids])

model = Model(inputs=[input_word_ids, input_mask, segment_ids], outputs=[pooled_output, sequence_output])

fullTokenizer = bert.bert_tokenization.FullTokenizer
vocab_file = bert_layer.resolved_object.vocab_file.asset_path.numpy()
do_lower_case = bert_layer.resolved_object.do_lower_case.numpy()
tokenizer = fullTokenizer(vocab_file,do_lower_case)

s = "This is a beautiful sentence"
stokens = tokenizer.tokenize(s)
stokens = ["[CLS]"] + stokens + ["[SEP]"]

input_ids = get_ids(stokens, tokenizer, max_seq_length)
input_masks = get_masks(stokens, max_seq_length)
input_segments = get_segments(stokens, max_seq_length)

pool_embs,all_embs = model.predict([[input_ids],[input_masks],[input_segments]])

The error raised is as follows:

WARNING:tensorflow:Model was constructed with shape (None,128) for input Tensor("input_word_ids:0",shape=(None,128),dtype=int32),but it was called on an input with incompatible shape (None,1,128).
WARNING:tensorflow:Model was constructed with shape (None,128).
---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
<ipython-input-52-82ba7e20d77c> in <module>
----> 1 pool_embs,all_embs = model.predict([[input_ids],[input_masks],[input_segments]])

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self,*args,**kwargs)
    128       raise ValueError('{} is not supported in multi-worker mode.'.format(
    129           method.__name__))
--> 130     return method(self,**kwargs)
    131 
    132   return tf_decorator.make_decorator(

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in predict(self,x,batch_size,verbose,steps,callbacks,max_queue_size,workers,use_multiprocessing)
   1597           for step in data_handler.steps():
   1598             callbacks.on_predict_batch_begin(step)
-> 1599             tmp_batch_outputs = predict_function(iterator)
   1600             if data_handler.should_sync:
   1601               context.async_wait()

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self,**kwds)
    778       else:
    779         compiler = "nonXla"
--> 780         result = self._call(*args,**kwds)
    781 
    782       new_tracing_count = self._get_tracing_count()

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self,**kwds)
    821       # This is the first call of __call__,so we have to initialize.
    822       initializers = []
--> 823       self._initialize(args,kwds,add_initializers_to=initializers)
    824     finally:
    825       # At this point we know that the initialization is complete (or less

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self,args,add_initializers_to)
    695     self._concrete_stateful_fn = (
    696         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 697             *args,**kwds))
    698 
    699     def invalid_creator_scope(*unused_args,**unused_kwds):

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self,**kwargs)
   2853       args,kwargs = None,None
   2854     with self._lock:
-> 2855       graph_function,_,_ = self._maybe_define_function(args,kwargs)
   2856     return graph_function
   2857 

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self,kwargs)
   3211 
   3212       self._function_cache.missed.add(call_context_key)
-> 3213       graph_function = self._create_graph_function(args,kwargs)
   3214       self._function_cache.primary[cache_key] = graph_function
   3215       return graph_function,kwargs

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   3073             arg_names=arg_names,
   3074             override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075             capture_by_value=self._capture_by_value),
   3076         self._function_attributes,
   3077         function_spec=self.function_spec,

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    984         _, original_func = tf_decorator.unwrap(python_func)
    985 
--> 986       func_outputs = python_func(*func_args, **func_kwargs)
    987 
    988       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    598         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    599         # the function a weak reference to itself to avoid a reference cycle.
--> 600         return weak_wrapped_fn().__wrapped__(*args,**kwds)
    601     weak_wrapped_fn = weakref.ref(wrapped_fn)
    602 

~/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args,**kwargs)
    971           except Exception as e:  # pylint:disable=broad-except
    972             if hasattr(e,"ag_error_metadata"):
--> 973               raise e.ag_error_metadata.to_exception(e)
    974             else:
    975               raise

AssertionError: in user code:

    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1462 predict_function  *
        return step_function(self,iterator)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1452 step_function  **
        outputs = model.distribute_strategy.run(run_step,args=(data,))
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:1211 run
        return self._extended.call_for_each_replica(fn,args=args,kwargs=kwargs)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2585 call_for_each_replica
        return self._call_for_each_replica(fn,kwargs)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2945 _call_for_each_replica
        return fn(*args,**kwargs)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1445 run_step  **
        outputs = model.predict_step(data)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1418 predict_step
        return self(x,training=False)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:985 __call__
        outputs = call_fn(inputs,**kwargs)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/functional.py:386 call
        inputs,training=training,mask=mask)
    /home/zast/.conda/envs/python3_7/lib/python3.7/site-packages/tensorflow/python/keras/engine/functional.py:517 _run_internal_graph
        assert x_id in tensor_dict,'Could not compute output ' + str(x)

    AssertionError: Could not compute output Tensor("keras_layer/cond/Identity:0", shape=(None, 768), dtype=float32)

From the warnings, I think the shape of the input layers, (None, 128), does not match the shape of the input_ids list, (1, 128), but I do not understand why input_ids ends up with that shape. In the tutorial this error does not appear. The differences are the TensorFlow version (mine is 2.3.0, the tutorial's is 2.0.0) and the BERT model (mine is the multilingual one, the tutorial uses the base model).
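
A minimal sketch of how the shape reasoning above can be checked, assuming the model and tokenizer objects were built as in the code block: wrapping each of the three input lists in an explicitly shaped NumPy array gives every input shape (1, 128), which matches the (None, 128) signature of the Input layers rather than the (None, 1, 128) shape reported in the warning. The array names below (word_ids_arr, mask_arr, segment_arr) are my own, and this is only an illustration of the shape handling, not a confirmed fix for the AssertionError.

import numpy as np

# One NumPy array per named Input layer, each with an explicit batch dimension.
word_ids_arr = np.array(input_ids, dtype=np.int32).reshape(1, max_seq_length)
mask_arr = np.array(input_masks, dtype=np.int32).reshape(1, max_seq_length)
segment_arr = np.array(input_segments, dtype=np.int32).reshape(1, max_seq_length)

print(word_ids_arr.shape, mask_arr.shape, segment_arr.shape)  # (1, 128) (1, 128) (1, 128)

# Hypothetical predict call with explicitly shaped arrays; whether this also
# avoids the AssertionError on TF 2.3.0 with the multilingual module would
# still need to be verified.
pool_embs, all_embs = model.predict([word_ids_arr, mask_arr, segment_arr])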

Workaround

No working solution for this problem has been posted yet.
