This article discusses how to handle the question "Can TensorFlow Hub modules be used in TensorFlow 2.0?". The answer below should be a useful reference for anyone running into the same problem.
Problem description
I tried running this code in TensorFlow 2.0 (alpha):
import tensorflow as tf
import tensorflow_hub as hub

@tf.function
def elmo(texts):
    elmo_module = hub.Module("https://tfhub.dev/google/elmo/2", trainable=True)
    return elmo_module(texts, signature="default", as_dict=True)

embeds = elmo(tf.constant(["the cat is on the mat",
                           "dogs are in the fog"]))
But I got this error:
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-1-c7f14c7ed0e9> in <module>
9
10 elmo(tf.constant(["the cat is on the mat",
---> 11 "dogs are in the fog"]))
.../tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
417 # This is the first call of __call__, so we have to initialize.
418 initializer_map = {}
--> 419 self._initialize(args, kwds, add_initializers_to=initializer_map)
420 if self._created_variables:
421 try:
.../tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
361 self._concrete_stateful_fn = (
362 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 363 *args, **kwds))
364
365 def invalid_creator_scope(*unused_args, **unused_kwds):
.../tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
1322 if self.input_signature:
1323 args, kwargs = None, None
-> 1324 graph_function, _, _ = self._maybe_define_function(args, kwargs)
1325 return graph_function
1326
.../tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
1585 or call_context_key not in self._function_cache.missed):
1586 self._function_cache.missed.add(call_context_key)
-> 1587 graph_function = self._create_graph_function(args, kwargs)
1588 self._function_cache.primary[cache_key] = graph_function
1589 return graph_function, args, kwargs
.../tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
1518 arg_names=arg_names,
1519 override_flat_arg_shapes=override_flat_arg_shapes,
-> 1520 capture_by_value=self._capture_by_value),
1521 self._function_attributes)
1522
.../tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
705 converted_func)
706
--> 707 func_outputs = python_func(*func_args, **func_kwargs)
708
709 # invariant: `func_outputs` contains only Tensors, IndexedSlices,
.../tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
314 # __wrapped__ allows AutoGraph to swap in a converted function. We give
315 # the function a weak reference to itself to avoid a reference cycle.
--> 316 return weak_wrapped_fn().__wrapped__(*args, **kwds)
317 weak_wrapped_fn = weakref.ref(wrapped_fn)
318
.../tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
697 optional_features=autograph_options,
698 force_conversion=True,
--> 699 ), args, kwargs)
700
701 # Wrapping around a decorator allows checks like tf_inspect.getargspec
.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
355
356 if kwargs is not None:
--> 357 result = converted_f(*effective_args, **kwargs)
358 else:
359 result = converted_f(*effective_args)
/var/folders/wy/h39t6kb11pnbb0pzhksd_fqh0000gn/T/tmp4v3g2d_1.py in tf__elmo(texts)
11 retval_ = None
12 print('Eager:', ag__.converted_call('executing_eagerly', tf, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (), None))
---> 13 elmo_module = ag__.converted_call('Module', hub, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), ('https://tfhub.dev/google/elmo/2',), {'trainable': True})
14 do_return = True
15 retval_ = ag__.converted_call(elmo_module, None, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (texts,), {'signature': 'default', 'as_dict': True})
.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
252 if tf_inspect.isclass(f):
253 logging.log(2, 'Permanently whitelisted: %s: constructor', f)
--> 254 return _call_unconverted(f, args, kwargs)
255
256 # Other built-in modules are permanently whitelisted.
.../tensorflow/python/autograph/impl/api.py in _call_unconverted(f, args, kwargs)
174
175 if kwargs is not None:
--> 176 return f(*args, **kwargs)
177 else:
178 return f(*args)
.../tensorflow_hub/module.py in __init__(self, spec, trainable, name, tags)
167 name=self._name,
168 trainable=self._trainable,
--> 169 tags=self._tags)
170 # pylint: enable=protected-access
171
.../tensorflow_hub/native_module.py in _create_impl(self, name, trainable, tags)
338 trainable=trainable,
339 checkpoint_path=self._checkpoint_variables_path,
--> 340 name=name)
341
342 def _export(self, path, variables_saver):
.../tensorflow_hub/native_module.py in __init__(self, spec, meta_graph, trainable, checkpoint_path, name)
389 # TPU training code.
390 with tf.init_scope():
--> 391 self._init_state(name)
392
393 def _init_state(self, name):
.../tensorflow_hub/native_module.py in _init_state(self, name)
392
393 def _init_state(self, name):
--> 394 variable_tensor_map, self._state_map = self._create_state_graph(name)
395 self._variable_map = recover_partitioned_variable_map(
396 get_node_map_from_tensor_map(variable_tensor_map))
.../tensorflow_hub/native_module.py in _create_state_graph(self, name)
449 meta_graph,
450 input_map={},
--> 451 import_scope=relative_scope_name)
452
453 # Build a list from the variable name in the module definition to the actual
.../tensorflow/python/training/saver.py in import_meta_graph(meta_graph_or_file, clear_devices, import_scope, **kwargs)
1443 """ # pylint: disable=g-doc-exception
1444 return _import_meta_graph_with_return_elements(
-> 1445 meta_graph_or_file, clear_devices, import_scope, **kwargs)[0]
1446
1447
.../tensorflow/python/training/saver.py in _import_meta_graph_with_return_elements(meta_graph_or_file, clear_devices, import_scope, return_elements, **kwargs)
1451 """Import MetaGraph, and return both a saver and returned elements."""
1452 if context.executing_eagerly():
-> 1453 raise RuntimeError("Exporting/importing meta graphs is not supported when "
1454 "eager execution is enabled. No graph exists when eager "
1455 "execution is enabled.")
RuntimeError: Exporting/importing meta graphs is not supported when eager execution is enabled. No graph exists when eager execution is enabled.
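The last frame explains the failure: hub.Module loads a TF1-style MetaGraph via tf.train.import_meta_graph, which is only possible while building a graph, and TensorFlow 2.0 executes eagerly by default. As a stopgap, the original code can still run through the TF1 compatibility layer; the sketch below assumes you accept graph mode and sessions rather than the idiomatic TF2 approach in the answer that follows:

import tensorflow.compat.v1 as tf
import tensorflow_hub as hub

tf.disable_eager_execution()  # hub.Module only works in graph mode

elmo_module = hub.Module("https://tfhub.dev/google/elmo/2", trainable=True)
embeds = elmo_module(tf.constant(["the cat is on the mat",
                                  "dogs are in the fog"]),
                     signature="default", as_dict=True)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(tf.tables_initializer())  # ELMo builds string-lookup tables
    result = sess.run(embeds["elmo"])  # [2, max_tokens, 1024] embeddings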
Recommended answer
In TensorFlow 2.0 you should be using hub.load() or hub.KerasLayer().
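For example, here is a minimal sketch with a TF2-format text-embedding module (the tf2-preview NNLM URL below is just an illustration; any TF2 SavedModel on tfhub.dev works the same way):

import tensorflow as tf
import tensorflow_hub as hub

# hub.load() returns the SavedModel as a callable object.
embed = hub.load("https://tfhub.dev/google/tf2-preview/nnlm-en-dim128/1")
embeddings = embed(tf.constant(["the cat is on the mat",
                                "dogs are in the fog"]))  # shape [2, 128]

# hub.KerasLayer() wraps the same module as a layer in a Keras model.
model = tf.keras.Sequential([
    hub.KerasLayer("https://tfhub.dev/google/tf2-preview/nnlm-en-dim128/1",
                   input_shape=[], dtype=tf.string, trainable=False),
    tf.keras.layers.Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy")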
[April 2019] - For now, only TensorFlow 2.0 modules are loadable via them. In the future, many 1.x Hub modules should become loadable as well.
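Once that support landed, a TF1-format module such as ELMo could be consumed roughly as sketched below; the signature and signature_outputs_as_dict arguments assume a tensorflow_hub release newer than the one available when this answer was written:

import tensorflow as tf
import tensorflow_hub as hub

# Hedged sketch: requires a tensorflow_hub release that can load
# TF1 Hub-format modules in TF2 (not yet possible in April 2019).
elmo = hub.KerasLayer("https://tfhub.dev/google/elmo/2",
                      signature="default",
                      signature_outputs_as_dict=True,
                      trainable=False)

outputs = elmo(tf.constant(["the cat is on the mat",
                            "dogs are in the fog"]))
embeddings = outputs["elmo"]  # same keys as the old as_dict=True result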
For the 2.x-only modules, you can see examples in the notebooks created for the modules here
That concludes this article on whether TensorFlow Hub modules can be used in TensorFlow 2.0. We hope the recommended answer helps, and thanks for your support!