
as_list() is not defined on an unknown TensorShape. #20573

Open
apiszcz opened this issue Dec 1, 2024 · 2 comments
apiszcz commented Dec 1, 2024

Ubuntu 24.04
Python 3.12.3


keras                       3.7.0
keras-core                  0.1.7
keras-cv                    0.9.0

Following this example, the dataset displays the bounding boxes correctly; however, the augmenter step fails at:

train_ds = train_ds.map(augmenter, num_parallel_calls=tf.data.AUTOTUNE)

https://colab.research.google.com/github/keras-team/keras-io/blob/master/examples/vision/ipynb/yolov8.ipynb

augmenter = keras.Sequential(
    layers=[
        keras_cv.layers.AutoContrast((0, 255)),
    ]
)

train_ds = train_data.map(load_dataset, num_parallel_calls=tf.data.AUTOTUNE)
train_ds = train_ds.shuffle(BATCH_SIZE * 4)
train_ds = train_ds.ragged_batch(BATCH_SIZE, drop_remainder=True)
train_ds = train_ds.map(augmenter, num_parallel_calls=tf.data.AUTOTUNE)

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[14], line 4
      2 train_ds = train_ds.shuffle(BATCH_SIZE * 4)
      3 train_ds = train_ds.ragged_batch(BATCH_SIZE, drop_remainder=True)
----> 4 train_ds = train_ds.map(augmenter, num_parallel_calls=tf.data.AUTOTUNE)

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/dataset_ops.py:2341, in DatasetV2.map(self, map_func, num_parallel_calls, deterministic, synchronous, use_unbounded_threadpool, name)
   2336 # Loaded lazily due to a circular dependency (dataset_ops -> map_op ->
   2337 # dataset_ops).
   2338 # pylint: disable=g-import-not-at-top,protected-access
   2339 from tensorflow.python.data.ops import map_op
-> 2341 return map_op._map_v2(
   2342     self,
   2343     map_func,
   2344     num_parallel_calls=num_parallel_calls,
   2345     deterministic=deterministic,
   2346     synchronous=synchronous,
   2347     use_unbounded_threadpool=use_unbounded_threadpool,
   2348     name=name,
   2349 )

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/map_op.py:57, in _map_v2(input_dataset, map_func, num_parallel_calls, deterministic, synchronous, use_unbounded_threadpool, name)
     51 if synchronous:
     52   raise ValueError(
     53       "`synchronous` is not supported with `num_parallel_calls`, but"
     54       " `num_parallel_calls` was set to ",
     55       num_parallel_calls,
     56   )
---> 57 return _ParallelMapDataset(
     58     input_dataset,
     59     map_func,
     60     num_parallel_calls=num_parallel_calls,
     61     deterministic=deterministic,
     62     preserve_cardinality=True,
     63     use_unbounded_threadpool=use_unbounded_threadpool,
     64     name=name)

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/map_op.py:202, in _ParallelMapDataset.__init__(self, input_dataset, map_func, num_parallel_calls, deterministic, use_inter_op_parallelism, preserve_cardinality, use_legacy_function, use_unbounded_threadpool, name)
    200 self._input_dataset = input_dataset
    201 self._use_inter_op_parallelism = use_inter_op_parallelism
--> 202 self._map_func = structured_function.StructuredFunctionWrapper(
    203     map_func,
    204     self._transformation_name(),
    205     dataset=input_dataset,
    206     use_legacy_function=use_legacy_function)
    207 if deterministic is None:
    208   self._deterministic = "default"

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/structured_function.py:265, in StructuredFunctionWrapper.__init__(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
    258       warnings.warn(
    259           "Even though the `tf.config.experimental_run_functions_eagerly` "
    260           "option is set, this option does not apply to tf.data functions. "
    261           "To force eager execution of tf.data functions, please use "
    262           "`tf.data.experimental.enable_debug_mode()`.")
    263     fn_factory = trace_tf_function(defun_kwargs)
--> 265 self._function = fn_factory()
    266 # There is no graph to add in eager mode.
    267 add_to_graph &= not context.executing_eagerly()

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:1251, in Function.get_concrete_function(self, *args, **kwargs)
   1249 def get_concrete_function(self, *args, **kwargs):
   1250   # Implements PolymorphicFunction.get_concrete_function.
-> 1251   concrete = self._get_concrete_function_garbage_collected(*args, **kwargs)
   1252   concrete._garbage_collector.release()  # pylint: disable=protected-access
   1253   return concrete

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:1221, in Function._get_concrete_function_garbage_collected(self, *args, **kwargs)
   1219   if self._variable_creation_config is None:
   1220     initializers = []
-> 1221     self._initialize(args, kwargs, add_initializers_to=initializers)
   1222     self._initialize_uninitialized_variables(initializers)
   1224 if self._created_variables:
   1225   # In this case we have created variables on the first call, so we run the
   1226   # version which is guaranteed to never create variables.

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:696, in Function._initialize(self, args, kwds, add_initializers_to)
    691 self._variable_creation_config = self._generate_scoped_tracing_options(
    692     variable_capturing_scope,
    693     tracing_compilation.ScopeType.VARIABLE_CREATION,
    694 )
    695 # Force the definition of the function for these arguments
--> 696 self._concrete_variable_creation_fn = tracing_compilation.trace_function(
    697     args, kwds, self._variable_creation_config
    698 )
    700 def invalid_creator_scope(*unused_args, **unused_kwds):
    701   """Disables variable creation."""

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:178, in trace_function(args, kwargs, tracing_options)
    175     args = tracing_options.input_signature
    176     kwargs = {}
--> 178   concrete_function = _maybe_define_function(
    179       args, kwargs, tracing_options
    180   )
    182 if not tracing_options.bind_graph_to_function:
    183   concrete_function._garbage_collector.release()  # pylint: disable=protected-access

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:283, in _maybe_define_function(args, kwargs, tracing_options)
    281 else:
    282   target_func_type = lookup_func_type
--> 283 concrete_function = _create_concrete_function(
    284     target_func_type, lookup_func_context, func_graph, tracing_options
    285 )
    287 if tracing_options.function_cache is not None:
    288   tracing_options.function_cache.add(
    289       concrete_function, current_func_context
    290   )

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:310, in _create_concrete_function(function_type, type_context, func_graph, tracing_options)
    303   placeholder_bound_args = function_type.placeholder_arguments(
    304       placeholder_context
    305   )
    307 disable_acd = tracing_options.attributes and tracing_options.attributes.get(
    308     attributes_lib.DISABLE_ACD, False
    309 )
--> 310 traced_func_graph = func_graph_module.func_graph_from_py_func(
    311     tracing_options.name,
    312     tracing_options.python_function,
    313     placeholder_bound_args.args,
    314     placeholder_bound_args.kwargs,
    315     None,
    316     func_graph=func_graph,
    317     add_control_dependencies=not disable_acd,
    318     arg_names=function_type_utils.to_arg_names(function_type),
    319     create_placeholders=False,
    320 )
    322 transform.apply_func_graph_transforms(traced_func_graph)
    324 graph_capture_container = traced_func_graph.function_captures

File ~/test/lib/python3.12/site-packages/tensorflow/python/framework/func_graph.py:1059, in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, create_placeholders)
   1056   return x
   1058 _, original_func = tf_decorator.unwrap(python_func)
-> 1059 func_outputs = python_func(*func_args, **func_kwargs)
   1061 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
   1062 # TensorArrays and `None`s.
   1063 func_outputs = variable_utils.convert_variables_to_tensors(func_outputs)

File ~/test/lib/python3.12/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:599, in Function._generate_scoped_tracing_options.<locals>.wrapped_fn(*args, **kwds)
    595 with default_graph._variable_creator_scope(scope, priority=50):  # pylint: disable=protected-access
    596   # __wrapped__ allows AutoGraph to swap in a converted function. We give
    597   # the function a weak reference to itself to avoid a reference cycle.
    598   with OptionalXlaContext(compile_with_xla):
--> 599     out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    600   return out

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/structured_function.py:231, in StructuredFunctionWrapper.__init__.<locals>.trace_tf_function.<locals>.wrapped_fn(*args)
    230 def wrapped_fn(*args):  # pylint: disable=missing-docstring
--> 231   ret = wrapper_helper(*args)
    232   ret = structure.to_tensor_list(self._output_structure, ret)
    233   return [ops.convert_to_tensor(t) for t in ret]

File ~/test/lib/python3.12/site-packages/tensorflow/python/data/ops/structured_function.py:161, in StructuredFunctionWrapper.__init__.<locals>.wrapper_helper(*args)
    159 if not _should_unpack(nested_args):
    160   nested_args = (nested_args,)
--> 161 ret = autograph.tf_convert(self._func, ag_ctx)(*nested_args)
    162 ret = variable_utils.convert_variables_to_tensors(ret)
    163 if _should_pack(ret):

File ~/test/lib/python3.12/site-packages/tensorflow/python/autograph/impl/api.py:690, in convert.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
    688 try:
    689   with conversion_ctx:
--> 690     return converted_call(f, args, kwargs, options=options)
    691 except Exception as e:  # pylint:disable=broad-except
    692   if hasattr(e, 'ag_error_metadata'):

File ~/test/lib/python3.12/site-packages/tensorflow/python/autograph/impl/api.py:377, in converted_call(f, args, kwargs, caller_fn_scope, options)
    374   return _call_unconverted(f, args, kwargs, options)
    376 if not options.user_requested and conversion.is_allowlisted(f):
--> 377   return _call_unconverted(f, args, kwargs, options)
    379 # internal_convert_user_code is for example turned off when issuing a dynamic
    380 # call conversion from generated code while in nonrecursive mode. In that
    381 # case we evidently don't want to recurse, but we still have to convert
    382 # things like builtins.
    383 if not options.internal_convert_user_code:

File ~/test/lib/python3.12/site-packages/tensorflow/python/autograph/impl/api.py:459, in _call_unconverted(f, args, kwargs, options, update_cache)
    456   return f.__self__.call(args, kwargs)
    458 if kwargs is not None:
--> 459   return f(*args, **kwargs)
    460 return f(*args)

File ~/test/lib/python3.12/site-packages/keras/src/utils/traceback_utils.py:122, in filter_traceback.<locals>.error_handler(*args, **kwargs)
    119     filtered_tb = _process_traceback_frames(e.__traceback__)
    120     # To get the full stack trace, call:
    121     # `keras.config.disable_traceback_filtering()`
--> 122     raise e.with_traceback(filtered_tb) from None
    123 finally:
    124     del filtered_tb

File ~/test/lib/python3.12/site-packages/optree/ops.py:747, in tree_map(func, tree, is_leaf, none_is_leaf, namespace, *rests)
    745 leaves, treespec = _C.flatten(tree, is_leaf, none_is_leaf, namespace)
    746 flat_args = [leaves] + [treespec.flatten_up_to(r) for r in rests]
--> 747 return treespec.unflatten(map(func, *flat_args))

ValueError: as_list() is not defined on an unknown TensorShape.

fchollet (Collaborator) commented Dec 4, 2024

It looks like your dataset is yielding tensors that have no shape (i.e. not even a rank), and that isn't supported by augmentation layers. Fix: make sure your dataset yields tensors with at least a defined rank and a defined channel dimension (e.g. (None, None, None, 3) would be OK).

Note that you can use set_shape to achieve this, if you know the shape of your tensors but TF doesn't.
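
For example, here is a minimal sketch of that idea (assuming the dataset elements are dicts with an "images" key, as in the YOLOv8 example; ensure_image_shape is a hypothetical helper) that declares a known rank and channel dimension for each image before batching:

# Hypothetical helper: set_shape refines the static shape TF tracks, so each
# per-example image is known to be rank 3 with 3 channels. Adjust the key
# name and shape to match your data.
def ensure_image_shape(inputs):
    inputs["images"].set_shape([None, None, 3])  # height, width, channels
    return inputs

train_ds = train_data.map(load_dataset, num_parallel_calls=tf.data.AUTOTUNE)
train_ds = train_ds.map(ensure_image_shape, num_parallel_calls=tf.data.AUTOTUNE)
train_ds = train_ds.shuffle(BATCH_SIZE * 4)
train_ds = train_ds.ragged_batch(BATCH_SIZE, drop_remainder=True)
train_ds = train_ds.map(augmenter, num_parallel_calls=tf.data.AUTOTUNE)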

apiszcz (Author) commented Dec 4, 2024

I am attempting to create them as the example shows, using a list of lists for each element.

[image attachment]
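
For reference, printing the element spec right before the failing map is a quick way to confirm whether the rank is actually unknown (a diagnostic check, not a fix):

# An unknown rank shows up as shape=<unknown> in the printed spec.
print(train_ds.element_spec)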
