Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: dapricot_test #1876

Draft
wants to merge 1 commit into
base: tfdsv4
Choose a base branch
from

Conversation

ppark-twosixtech
Copy link
Contributor

No description provided.

@ppark-twosixtech
Copy link
Contributor Author

armory.datasets.load.load throws an error currently

File /workspace/armory/datasets/load.py:69, in load(name, version, config, data_dir, download_cached, verify, overwrite, public, **as_dataset_kwargs)
     67 try:
     68     builder = tfds.builder(name, version=version, data_dir=data_dir, config=config)
---> 69     ds = builder.as_dataset(**as_dataset_kwargs)
     70     return builder.info, ds
     71 except (tfds.core.registered.DatasetNotFoundError, AssertionError):

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/logging/__init__.py:81, in as_dataset.<locals>.decorator(function, builder, args, kwargs)
     68 for logger in _get_registered_loggers():
     69   logger.as_dataset(
     70       dataset_name=builder.name,
     71       config_name=config_name,
   (...)
     78       as_supervised=kwargs.get('as_supervised', False),
     79       decoders=kwargs.get('decoders', None))
---> 81 return function(*args, **kwargs)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/dataset_builder.py:607, in DatasetBuilder.as_dataset(self, split, batch_size, shuffle_files, decoders, read_config, as_supervised)
    598 # Create a dataset for each of the given splits
    599 build_single_dataset = functools.partial(
    600     self._build_single_dataset,
    601     shuffle_files=shuffle_files,
   (...)
    605     as_supervised=as_supervised,
    606 )
--> 607 all_ds = tf.nest.map_structure(build_single_dataset, split)
    608 return all_ds

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917, in map_structure(func, *structure, **kwargs)
    913 flat_structure = (flatten(s, expand_composites) for s in structure)
    914 entries = zip(*flat_structure)
    916 return pack_sequence_as(
--> 917     structure[0], [func(*x) for x in entries],
    918     expand_composites=expand_composites)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917, in <listcomp>(.0)
    913 flat_structure = (flatten(s, expand_composites) for s in structure)
    914 entries = zip(*flat_structure)
    916 return pack_sequence_as(
--> 917     structure[0], [func(*x) for x in entries],
    918     expand_composites=expand_composites)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/dataset_builder.py:625, in DatasetBuilder._build_single_dataset(self, split, batch_size, shuffle_files, decoders, read_config, as_supervised)
    622   batch_size = self.info.splits.total_num_examples or sys.maxsize
    624 # Build base dataset
--> 625 ds = self._as_dataset(
    626     split=split,
    627     shuffle_files=shuffle_files,
    628     decoders=decoders,
    629     read_config=read_config,
    630 )
    631 # Auto-cache small datasets which are small enough to fit in memory.
    632 if self._should_cache_ds(
    633     split=split, shuffle_files=shuffle_files, read_config=read_config):

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/dataset_builder.py:991, in FileReaderBuilder._as_dataset(self, split, decoders, read_config, shuffle_files)
    985 reader = reader_lib.Reader(
    986     self._data_dir,
    987     example_specs=example_specs,
    988     file_format=self.info.file_format,
    989 )
    990 decode_fn = functools.partial(features.decode_example, decoders=decoders)
--> 991 return reader.read(
    992     instructions=split,
    993     split_infos=self.info.splits.values(),
    994     decode_fn=decode_fn,
    995     read_config=read_config,
    996     shuffle_files=shuffle_files,
    997     disable_shuffling=self.info.disable_shuffling,
    998 )

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/reader.py:346, in Reader.read(self, instructions, split_infos, read_config, shuffle_files, disable_shuffling, decode_fn)
    337   file_instructions = splits_dict[instruction].file_instructions
    338   return self.read_files(
    339       file_instructions,
    340       read_config=read_config,
   (...)
    343       decode_fn=decode_fn,
    344   )
--> 346 return tf.nest.map_structure(_read_instruction_to_ds, instructions)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917, in map_structure(func, *structure, **kwargs)
    913 flat_structure = (flatten(s, expand_composites) for s in structure)
    914 entries = zip(*flat_structure)
    916 return pack_sequence_as(
--> 917     structure[0], [func(*x) for x in entries],
    918     expand_composites=expand_composites)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917, in <listcomp>(.0)
    913 flat_structure = (flatten(s, expand_composites) for s in structure)
    914 entries = zip(*flat_structure)
    916 return pack_sequence_as(
--> 917     structure[0], [func(*x) for x in entries],
    918     expand_composites=expand_composites)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/reader.py:338, in Reader.read.<locals>._read_instruction_to_ds(instruction)
    336 def _read_instruction_to_ds(instruction):
    337   file_instructions = splits_dict[instruction].file_instructions
--> 338   return self.read_files(
    339       file_instructions,
    340       read_config=read_config,
    341       shuffle_files=shuffle_files,
    342       disable_shuffling=disable_shuffling,
    343       decode_fn=decode_fn,
    344   )

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/reader.py:402, in Reader.read_files(self, file_instructions, read_config, shuffle_files, disable_shuffling, decode_fn)
    398 if read_config and read_config.add_tfds_id:
    399   parse_and_decode = functools.partial(
    400       _decode_with_id, decode_fn=parse_and_decode)
--> 402 ds = ds.map(
    403     parse_and_decode,
    404     num_parallel_calls=read_config.num_parallel_calls_for_decode,
    405 )
    406 return ds

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/data/ops/dataset_ops.py:2204, in DatasetV2.map(self, map_func, num_parallel_calls, deterministic, name)
   2202   return MapDataset(self, map_func, preserve_cardinality=True, name=name)
   2203 else:
-> 2204   return ParallelMapDataset(
   2205       self,
   2206       map_func,
   2207       num_parallel_calls,
   2208       deterministic,
   2209       preserve_cardinality=True,
   2210       name=name)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/data/ops/dataset_ops.py:5441, in ParallelMapDataset.__init__(self, input_dataset, map_func, num_parallel_calls, deterministic, use_inter_op_parallelism, preserve_cardinality, use_legacy_function, name)
   5439 self._input_dataset = input_dataset
   5440 self._use_inter_op_parallelism = use_inter_op_parallelism
-> 5441 self._map_func = structured_function.StructuredFunctionWrapper(
   5442     map_func,
   5443     self._transformation_name(),
   5444     dataset=input_dataset,
   5445     use_legacy_function=use_legacy_function)
   5446 if deterministic is None:
   5447   self._deterministic = "default"

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/data/ops/structured_function.py:271, in StructuredFunctionWrapper.__init__(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
    264       warnings.warn(
    265           "Even though the `tf.config.experimental_run_functions_eagerly` "
    266           "option is set, this option does not apply to tf.data functions. "
    267           "To force eager execution of tf.data functions, please use "
    268           "`tf.data.experimental.enable_debug_mode()`.")
    269     fn_factory = trace_tf_function(defun_kwargs)
--> 271 self._function = fn_factory()
    272 # There is no graph to add in eager mode.
    273 add_to_graph &= not context.executing_eagerly()

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/eager/function.py:2610, in Function.get_concrete_function(self, *args, **kwargs)
   2601 def get_concrete_function(self, *args, **kwargs):
   2602   """Returns a `ConcreteFunction` specialized to inputs and execution context.
   2603 
   2604   Args:
   (...)
   2608        or `tf.Tensor` or `tf.TensorSpec`.
   2609   """
-> 2610   graph_function = self._get_concrete_function_garbage_collected(
   2611       *args, **kwargs)
   2612   graph_function._garbage_collector.release()  # pylint: disable=protected-access
   2613   return graph_function

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/eager/function.py:2576, in Function._get_concrete_function_garbage_collected(self, *args, **kwargs)
   2574   args, kwargs = None, None
   2575 with self._lock:
-> 2576   graph_function, _ = self._maybe_define_function(args, kwargs)
   2577   seen_names = set()
   2578   captured = object_identity.ObjectIdentitySet(
   2579       graph_function.graph.internal_captures)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/eager/function.py:2760, in Function._maybe_define_function(self, args, kwargs)
   2758   # Only get placeholders for arguments, not captures
   2759   args, kwargs = placeholder_dict["args"]
-> 2760 graph_function = self._create_graph_function(args, kwargs)
   2762 graph_capture_container = graph_function.graph._capture_func_lib  # pylint: disable=protected-access
   2763 # Maintain the list of all captures

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/eager/function.py:2670, in Function._create_graph_function(self, args, kwargs)
   2665 missing_arg_names = [
   2666     "%s_%d" % (arg, i) for i, arg in enumerate(missing_arg_names)
   2667 ]
   2668 arg_names = base_arg_names + missing_arg_names
   2669 graph_function = ConcreteFunction(
-> 2670     func_graph_module.func_graph_from_py_func(
   2671         self._name,
   2672         self._python_function,
   2673         args,
   2674         kwargs,
   2675         self.input_signature,
   2676         autograph=self._autograph,
   2677         autograph_options=self._autograph_options,
   2678         arg_names=arg_names,
   2679         capture_by_value=self._capture_by_value),
   2680     self._function_attributes,
   2681     spec=self.function_spec,
   2682     # Tell the ConcreteFunction to clean up its graph once it goes out of
   2683     # scope. This is not the default behavior since it gets used in some
   2684     # places (like Keras) where the FuncGraph lives longer than the
   2685     # ConcreteFunction.
   2686     shared_func_graph=False)
   2687 return graph_function

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/framework/func_graph.py:1247, in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, acd_record_initial_resource_uses)
   1244 else:
   1245   _, original_func = tf_decorator.unwrap(python_func)
-> 1247 func_outputs = python_func(*func_args, **func_kwargs)
   1249 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
   1250 # TensorArrays and `None`s.
   1251 func_outputs = nest.map_structure(
   1252     convert, func_outputs, expand_composites=True)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/data/ops/structured_function.py:248, in StructuredFunctionWrapper.__init__.<locals>.trace_tf_function.<locals>.wrapped_fn(*args)
    242 @eager_function.defun_with_attributes(
    243     input_signature=structure.get_flat_tensor_specs(
    244         self._input_structure),
    245     autograph=False,
    246     attributes=defun_kwargs)
    247 def wrapped_fn(*args):  # pylint: disable=missing-docstring
--> 248   ret = wrapper_helper(*args)
    249   ret = structure.to_tensor_list(self._output_structure, ret)
    250   return [ops.convert_to_tensor(t) for t in ret]

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/data/ops/structured_function.py:177, in StructuredFunctionWrapper.__init__.<locals>.wrapper_helper(*args)
    175 if not _should_unpack(nested_args):
    176   nested_args = (nested_args,)
--> 177 ret = autograph.tf_convert(self._func, ag_ctx)(*nested_args)
    178 if _should_pack(ret):
    179   ret = tuple(ret)

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/autograph/impl/api.py:689, in convert.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
    687 try:
    688   with conversion_ctx:
--> 689     return converted_call(f, args, kwargs, options=options)
    690 except Exception as e:  # pylint:disable=broad-except
    691   if hasattr(e, 'ag_error_metadata'):

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/autograph/impl/api.py:377, in converted_call(f, args, kwargs, caller_fn_scope, options)
    374   return _call_unconverted(f, args, kwargs, options)
    376 if not options.user_requested and conversion.is_allowlisted(f):
--> 377   return _call_unconverted(f, args, kwargs, options)
    379 # internal_convert_user_code is for example turned off when issuing a dynamic
    380 # call conversion from generated code while in nonrecursive mode. In that
    381 # case we evidently don't want to recurse, but we still have to convert
    382 # things like builtins.
    383 if not options.internal_convert_user_code:

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/autograph/impl/api.py:458, in _call_unconverted(f, args, kwargs, options, update_cache)
    455   return f.__self__.call(args, kwargs)
    457 if kwargs is not None:
--> 458   return f(*args, **kwargs)
    459 return f(*args)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/reader.py:392, in Reader.read_files.<locals>.parse_and_decode(ex)
    387 def parse_and_decode(ex: Tensor) -> TreeDict[Tensor]:
    388   # TODO(pierrot): `parse_example` uses
    389   # `tf.io.parse_single_example`. It might be faster to use `parse_example`,
    390   # after batching.
    391   # https://www.tensorflow.org/api_docs/python/tf/io/parse_example
--> 392   ex = self._parser.parse_example(ex)
    393   if decode_fn:
    394     ex = decode_fn(ex)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/example_parser.py:65, in ExampleParser.parse_example(self, serialized_example)
     59 example = tf.io.parse_single_example(
     60     serialized=serialized_example,
     61     features=self.flat_feature_specs,
     62 )
     63 example = utils.pack_as_nest_dict(example, self._nested_feature_specs)
---> 65 example = {  # pylint:disable=g-complex-comprehension
     66     k: _deserialize_single_field(example_data, tensor_info)
     67     for k, (
     68         example_data,
     69         tensor_info) in utils.zip_dict(example, self._flat_example_specs)
     70 }
     71 # Reconstruct all nesting
     72 example = utils.pack_as_nest_dict(example, self._example_specs)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/example_parser.py:66, in <dictcomp>(.0)
     59 example = tf.io.parse_single_example(
     60     serialized=serialized_example,
     61     features=self.flat_feature_specs,
     62 )
     63 example = utils.pack_as_nest_dict(example, self._nested_feature_specs)
     65 example = {  # pylint:disable=g-complex-comprehension
---> 66     k: _deserialize_single_field(example_data, tensor_info)
     67     for k, (
     68         example_data,
     69         tensor_info) in utils.zip_dict(example, self._flat_example_specs)
     70 }
     71 # Reconstruct all nesting
     72 example = utils.pack_as_nest_dict(example, self._example_specs)

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/example_parser.py:98, in _deserialize_single_field(example_data, tensor_info)
     96 # Ragged tensor case:
     97 if tensor_info.sequence_rank > 1:
---> 98   example_data = _dict_to_ragged(example_data, tensor_info)
    100 # Restore shape if possible. TF Example flattened it.
    101 elif tensor_info.shape.count(None) < 2:

File /opt/conda/lib/python3.9/site-packages/tensorflow_datasets/core/example_parser.py:114, in _dict_to_ragged(example_data, tensor_info)
    111 def _dict_to_ragged(example_data, tensor_info):
    112   """Reconstruct the ragged tensor from the row ids."""
    113   return tf.RaggedTensor.from_nested_row_lengths(
--> 114       flat_values=example_data["ragged_flat_values"],
    115       nested_row_lengths=[
    116           example_data["ragged_row_lengths_{}".format(k)]
    117           for k in range(tensor_info.sequence_rank - 1)
    118       ],
    119   )

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/util/traceback_utils.py:153, in filter_traceback.<locals>.error_handler(*args, **kwargs)
    151 except Exception as e:
    152   filtered_tb = _process_traceback_frames(e.__traceback__)
--> 153   raise e.with_traceback(filtered_tb) from None
    154 finally:
    155   del filtered_tb

File /opt/conda/lib/python3.9/site-packages/tensorflow/python/ops/array_ops.py:906, in _check_index(idx)
    901 dtype = getattr(idx, "dtype", None)
    902 if (dtype is None or dtypes.as_dtype(dtype) not in _SUPPORTED_SLICE_DTYPES or
    903     idx.shape and len(idx.shape) == 1):
    904   # TODO(slebedev): IndexError seems more appropriate here, but it
    905   # will break `_slice_helper` contract.
--> 906   raise TypeError(_SLICE_TYPE_ERROR + ", got {!r}".format(idx))

TypeError: Only integers, slices (`:`), ellipsis (`...`), tf.newaxis (`None`) and scalar tf.int32/tf.int64 tensors are valid indices, got 'ragged_flat_values'

@ppark-twosixtech
Copy link
Contributor Author

ppark-twosixtech commented Feb 28, 2023

Above comment seems to be a known bug (see tensorflow/datasets#2243, also reported in #1394) - may have to consider waiting for a fix or changing the DapricotTest._info code

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

Successfully merging this pull request may close these issues.

2 participants