Dataset Viewer
Duplicate
The dataset viewer is not available for this split.
Cannot load the dataset split (in streaming mode) to extract the first rows.
Error code:   StreamingRowsError
Exception:    CastError
Message:      Couldn't cast
best_global_step: null
best_metric: null
best_model_checkpoint: null
epoch: double
eval_steps: double
global_step: int64
is_hyper_param_search: bool
is_local_process_zero: bool
is_world_process_zero: bool
log_history: list<item: struct<debug/num_lat_loss: double, debug/num_lat_total: double, debug/num_tok_loss: doubl (... 184 chars omitted)
  child 0, item: struct<debug/num_lat_loss: double, debug/num_lat_total: double, debug/num_tok_loss: double, debug/nu (... 172 chars omitted)
      child 0, debug/num_lat_loss: double
      child 1, debug/num_lat_total: double
      child 2, debug/num_tok_loss: double
      child 3, debug/num_tok_total: double
      child 4, epoch: double
      child 5, step: int64
      child 6, train/ce_loss: double
      child 7, train/diffusion_loss: double
      child 8, train/learning_rate_real: double
      child 9, learning_rate: double
      child 10, loss: double
logging_steps: int64
max_steps: int64
num_input_tokens_seen: int64
num_train_epochs: int64
save_steps: int64
stateful_callbacks: struct<TrainerControl: struct<args: struct<should_epoch_stop: bool, should_evaluate: bool, should_lo (... 79 chars omitted)
  child 0, TrainerControl: struct<args: struct<should_epoch_stop: bool, should_evaluate: bool, should_log: bool, should_save: b (... 55 chars omitted)
      child 0, args: struct<should_epoch_stop: bool, should_evaluate: bool, should_log: bool, should_save: bool, should_t (... 19 chars omitted)
          child 0, should_epoch_stop: bool
          child 1, should_evaluate: bool
          child 2, should_log: bool
          child 3, should_save: bool
          child 4, should_training_stop: bool
      child 1, attributes: struct<>
total_flos: double
train_batch_size: int64
trial_name: null
trial_params: null
corda_config: null
target_modules: list<item: string>
  child 0, item: string
use_dora: bool
r: int64
arrow_config: null
ensure_weight_tying: bool
target_parameters: null
alpha_pattern: struct<>
rank_pattern: struct<>
peft_type: string
init_lora_weights: bool
use_rslora: bool
inference_mode: bool
auto_mapping: null
megatron_config: null
eva_config: null
task_type: string
revision: null
modules_to_save: null
layers_pattern: null
use_qalora: bool
lora_alpha: int64
lora_bias: bool
layer_replication: null
loftq_config: struct<>
qalora_group_size: int64
peft_version: string
trainable_token_indices: null
layers_to_transform: null
megatron_core: string
fan_in_fan_out: bool
base_model_name_or_path: string
exclude_modules: null
bias: string
lora_dropout: double
alora_invocation_tokens: null
to
{'alora_invocation_tokens': Value('null'), 'alpha_pattern': {}, 'arrow_config': Value('null'), 'auto_mapping': Value('null'), 'base_model_name_or_path': Value('string'), 'bias': Value('string'), 'corda_config': Value('null'), 'ensure_weight_tying': Value('bool'), 'eva_config': Value('null'), 'exclude_modules': Value('null'), 'fan_in_fan_out': Value('bool'), 'inference_mode': Value('bool'), 'init_lora_weights': Value('bool'), 'layer_replication': Value('null'), 'layers_pattern': Value('null'), 'layers_to_transform': Value('null'), 'loftq_config': {}, 'lora_alpha': Value('int64'), 'lora_bias': Value('bool'), 'lora_dropout': Value('float64'), 'megatron_config': Value('null'), 'megatron_core': Value('string'), 'modules_to_save': Value('null'), 'peft_type': Value('string'), 'peft_version': Value('string'), 'qalora_group_size': Value('int64'), 'r': Value('int64'), 'rank_pattern': {}, 'revision': Value('null'), 'target_modules': List(Value('string')), 'target_parameters': Value('null'), 'task_type': Value('string'), 'trainable_token_indices': Value('null'), 'use_dora': Value('bool'), 'use_qalora': Value('bool'), 'use_rslora': Value('bool')}
because column names don't match
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/src/worker/utils.py", line 99, in get_rows_or_raise
                  return get_rows(
                         ^^^^^^^^^
                File "/src/libs/libcommon/src/libcommon/utils.py", line 272, in decorator
                  return func(*args, **kwargs)
                         ^^^^^^^^^^^^^^^^^^^^^
                File "/src/services/worker/src/worker/utils.py", line 77, in get_rows
                  rows_plus_one = list(itertools.islice(ds, rows_max_number + 1))
                                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/iterable_dataset.py", line 2690, in __iter__
                  for key, example in ex_iterable:
                                      ^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/iterable_dataset.py", line 2227, in __iter__
                  for key, pa_table in self._iter_arrow():
                                       ^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/iterable_dataset.py", line 2251, in _iter_arrow
                  for key, pa_table in self.ex_iterable._iter_arrow():
                                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/iterable_dataset.py", line 494, in _iter_arrow
                  for key, pa_table in iterator:
                                       ^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/iterable_dataset.py", line 384, in _iter_arrow
                  for key, pa_table in self.generate_tables_fn(**gen_kwags):
                                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/packaged_modules/json/json.py", line 265, in _generate_tables
                  self._cast_table(pa_table, json_field_paths=json_field_paths),
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/packaged_modules/json/json.py", line 120, in _cast_table
                  pa_table = table_cast(pa_table, self.info.features.arrow_schema)
                             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/table.py", line 2272, in table_cast
                  return cast_table_to_schema(table, schema)
                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                File "/usr/local/lib/python3.12/site-packages/datasets/table.py", line 2218, in cast_table_to_schema
                  raise CastError(
              datasets.table.CastError: Couldn't cast
              best_global_step: null
              best_metric: null
              best_model_checkpoint: null
              epoch: double
              eval_steps: double
              global_step: int64
              is_hyper_param_search: bool
              is_local_process_zero: bool
              is_world_process_zero: bool
              log_history: list<item: struct<debug/num_lat_loss: double, debug/num_lat_total: double, debug/num_tok_loss: doubl (... 184 chars omitted)
                child 0, item: struct<debug/num_lat_loss: double, debug/num_lat_total: double, debug/num_tok_loss: double, debug/nu (... 172 chars omitted)
                    child 0, debug/num_lat_loss: double
                    child 1, debug/num_lat_total: double
                    child 2, debug/num_tok_loss: double
                    child 3, debug/num_tok_total: double
                    child 4, epoch: double
                    child 5, step: int64
                    child 6, train/ce_loss: double
                    child 7, train/diffusion_loss: double
                    child 8, train/learning_rate_real: double
                    child 9, learning_rate: double
                    child 10, loss: double
              logging_steps: int64
              max_steps: int64
              num_input_tokens_seen: int64
              num_train_epochs: int64
              save_steps: int64
              stateful_callbacks: struct<TrainerControl: struct<args: struct<should_epoch_stop: bool, should_evaluate: bool, should_lo (... 79 chars omitted)
                child 0, TrainerControl: struct<args: struct<should_epoch_stop: bool, should_evaluate: bool, should_log: bool, should_save: b (... 55 chars omitted)
                    child 0, args: struct<should_epoch_stop: bool, should_evaluate: bool, should_log: bool, should_save: bool, should_t (... 19 chars omitted)
                        child 0, should_epoch_stop: bool
                        child 1, should_evaluate: bool
                        child 2, should_log: bool
                        child 3, should_save: bool
                        child 4, should_training_stop: bool
                    child 1, attributes: struct<>
              total_flos: double
              train_batch_size: int64
              trial_name: null
              trial_params: null
              corda_config: null
              target_modules: list<item: string>
                child 0, item: string
              use_dora: bool
              r: int64
              arrow_config: null
              ensure_weight_tying: bool
              target_parameters: null
              alpha_pattern: struct<>
              rank_pattern: struct<>
              peft_type: string
              init_lora_weights: bool
              use_rslora: bool
              inference_mode: bool
              auto_mapping: null
              megatron_config: null
              eva_config: null
              task_type: string
              revision: null
              modules_to_save: null
              layers_pattern: null
              use_qalora: bool
              lora_alpha: int64
              lora_bias: bool
              layer_replication: null
              loftq_config: struct<>
              qalora_group_size: int64
              peft_version: string
              trainable_token_indices: null
              layers_to_transform: null
              megatron_core: string
              fan_in_fan_out: bool
              base_model_name_or_path: string
              exclude_modules: null
              bias: string
              lora_dropout: double
              alora_invocation_tokens: null
              to
              {'alora_invocation_tokens': Value('null'), 'alpha_pattern': {}, 'arrow_config': Value('null'), 'auto_mapping': Value('null'), 'base_model_name_or_path': Value('string'), 'bias': Value('string'), 'corda_config': Value('null'), 'ensure_weight_tying': Value('bool'), 'eva_config': Value('null'), 'exclude_modules': Value('null'), 'fan_in_fan_out': Value('bool'), 'inference_mode': Value('bool'), 'init_lora_weights': Value('bool'), 'layer_replication': Value('null'), 'layers_pattern': Value('null'), 'layers_to_transform': Value('null'), 'loftq_config': {}, 'lora_alpha': Value('int64'), 'lora_bias': Value('bool'), 'lora_dropout': Value('float64'), 'megatron_config': Value('null'), 'megatron_core': Value('string'), 'modules_to_save': Value('null'), 'peft_type': Value('string'), 'peft_version': Value('string'), 'qalora_group_size': Value('int64'), 'r': Value('int64'), 'rank_pattern': {}, 'revision': Value('null'), 'target_modules': List(Value('string')), 'target_parameters': Value('null'), 'task_type': Value('string'), 'trainable_token_indices': Value('null'), 'use_dora': Value('bool'), 'use_qalora': Value('bool'), 'use_rslora': Value('bool')}
              because column names don't match

Need help making the dataset viewer work? Review the documentation on how to configure the dataset viewer, and open a discussion for direct support.

No dataset card yet

Downloads last month
753