fangqi commited on
Commit
ae43725
·
verified ·
1 Parent(s): 18b3046

Upload folder using huggingface_hub

Browse files
checkpoint_files/world_models/coffee/P_128/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_coffee_wmpo_128/**/*.tar",
4
  stats_path = "./data_files/statistics/coffee/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/coffee_wmpo_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/coffee_128_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/coffee/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/coffee_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/coffee/P_1280/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_coffee_wmpo_1280/**/*.tar",
4
  stats_path = "./data_files/statistics/coffee/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/coffee_wmpo_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/coffee_1280_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/coffee/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/coffee_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/square/P_128/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_square_wmpo_128/**/*.tar",
4
  stats_path = "./data_files/statistics/square/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/square_wmpo_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/square_128_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/square/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/square_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/square/P_1280/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_square_wmpo_1280/**/*.tar",
4
  stats_path = "./data_files/statistics/square/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/square_wmpo_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/square_1280_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/square/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/square_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/stack_three/P_128/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_stack_three_wmpo_128/**/*.tar",
4
  stats_path = "./data_files/statistics/stack_three/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/stack_three_wmpo_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/stack_three_128_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/stack_three/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/stack_three_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/stack_three/P_1280/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_stack_three_wmpo_1280/**/*.tar",
4
  stats_path = "./data_files/statistics/stack_three/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/stack_three_wmpo_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/stack_three_1280_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/stack_three/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/stack_three_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/three_piece_assembly/P_128/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_three_piece_assembly_wmpo_128/**/*.tar",
4
  stats_path = "./data_files/statistics/three_piece_assembly/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/three_piece_assembly_wmpo_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/three_piece_assembly_128_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/three_piece_assembly/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/three_piece_assembly_128"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
checkpoint_files/world_models/three_piece_assembly/P_1280/train_mimicgen_cfg.py CHANGED
@@ -1,6 +1,6 @@
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
- shards_pattern = "./tmp_files/rollout_three_piece_assembly_wmpo_1280/**/*.tar",
4
  stats_path = "./data_files/statistics/three_piece_assembly/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
@@ -61,7 +61,7 @@ val_batch_size = 128
61
 
62
  # Log settings
63
  seed = 42
64
- outputs = "./checkpoint_files/runs/world_models/three_piece_assembly_wmpo_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10
 
1
  dataset = dict(
2
  type="SimpleVLAWebDataset",
3
+ shards_pattern = "./data_files/example_rollouts/three_piece_assembly_1280_demos/**/*.tar",
4
  stats_path = "./data_files/statistics/three_piece_assembly/dataset_statistics.json",
5
  Ta = 8,
6
  To = 4,
 
61
 
62
  # Log settings
63
  seed = 42
64
+ outputs = "./checkpoint_files/runs/world_models/three_piece_assembly_1280"
65
  wandb = True
66
  epochs = 10000
67
  log_every = 10