first commit

commit 8f8cf48929
2819 changed files with 33143 additions and 0 deletions

README.md (new file, 100 lines)

# ActionDiffusion: An Action-aware Diffusion Model for Procedure Planning in Instructional Videos

*Lei Shi<sup>1</sup>, Paul Bürkner<sup>2</sup>, Andreas Bulling<sup>1</sup>*

1. University of Stuttgart
2. TU Dortmund University

IEEE/CVF Winter Conference on Applications of Computer Vision (WACV), 2025

Paper link: https://arxiv.org/abs/2403.08591

## Dataset

Download the pre-extracted features for each dataset.
### CrossTask

```
cd dataset/crosstask
wget https://www.di.ens.fr/~dzhukov/crosstask/crosstask_release.zip
wget https://vision.eecs.yorku.ca/WebShare/CrossTask_s3d.zip
unzip '*.zip'
```
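
After unzipping, the CrossTask loader expects roughly the layout sketched in the docstring of `dataloader/data_load_mlp.py`:

```
.
└── crosstask
    ├── crosstask_features
    └── crosstask_release
        ├── tasks_primary.txt
        ├── videos.csv or json
        └── videos_val.csv or json
```

Note that with the default `crosstask_use_feature_how=True` the loader reads features from `dataset/crosstask/processed_data/`; whether the downloaded archives unpack directly into that folder is an assumption, so the extracted features may need to be moved or renamed.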

### COIN

```
cd dataset/coin
wget https://vision.eecs.yorku.ca/WebShare/COIN_s3d.zip
unzip COIN_s3d.zip
```
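
The COIN loader in `dataloader/data_load_mlp.py` reads per-video features from `full_npy/` (files named `<class>_<recipe_type>_<video_id>.npy`) together with the split files below. Whether `COIN_s3d.zip` unpacks directly into this layout is an assumption, so the extracted files may need to be moved:

```
.
└── coin
    ├── full_npy/            # <class>_<recipe_type>_<video_id>.npy feature files
    ├── coin_train_70.json
    └── coin_test_30.json
```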

### NIV

```
cd dataset/NIV
wget https://vision.eecs.yorku.ca/WebShare/NIV_s3d.zip
unzip NIV_s3d.zip
```
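
Likewise, the NIV loader expects features under `processed_data/` plus the split files it references (`train70_new.json`, `test30_new.json`); how `NIV_s3d.zip` unpacks is an assumption:

```
.
└── NIV
    ├── processed_data/      # per-video .npy feature files
    ├── train70_new.json
    └── test30_new.json
```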

## Train

### Task Prediction

Set the arguments in `train_mlp.sh` and train a task prediction model for each dataset. Set `--class_dim`, `--action_dim`, and `--observation_dim` to match the dataset. For each horizon `T = {3, 4, 5, 6}`, set `--horizon`, `--json_path_val`, and `--json_path_train` accordingly.

```
sh train_mlp.sh
```

Set the checkpoint path in `temp.py` via `--checkpoint_mlp`.
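
For orientation, here is a minimal sketch of the kind of command `train_mlp.sh` is expected to wrap. Only the flag names come from the instructions above; the Python entry point and all concrete values are illustrative assumptions, not taken from this repository.

```
# Hypothetical sketch of train_mlp.sh -- entry point and values are assumptions.
# Example: CrossTask, horizon T=3; adjust the dims and JSON paths per dataset and horizon.
python train_mlp.py \
  --dataset crosstask \
  --horizon 3 \
  --class_dim 18 \
  --action_dim 105 \
  --observation_dim 1536 \
  --json_path_train dataset/crosstask/train_split_T3.json \
  --json_path_val dataset/crosstask/test_split_T3.json
```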

### Diffusion Model

Set `dataset` and `horizon` in `train.sh` to the desired dataset and time horizon. Set `mask_type` to `multi_add` for the multiple-add noise mask or `single_add` for the single-add noise mask. Set `attn` to `WithAttention` for the UNet with attention or `NoAttention` for the UNet without attention.

To train the model, run

```
sh train.sh
```

To train the model without the noise mask, run

```
sh train_no_mask.sh
```
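
As a rough illustration of the settings described above (the variable names and allowed values come from this README; the Python entry point and flag plumbing are assumptions):

```
# Hypothetical sketch of train.sh -- the entry point is an assumption.
dataset=crosstask        # crosstask | coin | NIV
horizon=3                # 3 | 4 | 5 | 6
mask_type=multi_add      # multi_add | single_add
attn=WithAttention       # WithAttention | NoAttention

python train.py --dataset "$dataset" --horizon "$horizon" \
  --mask_type "$mask_type" --attn "$attn"
```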

## Inference

Set `dataset` and `horizon` in `inference.sh` to the desired dataset and time horizon, and set `checkpoint_diff` to the path of the pre-trained diffusion model. As for training, set `mask_type` to `multi_add` or `single_add` and `attn` to `WithAttention` or `NoAttention`.

To perform inference, run

```
sh inference.sh
```

To perform inference without the action mask, run

```
sh inference_no_mask.sh
```

To run inference using the noise distribution with action embeddings, run

```
sh inference_dist.sh
```
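
Analogously, a hedged sketch of `inference.sh` (the entry point and the checkpoint path are assumptions):

```
# Hypothetical sketch of inference.sh -- entry point and checkpoint path are assumptions.
dataset=crosstask
horizon=3
mask_type=multi_add      # multi_add | single_add
attn=WithAttention       # WithAttention | NoAttention
checkpoint_diff=checkpoints/diffusion_crosstask_T3.pth   # path to the pre-trained diffusion model

python inference.py --dataset "$dataset" --horizon "$horizon" \
  --mask_type "$mask_type" --attn "$attn" --checkpoint_diff "$checkpoint_diff"
```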

## Acknowledgement

This repository is developed based on https://github.com/MCG-NJU/PDPP/tree/main/

data_load_json.py (new file, 133 lines)

import os
import numpy as np
import torch
from torch.utils.data import Dataset
import json
from collections import namedtuple

Batch = namedtuple('Batch', 'Observations json_id json_len')


class PlanningDataset(Dataset):
    """
    load video and action features from dataset
    """

    def __init__(self,
                 root,
                 args=None,
                 is_val=False,
                 model=None,
                 ):
        self.is_val = is_val
        self.data_root = root
        self.args = args
        self.max_traj_len = args.horizon
        self.vid_names = None
        self.frame_cnts = None
        self.images = None
        self.last_vid = ''

        if args.dataset == 'crosstask':
            cross_task_data_name = args.json_path_val
            # "/data1/wanghanlin/diffusion_planning/jsons_crosstask105/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)

            if os.path.exists(cross_task_data_name):
                with open(cross_task_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(cross_task_data_name))
            else:
                assert 0
        elif args.dataset == 'coin':
            coin_data_name = args.json_path_val
            # "/data1/wanghanlin/diffusion_planning/jsons_coin/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)
            if os.path.exists(coin_data_name):
                with open(coin_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(coin_data_name))
            else:
                assert 0
        elif args.dataset == 'NIV':
            niv_data_name = args.json_path_val
            # "/data1/wanghanlin/diffusion_planning/jsons_niv/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)
            if os.path.exists(niv_data_name):
                with open(niv_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(niv_data_name))
            else:
                assert 0
        else:
            raise NotImplementedError(
                'Dataset {} is not implemented'.format(args.dataset))

        self.model = model
        self.prepare_data()
        self.M = 3

    def prepare_data(self):
        vid_names = []
        frame_cnts = []
        for listdata in self.json_data:
            vid_names.append(listdata['id'])
            frame_cnts.append(listdata['instruction_len'])
        self.vid_names = vid_names
        self.frame_cnts = frame_cnts

    def curate_dataset(self, images, legal_range, M=2):
        images_list = []
        labels_onehot_list = []
        idx_list = []
        for start_idx, end_idx, action_label in legal_range:
            idx = start_idx
            idx_list.append(idx)
            image_start_idx = max(0, idx)
            if image_start_idx + M <= len(images):
                image_start = images[image_start_idx: image_start_idx + M]
            else:
                image_start = images[len(images) - M: len(images)]
            image_start_cat = image_start[0]
            for w in range(len(image_start) - 1):
                image_start_cat = np.concatenate((image_start_cat, image_start[w + 1]), axis=0)

            images_list.append(image_start_cat)
            labels_onehot_list.append(action_label)

        end_idx = max(2, end_idx)
        image_end = images[end_idx - 2:end_idx + M - 2]
        image_end_cat = image_end[0]
        for w in range(len(image_end) - 1):
            image_end_cat = np.concatenate((image_end_cat, image_end[w + 1]), axis=0)
        images_list.append(image_end_cat)

        return images_list, labels_onehot_list, idx_list

    def sample_single(self, index):
        folder_id = self.vid_names[index]
        if self.args.dataset == 'crosstask':
            if folder_id['vid'] != self.last_vid:
                images_ = np.load(folder_id['feature'], allow_pickle=True)
                self.images = images_['frames_features']
                self.last_vid = folder_id['vid']
        else:
            images_ = np.load(folder_id['feature'], allow_pickle=True)
            self.images = images_['frames_features']
        images, labels_matrix, idx_list = self.curate_dataset(
            self.images, folder_id['legal_range'], M=self.M)
        frames = torch.tensor(np.array(images))
        return frames

    def __getitem__(self, index):
        frames = self.sample_single(index)
        frames_t = torch.zeros(2, self.args.observation_dim)
        frames_t[0, :] = frames[0, :]
        frames_t[1, :] = frames[-1, :]
        frames_t = frames_t.view(1, 2, -1)

        batch = Batch(frames_t, self.vid_names[index], self.frame_cnts[index])
        return batch

    def __len__(self):
        return len(self.json_data)

dataloader/data_load.py (new file, 167 lines)

import os
import numpy as np
import torch
from torch.utils.data import Dataset
import json
from collections import namedtuple


Batch = namedtuple('Batch', 'Observations Actions Class')


class PlanningDataset(Dataset):
    """
    load video and action features from dataset
    """

    def __init__(self,
                 root,
                 args=None,
                 is_val=False,
                 model=None,
                 ):
        self.is_val = is_val
        self.data_root = root
        self.args = args
        self.max_traj_len = args.horizon
        self.vid_names = None
        self.frame_cnts = None
        self.images = None
        self.last_vid = ''

        if args.dataset == 'crosstask':
            if is_val:
                cross_task_data_name = args.json_path_val
                # "/data1/wanghanlin/diffusion_planning/jsons_crosstask105/sliding_window_cross_task_data_{}_{}_new_task_id_73_with_event_class.json".format(is_val, self.max_traj_len)
            else:
                cross_task_data_name = args.json_path_train
                # "/data1/wanghanlin/diffusion_planning/jsons_crosstask105/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
                #     is_val, self.max_traj_len)

            if os.path.exists(cross_task_data_name):
                with open(cross_task_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(cross_task_data_name))
            else:
                assert 0
        elif args.dataset == 'coin':
            if is_val:
                coin_data_name = args.json_path_val
                # "/data1/wanghanlin/diffusion_planning/jsons_coin/sliding_window_cross_task_data_{}_{}_new_task_id_73_with_event_class.json".format(
                #     is_val, self.max_traj_len)
            else:
                coin_data_name = args.json_path_train
                # "/data1/wanghanlin/diffusion_planning/jsons_coin/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
                #     is_val, self.max_traj_len)
            if os.path.exists(coin_data_name):
                with open(coin_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(coin_data_name))
            else:
                assert 0
        elif args.dataset == 'NIV':
            if is_val:
                niv_data_name = args.json_path_val
                # "/data1/wanghanlin/diffusion_planning/jsons_niv/sliding_window_cross_task_data_{}_{}_new_task_id_73_with_event_class.json".format(
                #     is_val, self.max_traj_len)
            else:
                niv_data_name = args.json_path_train
                # "/data1/wanghanlin/diffusion_planning/jsons_niv/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
                #     is_val, self.max_traj_len)
            if os.path.exists(niv_data_name):
                with open(niv_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(niv_data_name))
            else:
                assert 0
        else:
            raise NotImplementedError(
                'Dataset {} is not implemented'.format(args.dataset))

        self.model = model
        self.prepare_data()
        # self.M = args.horizon
        self.M = 3

    def prepare_data(self):
        vid_names = []
        frame_cnts = []
        for listdata in self.json_data:
            vid_names.append(listdata['id'])
            frame_cnts.append(listdata['instruction_len'])
        self.vid_names = vid_names
        self.frame_cnts = frame_cnts

    def curate_dataset(self, images, legal_range, M=2):
        images_list = []
        labels_onehot_list = []
        idx_list = []
        for start_idx, end_idx, action_label in legal_range:
            idx = start_idx
            idx_list.append(idx)
            image_start_idx = max(0, idx)
            if image_start_idx + M <= len(images):
                image_start = images[image_start_idx: image_start_idx + M]
            else:
                image_start = images[len(images) - M: len(images)]
            image_start_cat = image_start[0]
            for w in range(len(image_start) - 1):
                image_start_cat = np.concatenate((image_start_cat, image_start[w + 1]), axis=0)
            images_list.append(image_start_cat)
            labels_onehot_list.append(action_label)

        end_idx = max(2, end_idx)
        image_end = images[end_idx - 2:end_idx + M - 2]
        image_end_cat = image_end[0]
        for w in range(len(image_end) - 1):
            image_end_cat = np.concatenate((image_end_cat, image_end[w + 1]), axis=0)
        images_list.append(image_end_cat)
        '''end_idx = max(M-1, end_idx)
        image_end = images[end_idx - (M-1):end_idx + M - (M-1)]
        image_end_cat = image_end[0]
        for w in range(len(image_end) - 1):
            image_end_cat = np.concatenate((image_end_cat, image_end[w + 1]), axis=0)
        images_list.append(image_end_cat)'''
        return images_list, labels_onehot_list, idx_list

    def sample_single(self, index):
        folder_id = self.vid_names[index]

        if self.is_val:
            event_class = folder_id['event_class']  # was event_class
        else:
            task_class = folder_id['task_id']

        if self.args.dataset == 'crosstask':
            if folder_id['vid'] != self.last_vid:
                images_ = np.load(folder_id['feature'], allow_pickle=True)
                self.images = images_['frames_features']
                self.last_vid = folder_id['vid']
        else:
            images_ = np.load(folder_id['feature'], allow_pickle=True)
            self.images = images_['frames_features']
        images, labels_matrix, idx_list = self.curate_dataset(
            self.images, folder_id['legal_range'], M=self.M)
        frames = torch.tensor(np.array(images))
        labels_tensor = torch.tensor(labels_matrix, dtype=torch.long)

        if self.is_val:
            event_class = torch.tensor(event_class, dtype=torch.long)
            return frames, labels_tensor, event_class
        else:
            task_class = torch.tensor(task_class, dtype=torch.long)
            return frames, labels_tensor, task_class

    def __getitem__(self, index):
        if self.is_val:
            frames, labels, event_class = self.sample_single(index)
        else:
            frames, labels, task = self.sample_single(index)
        if self.is_val:
            batch = Batch(frames, labels, event_class)
        else:
            batch = Batch(frames, labels, task)
        return batch

    def __len__(self):
        return len(self.json_data)

dataloader/data_load_mlp.py (new file, 456 lines)

import os
import numpy as np
import torch
from torch.utils.data import Dataset
import json
import math
from collections import namedtuple

Batch = namedtuple('Batch', 'Observations Actions Class')


def get_vids_from_json(path):
    task_vids = {}
    with open(path, 'r') as f:
        json_data = json.load(f)

    for i in json_data:
        task = i['task']
        vid = i['vid']
        if task not in task_vids:
            task_vids[task] = []
        task_vids[task].append(vid)
    return task_vids


def get_vids(path):
    task_vids = {}
    with open(path, 'r') as f:
        for line in f:
            task, vid, url = line.strip().split(',')
            if task not in task_vids:
                task_vids[task] = []
            task_vids[task].append(vid)
    return task_vids


def read_task_info(path):
    titles = {}
    urls = {}
    n_steps = {}
    steps = {}
    with open(path, 'r') as f:
        idx = f.readline()
        while idx != '':
            idx = idx.strip()
            titles[idx] = f.readline().strip()
            urls[idx] = f.readline().strip()
            n_steps[idx] = int(f.readline().strip())
            steps[idx] = f.readline().strip().split(',')
            next(f)
            idx = f.readline()
    return {'title': titles, 'url': urls, 'n_steps': n_steps, 'steps': steps}


class PlanningDataset(Dataset):
    def __init__(self,
                 root,
                 args=None,
                 is_val=False,
                 model=None,
                 crosstask_use_feature_how=True,
                 ):
        self.is_val = is_val
        self.data_root = root
        self.args = args
        self.max_traj_len = args.horizon
        self.vid_names = None
        self.frame_cnts = None
        self.images = None
        self.last_vid = ''
        self.crosstask_use_feature_how = crosstask_use_feature_how

        if args.dataset == 'crosstask':
            """
            .
            └── crosstask
                ├── crosstask_features
                └── crosstask_release
                    ├── tasks_primary.txt
                    ├── videos.csv or json
                    └── videos_val.csv or json
            """
            val_csv_path = os.path.join(
                root, 'dataset', 'crosstask', 'crosstask_release', 'test_list.json')  # 'videos_val.csv')
            video_csv_path = os.path.join(
                root, 'dataset', 'crosstask', 'crosstask_release', 'train_list.json')  # 'videos.csv')

            if crosstask_use_feature_how:
                self.features_path = os.path.join(root, 'dataset', 'crosstask', 'processed_data')
            else:
                self.features_path = os.path.join(root, 'dataset', 'crosstask', 'crosstask_features')

            self.constraints_path = os.path.join(
                root, 'dataset', 'crosstask', 'crosstask_release', 'annotations')

            self.action_one_hot = np.load(
                os.path.join(root, 'dataset', 'crosstask', 'crosstask_release', 'actions_one_hot.npy'),
                allow_pickle=True).item()

            self.task_class = {
                '23521': 0,
                '59684': 1,
                '71781': 2,
                '113766': 3,
                '105222': 4,
                '94276': 5,
                '53193': 6,
                '105253': 7,
                '44047': 8,
                '76400': 9,
                '16815': 10,
                '95603': 11,
                '109972': 12,
                '44789': 13,
                '40567': 14,
                '77721': 15,
                '87706': 16,
                '91515': 17
            }

            # cross_task_data_name = "/data1/wanghanlin/diffusion_planning/jsons_crosstask105/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)

            if is_val:
                cross_task_data_name = args.json_path_val
            else:
                cross_task_data_name = args.json_path_train

            if os.path.exists(cross_task_data_name):
                with open(cross_task_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(cross_task_data_name))
            else:
                file_type = val_csv_path.split('.')[-1]
                if file_type == 'json':
                    all_task_vids = get_vids_from_json(video_csv_path)
                    val_vids = get_vids_from_json(val_csv_path)
                else:
                    all_task_vids = get_vids(video_csv_path)
                    val_vids = get_vids(val_csv_path)

                if is_val:
                    task_vids = val_vids
                else:
                    task_vids = {task: [vid for vid in vids if task not in val_vids or vid not in val_vids[task]] for
                                 task, vids in
                                 all_task_vids.items()}

                primary_info = read_task_info(os.path.join(
                    root, 'dataset', 'crosstask', 'crosstask_release', 'tasks_primary.txt'))

                self.n_steps = primary_info['n_steps']
                all_tasks = set(self.n_steps.keys())

                task_vids = {task: vids for task,
                             vids in task_vids.items() if task in all_tasks}

                all_vids = []
                for task, vids in task_vids.items():
                    all_vids.extend([(task, vid) for vid in vids])
                json_data = []
                for idx in range(len(all_vids)):
                    task, vid = all_vids[idx]
                    if self.crosstask_use_feature_how:
                        video_path = os.path.join(
                            self.features_path, str(task) + '_' + str(vid) + '.npy')
                    else:
                        video_path = os.path.join(
                            self.features_path, str(vid) + '.npy')
                    legal_range = self.process_single(task, vid)
                    if not legal_range:
                        continue

                    temp_len = len(legal_range)
                    temp = []
                    while temp_len < self.max_traj_len:
                        temp.append(legal_range[0])
                        temp_len += 1

                    temp.extend(legal_range)
                    legal_range = temp

                    for i in range(len(legal_range) - self.max_traj_len + 1):
                        legal_range_current = legal_range[i:i + self.max_traj_len]
                        json_data.append({'id': {'vid': vid, 'task': task, 'feature': video_path,
                                                 'legal_range': legal_range_current, 'task_id': self.task_class[task]},
                                          'instruction_len': self.n_steps[task]})

                self.json_data = json_data
                with open(cross_task_data_name, 'w') as f:
                    json.dump(json_data, f)

        elif args.dataset == 'coin':
            coin_path = os.path.join(root, 'dataset/coin', 'full_npy/')
            val_csv_path = os.path.join(
                root, 'dataset/coin', 'coin_test_30.json')
            video_csv_path = os.path.join(
                root, 'dataset/coin', 'coin_train_70.json')

            # coin_data_name = "/data1/wanghanlin/diffusion_planning/jsons_coin/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)
            if is_val:
                coin_data_name = args.json_path_val
            else:
                coin_data_name = args.json_path_train

            if os.path.exists(coin_data_name):
                with open(coin_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(coin_data_name))
            else:
                json_data = []
                num = 0
                if is_val:
                    with open(val_csv_path, 'r') as f:
                        coin_data = json.load(f)
                else:
                    with open(video_csv_path, 'r') as f:
                        coin_data = json.load(f)
                for i in coin_data:
                    for (k, v) in i.items():
                        file_name = v['class'] + '_' + str(v['recipe_type']) + '_' + k + '.npy'
                        file_path = coin_path + file_name
                        images_ = np.load(file_path, allow_pickle=True)
                        images = images_['frames_features']
                        legal_range = []

                        last_action = v['annotation'][-1]['segment'][1]
                        last_action = math.ceil(last_action)
                        if last_action > len(images):
                            print(k, last_action, len(images))
                            num += 1
                            continue

                        for annotation in v['annotation']:
                            action_label = int(annotation['id']) - 1
                            start_idx, end_idx = annotation['segment']
                            start_idx = math.floor(start_idx)
                            end_idx = math.ceil(end_idx)

                            if end_idx < images.shape[0]:
                                legal_range.append((start_idx, end_idx, action_label))
                            else:
                                legal_range.append((start_idx, images.shape[0] - 1, action_label))

                        temp_len = len(legal_range)
                        temp = []
                        while temp_len < self.max_traj_len:
                            temp.append(legal_range[0])
                            temp_len += 1

                        temp.extend(legal_range)
                        legal_range = temp

                        for i in range(len(legal_range) - self.max_traj_len + 1):
                            legal_range_current = legal_range[i:i + self.max_traj_len]
                            json_data.append({'id': {'vid': k, 'feature': file_path,
                                                     'legal_range': legal_range_current, 'task_id': v['recipe_type']},
                                              'instruction_len': 0})
                print(num)
                self.json_data = json_data
                with open(coin_data_name, 'w') as f:
                    json.dump(json_data, f)

        elif args.dataset == 'NIV':
            val_csv_path = os.path.join(
                root, 'dataset/NIV', 'test30_new.json')
            video_csv_path = os.path.join(
                root, 'dataset/NIV', 'train70_new.json')

            # niv_data_name = "/data1/wanghanlin/diffusion_planning/jsons_niv/sliding_window_cross_task_data_{}_{}_new_task_id_73.json".format(
            #     is_val, self.max_traj_len)
            if is_val:
                niv_data_name = args.json_path_val
            else:
                niv_data_name = args.json_path_train

            if os.path.exists(niv_data_name):
                with open(niv_data_name, 'r') as f:
                    self.json_data = json.load(f)
                print('Loaded {}'.format(niv_data_name))
            else:
                json_data = []
                if is_val:
                    with open(val_csv_path, 'r') as f:
                        niv_data = json.load(f)
                else:
                    with open(video_csv_path, 'r') as f:
                        niv_data = json.load(f)
                for d in niv_data:
                    legal_range = []
                    path = os.path.join(root, 'dataset/NIV', 'processed_data', d['feature'])
                    info = np.load(path, allow_pickle=True)
                    num_steps = int(info['num_steps'])
                    assert num_steps == len(info['steps_ids'])
                    assert info['num_steps'] == len(info['steps_starts'])
                    assert info['num_steps'] == len(info['steps_ends'])
                    starts = info['steps_starts']
                    ends = info['steps_ends']
                    action_labels = info['steps_ids']
                    images = info['frames_features']

                    for i in range(num_steps):
                        action_label = int(action_labels[i])
                        start_idx = math.floor(float(starts[i]))
                        end_idx = math.ceil(float(ends[i]))

                        if end_idx < images.shape[0]:
                            legal_range.append((start_idx, end_idx, action_label))
                        else:
                            legal_range.append((start_idx, images.shape[0] - 1, action_label))

                    temp_len = len(legal_range)
                    temp = []
                    while temp_len < self.max_traj_len:
                        temp.append(legal_range[0])
                        temp_len += 1

                    temp.extend(legal_range)
                    legal_range = temp

                    for i in range(len(legal_range) - self.max_traj_len + 1):
                        legal_range_current = legal_range[i:i + self.max_traj_len]
                        json_data.append({'id': {'feature': path,
                                                 'legal_range': legal_range_current, 'task_id': d['task_id']},
                                          'instruction_len': 0})
                self.json_data = json_data
                with open(niv_data_name, 'w') as f:
                    json.dump(json_data, f)
                print(len(json_data))
        else:
            raise NotImplementedError(
                'Dataset {} is not implemented'.format(args.dataset))

        self.model = model
        self.prepare_data()
        self.M = 3

    def process_single(self, task, vid):
        if self.crosstask_use_feature_how:
            if not os.path.exists(os.path.join(self.features_path, str(task) + '_' + str(vid) + '.npy')):
                return False
            images_ = np.load(os.path.join(self.features_path, str(task) + '_' + str(vid) + '.npy'), allow_pickle=True)
            images = images_['frames_features']
        else:
            if not os.path.exists(os.path.join(self.features_path, vid + '.npy')):
                return False
            images = np.load(os.path.join(self.features_path, vid + '.npy'))

        cnst_path = os.path.join(
            self.constraints_path, task + '_' + vid + '.csv')
        legal_range = self.read_assignment(task, cnst_path)
        legal_range_ret = []
        for (start_idx, end_idx, action_label) in legal_range:
            if not start_idx < images.shape[0]:
                print(task, vid, end_idx, images.shape[0])
                return False
            if end_idx < images.shape[0]:
                legal_range_ret.append((start_idx, end_idx, action_label))
            else:
                legal_range_ret.append((start_idx, images.shape[0] - 1, action_label))

        return legal_range_ret

    def read_assignment(self, task_id, path):
        legal_range = []
        with open(path, 'r') as f:
            for line in f:
                step, start, end = line.strip().split(',')
                start = int(math.floor(float(start)))
                end = int(math.ceil(float(end)))
                action_label_ind = self.action_one_hot[task_id + '_' + step]
                legal_range.append((start, end, action_label_ind))

        return legal_range

    def prepare_data(self):
        vid_names = []
        frame_cnts = []
        for listdata in self.json_data:
            vid_names.append(listdata['id'])
            frame_cnts.append(listdata['instruction_len'])
        self.vid_names = vid_names
        self.frame_cnts = frame_cnts

    def curate_dataset(self, images, legal_range, M=2):
        images_list = []
        labels_onehot_list = []
        idx_list = []
        for start_idx, end_idx, action_label in legal_range:
            idx = start_idx
            idx_list.append(idx)
            image_start_idx = max(0, idx)
            if image_start_idx + M <= len(images):
                image_start = images[image_start_idx: image_start_idx + M]
            else:
                image_start = images[len(images) - M: len(images)]
            image_start_cat = image_start[0]
            for w in range(len(image_start) - 1):
                image_start_cat = np.concatenate((image_start_cat, image_start[w + 1]), axis=0)
            images_list.append(image_start_cat)
            labels_onehot_list.append(action_label)

        end_idx = max(2, end_idx)
        image_end = images[end_idx - 2:end_idx + M - 2]
        image_end_cat = image_end[0]
        for w in range(len(image_end) - 1):
            image_end_cat = np.concatenate((image_end_cat, image_end[w + 1]), axis=0)
        images_list.append(image_end_cat)
        return images_list, labels_onehot_list, idx_list

    def sample_single(self, index):
        folder_id = self.vid_names[index]
        if self.is_val:
            event_class = folder_id['task_id']
        else:
            task_class = folder_id['task_id']

        if self.args.dataset == 'crosstask':
            if folder_id['vid'] != self.last_vid:
                if self.crosstask_use_feature_how:
                    images_ = np.load(folder_id['feature'], allow_pickle=True)
                    self.images = images_['frames_features']
                    self.last_vid = folder_id['vid']
                else:
                    self.images = np.load(os.path.join(self.features_path, folder_id['vid'] + '.npy'))
        else:
            images_ = np.load(folder_id['feature'], allow_pickle=True)
            self.images = images_['frames_features']

        images, labels_matrix, idx_list = self.curate_dataset(
            self.images, folder_id['legal_range'], M=self.M)
        frames = torch.tensor(np.array(images))
        labels_tensor = torch.tensor(labels_matrix, dtype=torch.long)

        if self.is_val:
            event_class = torch.tensor(event_class, dtype=torch.long)
            return frames, labels_tensor, event_class
        else:
            task_class = torch.tensor(task_class, dtype=torch.long)
            return frames, labels_tensor, task_class

    def __getitem__(self, index):
        if self.is_val:
            frames, labels, event_class = self.sample_single(index)
        else:
            frames, labels, task = self.sample_single(index)
        if self.is_val:
            batch = Batch(frames, labels, event_class)
        else:
            batch = Batch(frames, labels, task)
        return batch

    def __len__(self):
        return len(self.json_data)

Other added dataset files:

dataset/NIV/NIV_mlp_T3.json (diff suppressed: line too long)
dataset/NIV/NIV_mlp_T4.json (diff suppressed: line too long)
dataset/NIV/niv_act_embeddings.pickle (binary file, not shown)
dataset/NIV/test30.json (new file, 1 line; shown wrapped below)
[{"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0001.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0004.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0005.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0007.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0012.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0020.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0021.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0022.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0025.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/changing_tire_0030.npy", "task_id": 0}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0001.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0007.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0014.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0018.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0019.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0023.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0025.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/coffee_0027.npy", "task_id": 1}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0002.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0006.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0012.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0013.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0016.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0017.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0019.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0021.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0023.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/cpr_0026.npy", "task_id": 2}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0001.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0009.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0011.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0017.npy", 
"task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0018.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0020.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0025.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0028.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/jump_car_0029.npy", "task_id": 3}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/repot_0004.npy", "task_id": 4}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/repot_0015.npy", "task_id": 4}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/repot_0018.npy", "task_id": 4}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/repot_0025.npy", "task_id": 4}, {"feature": "/data0/wanghanlin/planning_diffusion/dataset/NIV/processed_data/repot_0029.npy", "task_id": 4}]
dataset/NIV/test_split_T3.json (diff suppressed: line too long)
dataset/NIV/test_split_T4.json (diff suppressed: line too long)
dataset/NIV/train70.json (diff suppressed: line too long)
dataset/NIV/train_split_T3.json (diff suppressed: line too long)
dataset/NIV/train_split_T4.json (diff suppressed: line too long)
dataset/coin/coin_mlp_T3.json (diff suppressed: line too long)
dataset/coin/coin_mlp_T4.json (diff suppressed: line too long)
dataset/coin/coin_test_30.json (diff suppressed: line too long)
dataset/coin/coin_train_70.json (diff suppressed: line too long)
dataset/coin/steps_info.pickle (binary file, not shown)
dataset/coin/test_split_T3.json (diff suppressed: line too long)
dataset/coin/test_split_T4.json (diff suppressed: line too long)
dataset/coin/train_split_T3.json (diff suppressed: line too long)
dataset/coin/train_split_T4.json (diff suppressed: line too long)
dataset/crosstask/act_lang_emb.pkl (binary file, not shown)
dataset/crosstask/crosstask_release/actions_one_hot.npy (binary file, not shown)

Per-video annotation CSV files (individual file names are not shown in this view). Each row has the form step_id,start,end for one action segment, matching the format parsed by read_assignment() in dataloader/data_load_mlp.py.
Some files were not shown because too many files have changed in this diff.