diff --git a/process/batch_processing.py b/process/batch_processing.py
index 58d1f4f2cad6dc0517e594638be6a266049db263..44e4d4f24c325b70f6ddd68de640b87855065ccb 100755
--- a/process/batch_processing.py
+++ b/process/batch_processing.py
@@ -898,8 +898,8 @@ class BatchProcessing:
 
             if not overwrite_previous and os.path.exists(os.path.join(save_process_path, 'process_history.yaml')):
                 if fn_name not in ['enhance_contrast', 'enhance_brightness', 'enhance_sharpness', 'enhance_color', 'autocontrast', 'equalize', 'load_dlc', 'fit_skeleton']\
-                        or (fn_name == 'load_dlc' and len(glob.glob(os.path.join(save_process_path, '*' + model_name + '_full.pickle'))) >= 1)\
-                        or (fn_name == 'fit_skeleton' and len(glob.glob(os.path.join(save_process_path, '*' + model_name + '-3d_points.csv'))) >= 1):
+                        or (fn_name == 'load_dlc' and len(glob.glob(os.path.join(save_process_path, '*-3d_points.csv'))) >= 1)\
+                        or (fn_name == 'fit_skeleton' and len(glob.glob(os.path.join(save_process_path, '*skeleton_parameters*.csvs'))) >= 1):
                     print('>> {0} has been skipped because already processed and overwrite_previous = False'.format(main_rec_name))
                     continue
 
@@ -974,6 +974,10 @@ class BatchProcessing:
                                           destfolder=save_process_path)
 
                 elif fn_name == 'load_dlc':
+                    if len(glob.glob(os.path.join(save_process_path, '*.pickle'))) < 1:
+                        print('>> WARNING: No pickle files were found!')
+                        continue
+
                     # TODO Give options to load from csv, pickle or hdf5?
                     # skeleton2d = self.load_dlc_from_csv(settings.batch, kwargs['model_name'], load_process_path)
                     # skeleton2d = self.load_dlc_from_hdf5(settings.batch, kwargs['model_name'], load_process_path)
@@ -1168,10 +1172,8 @@ class BatchProcessing:
 
             if not overwrite_previous and os.path.exists(os.path.join(save_process_path, 'process_history.yaml')):
                 if fn_name not in ['enhance_contrast', 'enhance_brightness', 'enhance_sharpness', 'enhance_color', 'autocontrast', 'equalize', 'load_dlc', 'fit_skeleton'] \
-                        or (fn_name == 'load_dlc' and len(
-                    glob.glob(os.path.join(save_process_path, '*' + model_name + '_full.pickle'))) >= 1) \
-                        or (fn_name == 'fit_skeleton' and len(
-                    glob.glob(os.path.join(save_process_path, '*' + model_name + '-3d_points.csv'))) >= 1):
+                        or (fn_name == 'load_dlc' and len(glob.glob(os.path.join(save_process_path, '*-3d_points.csv'))) >= 1) \
+                        or (fn_name == 'fit_skeleton' and len(glob.glob(os.path.join(save_process_path, '*skeleton_parameters*.csvs'))) >= 1):
                     print('>> {0} had already been processed, and thus as been skipped (overwrite_previous = False)')
                     continue
 
diff --git a/process/dlc_postprocessing.py b/process/dlc_postprocessing.py
index d1196c1d337692fd86d743e2a7b51af8846186d0..bf7f8cb35b75ddff219b29657697f841f60a0e92 100755
--- a/process/dlc_postprocessing.py
+++ b/process/dlc_postprocessing.py
@@ -94,6 +94,8 @@ def load_dlc_from_full_pickle(batch_settings: BatchSettings, model_name: str, pa
 
     dlc_pickle_paths = glob.glob(os.path.join(path, '*' + model_name + '_full.pickle'))
 
+    assert len(dlc_pickle_paths) > 0, "ERROR: No pickle files were found at {0}".format(os.path.join(path, '*' + model_name + '_full.pickle'))
+
     obj_names = []
     for i, dlc_pickle_path in enumerate(dlc_pickle_paths):  # go through the various obj
         file_name = dlc_pickle_path[len(path) + 1:]
@@ -102,7 +104,7 @@
         except ValueError:
             del dlc_pickle_paths[i]
 
-    assert len(obj_names) > 0, "ERROR: No pickle files were found! (The file name might be incorrect)"
+    assert len(obj_names) > 0, "ERROR: No pickle files with obj were found! (The file name might be incorrect)"
 
     skeleton2d = {'obj_names': obj_names, 'model_name': model_name}
     for obj_name in obj_names: