This is amazing, but I'm having some trouble with DIS.
Sorry, I'm new at this. It finds 3000 training samples but still reports num_samples = 0.
## configure the train, valid and inference datasets
# Containers for the dataset configurations selected further below
# (train_datasets / valid_datasets are reassigned once the dicts exist).
train_datasets, valid_datasets = [], []
# BUG FIX: the original line read "dataset_1, dataset_1 = {}, {}" — the same
# name bound twice. Both are unused placeholders; the second is now dataset_2.
dataset_1, dataset_2 = {}, {}
# DIS5K training split: .jpg images paired with .png ground-truth masks.
# NOTE(review): if the loader reports num_samples = 0 despite finding files,
# im_ext/gt_ext probably don't match the actual file extensions on disk, or
# the cache_dir is stale — TODO confirm against the dataset folder.
dataset_tr = dict(
    name="DIS5K-TR",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TR/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TR/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-TR",
)
# DIS5K validation split: same layout as the training split.
dataset_vd = dict(
    name="DIS5K-VD",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-VD/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-VD/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-VD",
)
# DIS5K test split 1.
dataset_te1 = dict(
    name="DIS5K-TE1",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE1/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE1/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-TE1",
)
# DIS5K test split 2.
dataset_te2 = dict(
    name="DIS5K-TE2",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE2/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE2/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-TE2",
)
# DIS5K test split 3.
dataset_te3 = dict(
    name="DIS5K-TE3",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE3/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE3/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-TE3",
)
# DIS5K test split 4.
dataset_te4 = dict(
    name="DIS5K-TE4",
    im_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE4/im",
    gt_dir="/home/jakko/Pictures/DIS5K/DIS5K/DIS-TE4/gt",
    im_ext=".jpg",
    gt_ext=".png",
    cache_dir="../DIS5K-Cache/DIS-TE4",
)
### Template for running inference on your own images.
# gt_dir / gt_ext are empty strings: no ground-truth masks are provided.
dataset_demo = dict(
    name="your-dataset",
    im_dir="../your-dataset/im",
    gt_dir="",
    im_ext=".jpg",
    gt_ext="",
    cache_dir="../your-dataset/cache",
)
## Training set: append additional dataset dicts to train on multiple datasets at once.
train_datasets = [dataset_tr]
## Validation / inference sets: extend with dataset_te1 ... dataset_te4 (and set
## hypar["mode"] = "valid" below) to run inference on the DIS5K test splits.
valid_datasets = [dataset_vd]
### --------------- STEP 2: Configuring the hyperparameters for training, validation and inference ---------------
hypar = {}

## -- 2.1. configure the model saving or restoring path --
# "train": training mode.
# "valid": validation/inference mode; the predictions are saved into
#          hypar["valid_out_dir"] when it is non-empty, otherwise only the
#          accuracy is computed and no prediction is written to disk.
hypar["mode"] = "train"

# Activate intermediate feature supervision when True.
hypar["interm_sup"] = False

if hypar["mode"] == "train":
    hypar.update({
        # Leave empty while training; for "valid" (inference) runs point it at a local directory.
        "valid_out_dir": "",
        # Directory the model weights are saved to (or restored from).
        "model_path": "/home/jakko/Github/DIS/saved_models/your_model_weights",
        # Name of the .pth weights file used to resume training; "" starts from scratch.
        "restore_model": "",
        # Starting iteration; adjust it to match a restored training run.
        "start_ite": 0,
        "gt_encoder_model": "",
    })
else:
    # Inference configuration: where the segmentation maps go and which weights to load.
    hypar.update({
        # Output folder for the inferenced segmentation maps.
        "valid_out_dir": "../your-results/",
        # Path holding the trained weights.
        "model_path": "/home/jakko/Github/DIS/saved_models/your_model_weights",
        # Name of the to-be-loaded weights file.
        "restore_model": "isnet.pth",
    })