
Commit

* November 18, 2021
    1. Fixed spelling errors and adjusted naming.
    2. Added support for name prefixes in prediction results (see `config_method_json_example.json` in the `examples` folder). Combined with suffixes, this should cover essentially all common cases. Note, however, that mappings provided via a file are not yet supported, so please make sure each prediction name contains the ground-truth name (without its extension); a sketch of this naming rule is shown after this list.
    3. Improved the axis settings for plotting. Since these are very fine-grained settings, they cannot be configured via command-line options for now; a dedicated configuration file (e.g., JSON) may be used for them later.
    4. Added support for a shared y-axis (`sharey`) in plotting, which helps when drawing standalone example figures. See `plot_results.py` in `examples` for usage.
    5. Refined the functions related to the `include_` and `exclude_` options.
    6. Added JSON examples for the dataset and method configurations, and renamed the configuration files provided in `examples` to use a unified `config_` prefix.
    7. Plotting now supports aliases for dataset and method names. Previously, the keys of the respective `json` configuration files were used directly as the names shown in the plots, which was inconvenient and inflexible when names carry extra markers (e.g., a year or a conference name). An additional `json` configuration file can now be used to define the mapping; see `alias_for_plotting.json` in `examples` for an example.
    8. Because the configuration and invocation of the core scripts `eval_all.py` and `plot_results.py` have changed, two simple `sh` wrapper scripts are provided for convenience; they contain basic configuration examples for the available options. Linux users can run them directly with `bash <sh_name>.sh`; Windows users will, for now, have to set the options in their own terminal by following these examples. Questions are welcome, and PRs adding `bat` files for direct use on Windows are welcome too!
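
    A minimal sketch of the naming rule mentioned in item 2 (`prefix + gt_name_wo_ext + suffix_w_ext`), assuming a hypothetical ground-truth file name and one method entry mirroring `config_method_json_example.json`; the toolkit's real matching code may differ.

        # Hedged sketch: compose a prediction file name from a GT name plus the configured prefix/suffix.
        # `method_cfg` mirrors the Method2/ECSSD entry; the GT name is hypothetical.
        import os

        method_cfg = {"path": "Path_Of_Method2/ecssd", "prefix": "ecssd_", "suffix": ".png"}
        gt_name = "0001.png"  # hypothetical ground-truth file name

        gt_name_wo_ext = os.path.splitext(gt_name)[0]
        pre_name = method_cfg.get("prefix", "") + gt_name_wo_ext + method_cfg["suffix"]
        pre_path = os.path.join(method_cfg["path"], pre_name)
        print(pre_path)  # Path_Of_Method2/ecssd/ecssd_0001.png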
lartpang committed Nov 18, 2021
1 parent 47397de commit 897612b
Showing 12 changed files with 640 additions and 190 deletions.
17 changes: 17 additions & 0 deletions examples/alias_for_plotting.json
@@ -0,0 +1,17 @@
{
"dataset": {
"Name_In_Json": "Name_In_SubFigure",
"NJUD": "NJUD",
"NLPR": "NLPR",
"DUTRGBD": "DUTRGBD",
"STEREO1000": "SETERE",
"RGBD135": "RGBD135",
"SSD": "SSD",
"SIP": "SIP"
},
"method": {
"Name_In_Json": "Name_In_Legend",
"GateNet_2020": "GateNet",
"MINet_R50_2020": "MINet"
}
}
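
A minimal sketch of how such an alias file could be applied when labeling subfigures and legends; the fallback-to-key behavior and the loading helper are assumptions, not the toolkit's actual API.

    # Hedged sketch: map raw config keys to display names, falling back to the key itself
    # when no alias is defined (assumed behavior).
    import json

    with open("examples/alias_for_plotting.json", encoding="utf-8") as f:
        aliases = json.load(f)

    def display_name(kind, raw_name):
        # kind is "dataset" or "method"; unknown names keep their original key.
        return aliases[kind].get(raw_name, raw_name)

    print(display_name("dataset", "NJUD"))           # -> "NJUD"
    print(display_name("method", "MINet_R50_2020"))  # -> "MINet"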
101 changes: 101 additions & 0 deletions examples/config_dataset_json_example.json
@@ -0,0 +1,101 @@
{
"LFSD": {
"root": "Path_Of_RGBDSOD_Datasets/LFSD",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/LFSD/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/LFSD/Mask",
"suffix": ".png"
}
},
"NJUD": {
"root": "Path_Of_RGBDSOD_Datasets/NJUD_FULL",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/NJUD_FULL/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/NJUD_FULL/Mask",
"suffix": ".png"
}
},
"NLPR": {
"root": "Path_Of_RGBDSOD_Datasets/NLPR_FULL",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/NLPR_FULL/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/NLPR_FULL/Mask",
"suffix": ".png"
}
},
"RGBD135": {
"root": "Path_Of_RGBDSOD_Datasets/RGBD135",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/RGBD135/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/RGBD135/Mask",
"suffix": ".png"
}
},
"SIP": {
"root": "Path_Of_RGBDSOD_Datasets/SIP",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/SIP/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/SIP/Mask",
"suffix": ".png"
}
},
"SSD": {
"root": "Path_Of_RGBDSOD_Datasets/SSD",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/SSD/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/SSD/Mask",
"suffix": ".png"
}
},
"STEREO797": {
"root": "Path_Of_RGBDSOD_Datasets/STEREO797",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/STEREO797/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/STEREO797/Mask",
"suffix": ".png"
}
},
"STEREO1000": {
"root": "Path_Of_RGBDSOD_Datasets/STEREO1000",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/STEREO1000/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/STEREO1000/Mask",
"suffix": ".png"
}
},
"DUTRGBD": {
"root": "Path_Of_RGBDSOD_Datasets/DUT-RGBD/Test",
"image": {
"path": "Path_Of_RGBDSOD_Datasets/DUT-RGBD/Test/Image",
"suffix": ".jpg"
},
"mask": {
"path": "Path_Of_RGBDSOD_Datasets/DUT-RGBD/Test/Mask",
"suffix": ".png"
}
}
}
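
A minimal sketch, assuming the `mask` path and `suffix` fields are used to enumerate ground-truth files, of how one dataset entry above could be turned into a list of GT name stems; the paths are placeholders and the toolkit's actual loader may work differently.

    # Hedged sketch: list ground-truth stems for one dataset entry of the config above.
    import json
    import os

    with open("examples/config_dataset_json_example.json", encoding="utf-8") as f:
        datasets = json.load(f)

    mask_cfg = datasets["LFSD"]["mask"]
    gt_stems = sorted(
        os.path.splitext(name)[0]
        for name in os.listdir(mask_cfg["path"])
        if name.endswith(mask_cfg["suffix"])
    )
    print(f"{len(gt_stems)} ground-truth masks under {mask_cfg['path']}")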
File renamed without changes.
78 changes: 78 additions & 0 deletions examples/config_method_json_example.json
@@ -0,0 +1,78 @@
{
"Method1": {
"PASCAL-S": {
"path": "Path_Of_Method1/PASCAL-S/DGRL",
"suffix": ".png"
},
"ECSSD": {
"path": "Path_Of_Method1/ECSSD/DGRL",
"suffix": ".png"
},
"HKU-IS": {
"path": "Path_Of_Method1/HKU-IS/DGRL",
"suffix": ".png"
},
"DUT-OMRON": {
"path": "Path_Of_Method1/DUT-OMRON/DGRL",
"suffix": ".png"
},
"DUTS-TE": {
"path": "Path_Of_Method1/DUTS-TE/DGRL",
"suffix": ".png"
}
},
"Method2": {
"PASCAL-S": {
"path": "Path_Of_Method2/pascal",
"prefix": "pascal_",
"suffix": ".png"
},
"ECSSD": {
"path": "Path_Of_Method2/ecssd",
"prefix": "ecssd_",
"suffix": ".png"
},
"HKU-IS": {
"path": "Path_Of_Method2/hku",
"prefix": "hku_",
"suffix": ".png"
},
"DUT-OMRON": {
"path": "Path_Of_Method2/duto",
"prefix": "duto_",
"suffix": ".png"
},
"DUTS-TE": {
"path": "Path_Of_Method2/dut_te",
"prefix": "dut_te_",
"suffix": ".png"
}
},
"Method3": {
"PASCAL-S": {
"path": "Path_Of_Method3/pascal",
"prefix": "pascal_",
"suffix": "_fused_sod.png"
},
"ECSSD": {
"path": "Path_Of_Method3/ecssd",
"prefix": "ecssd_",
"suffix": "_fused_sod.png"
},
"HKU-IS": {
"path": "Path_Of_Method3/hku",
"prefix": "hku_",
"suffix": "_fused_sod.png"
},
"DUT-OMRON": {
"path": "Path_Of_Method3/duto",
"prefix": "duto_",
"suffix": "_fused_sod.png"
},
"DUTS-TE": {
"path": "Path_Of_Method3/dut_te",
"prefix": "dut_te_",
"suffix": "_fused_sod.png"
}
}
}
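
The evaluation script's docstring states that the intersection of predictions and ground truths is computed automatically. A minimal sketch of that idea under the naming rule above, with hypothetical directory contents and GT stems; the toolkit's real implementation may differ.

    # Hedged sketch: keep only the GT stems for which a matching prediction file exists.
    # `pre_cfg` mirrors the Method3/ECSSD entry; `gt_stems` are hypothetical.
    import os

    pre_cfg = {"path": "Path_Of_Method3/ecssd", "prefix": "ecssd_", "suffix": "_fused_sod.png"}
    gt_stems = ["0001", "0002", "0003"]  # hypothetical ground-truth stems

    pre_names = set(os.listdir(pre_cfg["path"]))
    evaluated = [
        stem
        for stem in gt_stems
        if pre_cfg.get("prefix", "") + stem + pre_cfg["suffix"] in pre_names
    ]
    print(f"{len(evaluated)}/{len(gt_stems)} prediction/GT pairs will be evaluated")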
File renamed without changes.
152 changes: 98 additions & 54 deletions examples/eval_all.py
@@ -1,70 +1,114 @@
# -*- coding: utf-8 -*-
import argparse
import os
import sys
import warnings

sys.path.append("..")

from metrics import cal_sod_matrics
from utils.generate_info import get_datasets_info, get_methods_info
from utils.misc import make_dir

"""
Include: Fm Curve/PR Curves/MAE/(max/mean/weighted) Fmeasure/Smeasure/Emeasure

NOTE:
* Our method automatically calculates the intersection of `pre` and `gt`.
But it needs to have uniform naming rules for `pre` and `gt`.
"""
def get_args():
parser = argparse.ArgumentParser(
description="""Include: Fm Curve/PR Curves/MAE/(max/mean/weighted) Fmeasure/Smeasure/Emeasure
NOTE:
Our method automatically calculates the intersection of `pre` and `gt`.
Currently supported pre naming rules: `prefix + gt_name_wo_ext + suffix_w_ext`
""",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument("--dataset-json", required=True, type=str, help="Json file for datasets.")
parser.add_argument("--method-json", required=True, type=str, help="Json file for methods.")
parser.add_argument("--metric-npy", type=str, help="Npy file for saving metric results.")
parser.add_argument("--curves-npy", type=str, help="Npy file for saving curve results.")
parser.add_argument("--record-txt", type=str, help="Txt file for saving metric results.")
parser.add_argument("--to-overwrite", action="store_true", help="To overwrite the txt file.")
parser.add_argument("--record-xlsx", type=str, help="Xlsx file for saving metric results.")
parser.add_argument(
"--include-methods",
type=str,
nargs="+",
help="Names of only specific methods you want to evaluate.",
)
parser.add_argument(
"--exclude-methods",
type=str,
nargs="+",
help="Names of some specific methods you do not want to evaluate.",
)
parser.add_argument(
"--include-datasets",
type=str,
nargs="+",
help="Names of only specific datasets you want to evaluate.",
)
parser.add_argument(
"--exclude-datasets",
type=str,
nargs="+",
help="Names of some specific datasets you do not want to evaluate.",
)
parser.add_argument(
"--num-workers",
type=int,
default=4,
help="Number of workers for multi-threading or multi-processing. Default: 4",
)
parser.add_argument(
"--num-bits",
type=int,
default=3,
help="Number of decimal places for showing results. Default: 3",
)
args = parser.parse_args()

total_info = dict(
rgb_sod=dict(
dataset="/home/lart/Coding/GIT/PySODEvalToolkit/configs/datasets/json/rgb_sod.json",
method="/home/lart/Coding/GIT/PySODEvalToolkit/configs/methods/json/rgb_sod_methods.json",
),
rgb_cod=dict(
dataset="/home/lart/Coding/GIT/PySODEvalToolkit/configs/datasets/json/rgb_cod.json",
method="/home/lart/Coding/GIT/PySODEvalToolkit/configs/methods/json/rgb_cod_methods.json",
),
rgbd_sod=dict(
dataset="/home/lart/Coding/GIT/PySODEvalToolkit/configs/datasets/json/rgbd_sod.json",
method="/home/lart/Coding/GIT/PySODEvalToolkit/configs/methods/json/rgbd_sod_methods_ablation.json",
),
)
if args.metric_npy is not None:
make_dir(os.path.dirname(args.metric_npy))
if args.curves_npy is not None:
make_dir(os.path.dirname(args.curves_npy))
if args.record_txt is not None:
make_dir(os.path.dirname(args.record_txt))
if args.record_xlsx is not None:
make_dir(os.path.dirname(args.record_xlsx))
if args.to_overwrite and not args.record_txt:
warnings.warn("--to-overwrite only works with a valid --record-txt")
return args

# Currently supports rgb_cod, rgb_sod, and rgbd_sod
data_type = "rgbd_sod"
data_info = total_info[data_type]

# Folder that holds the output files
output_path = "../output"
make_dir(output_path)
def main():
args = get_args()

# Dictionary containing the information of all datasets
dataset_info = get_datasets_info(
datastes_info_json=data_info["dataset"],
include_datasets=["NJUD"],
# exclude_datasets=["LFSD"],
)
# Dictionary containing the result information and plotting configuration of all methods to be compared
drawing_info = get_methods_info(
methods_info_json=data_info["method"],
for_drawing=True,
our_name="",
include_methods=["CTMF_V16"],
# exclude_methods=["UCNet_ABP", "UCNet_CVAE"],
)
# Dictionary containing the information of all datasets
datasets_info = get_datasets_info(
datastes_info_json=args.dataset_json,
include_datasets=args.include_datasets,
exclude_datasets=args.exclude_datasets,
)
# Dictionary containing the result information of all methods to be compared
methods_info = get_methods_info(
methods_info_json=args.method_json,
include_methods=args.include_methods,
exclude_methods=args.exclude_methods,
)

if __name__ == "__main__":
# Ensure that multiprocessing also works correctly on Windows
cal_sod_matrics.cal_sod_matrics(
data_type=data_type,
to_append=True,  # whether to keep previous evaluation records (only affects the txt_path file)
txt_path=os.path.join(output_path, f"{data_type}.txt"),
xlsx_path=os.path.join(output_path, f"{data_type}.xlsx"),
drawing_info=drawing_info,
dataset_info=dataset_info,
save_npy=True,  # whether to save the evaluation results to npy files, which can be used to plot PR and Fm curves
# File paths for saving the curve metric data
curves_npy_path=os.path.join(output_path, data_type + "_" + "curves.npy"),
metrics_npy_path=os.path.join(output_path, data_type + "_" + "metrics.npy"),
num_bits=3,  # number of decimal places kept in the evaluation results
num_workers=4,
use_mp=False, # using multi-threading
sheet_name="Results",
to_append=not args.to_overwrite,
txt_path=args.record_txt,
xlsx_path=args.record_xlsx,
methods_info=methods_info,
datasets_info=datasets_info,
curves_npy_path=args.curves_npy,
metrics_npy_path=args.metric_npy,
num_bits=args.num_bits,
num_workers=args.num_workers,
use_mp=False,
)


if __name__ == "__main__":
main()
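
A minimal sketch of invoking the rewritten `eval_all.py` with its new command-line options from Python; all paths are placeholders, and the `sh` scripts mentioned in the commit message remain the reference for the intended invocation.

    # Hedged sketch: drive the new CLI; output paths and method names are placeholders.
    import subprocess

    subprocess.run(
        [
            "python", "eval_all.py",
            "--dataset-json", "examples/config_dataset_json_example.json",
            "--method-json", "examples/config_method_json_example.json",
            "--record-txt", "output/results.txt",
            "--record-xlsx", "output/results.xlsx",
            "--curves-npy", "output/curves.npy",
            "--metric-npy", "output/metrics.npy",
            "--include-methods", "Method1", "Method2",
            "--num-workers", "4",
            "--num-bits", "3",
        ],
        check=True,
    )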
