Skip to content

Commit 4ff0a26

Browse files
committed
update readme
1 parent f78b983 commit 4ff0a26

3 files changed

Lines changed: 39 additions & 26 deletions

File tree

seg_nll/JSRT/README.md

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,24 @@
1-
# Weakly supervised segmentation demo using PyMIC
1+
# Noisy label-based segmentation demo using PyMIC
22

3-
In this example, we show scribble-supervised learning methods implemented in PyMIC.
4-
Currently, the following are available in PyMIC:
3+
In this example, we show noisy label learning methods implemented in PyMIC.
4+
Currently, the following methods are available in PyMIC:
55
|PyMIC Method|Reference|Remarks|
66
|---|---|---|
7-
|WSLEntropyMinimization|[Grandvalet et al.][em_paper], NeurIPS 2005| Entropy minimization for regularization|
8-
|WSLTotalVariation| [Luo et al.][tv_paper], arXiv 2022| Total variation for regularization|
9-
|WSLMumfordShah| [Kim et al.][mumford_paper], TIP 2020| Mumford-Shah loss for regularization|
10-
|WSLGatedCRF| [Obukhov et al.][gcrf_paper], arXiv 2019| Gated CRF for regularization|
11-
|WSLUSTM| [Liu et al.][ustm_paper], PR 2022| Adapt USTM with transform-consistency|
12-
|WSLDMPLS| [Luo et al.][dmpls_paper], MICCAI 2022| Dynamically mixed pseudo label supervision|
13-
14-
[em_paper]:https://papers.nips.cc/paper/2004/file/96f2b50b5d3613adf9c27049b2a888c7-Paper.pdf
15-
[tv_paper]:https://arxiv.org/abs/2111.02403
16-
[mumford_paper]:https://doi.org/10.1109/TIP.2019.2941265
17-
[gcrf_paper]:http://arxiv.org/abs/1906.04651
18-
[ustm_paper]:https://doi.org/10.1016/j.patcog.2021.108341
19-
[dmpls_paper]:https://arxiv.org/abs/2203.02106
7+
|GCELoss|[Zhang et al.][gce_paper], NeurIPS 2018| Train with SegmentationAgent|
8+
|NRDiceLoss| [Wang et al.][nrdice_paper], TMI 2020| Train with SegmentationAgent|
9+
|MAELoss| [Ghosh et al.][mae_paper], AAAI 2017| Train with SegmentationAgent|
10+
|NLLCoTeaching| [Han et al.][cot_paper], NeurIPS 2018| Co-teaching between two networks|
11+
|NLLCLSLSR| [Zhang et al.][cl_paper], MICCAI 2020| Confident learning with spatial label smoothing|
12+
|NLLTriNet| [Zhang et al.][trinet_paper], MICCAI 2020| Tri-network combined with sample selection|
13+
|NLLDAST| [Yang et al.][dast_paper], JBHI 2022| Divergence-aware selective training|
14+
15+
[gce_paper]:https://arxiv.org/abs/1805.07836
16+
[nrdice_paper]:https://ieeexplore.ieee.org/document/9109297
17+
[mae_paper]:https://arxiv.org/abs/1712.09482v1
18+
[cot_paper]:https://arxiv.org/abs/1804.06872
19+
[cl_paper]:https://link.springer.com/chapter/10.1007/978-3-030-59710-8_70
20+
[trinet_paper]:https://link.springer.com/chapter/10.1007/978-3-030-59719-1_25
21+
[dast_paper]:https://ieeexplore.ieee.org/document/9770406
2022

2123

2224
## Data
Lines changed: 20 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,13 @@ tensor_type = float
44

55
task_type = seg
66
root_dir = ../../PyMIC_data/JSRT
7-
train_csv = config/data/jsrt_train_mix.csv
7+
train_csv = config/data/jsrt_train_clean.csv
8+
train_csv_noise = config/data/jsrt_train_noise.csv
89
valid_csv = config/data/jsrt_valid.csv
910
test_csv = config/data/jsrt_test.csv
1011

11-
train_batch_size = 8
12+
train_batch_size = 4
13+
train_batch_size_noise = 4
1214

1315
# data transforms
1416
train_transform = [RandomCrop, RandomFlip, NormalizeWithMeanStd, LabelConvert, LabelToProbability]
@@ -34,7 +36,7 @@ LabelConvert_target_list = [0, 1]
3436
# the keys may be different for different networks
3537

3638
# type of network
37-
net_type = UNet2D
39+
net_type = UNet2D_DualBranch
3840

3941
# number of class, required for segmentation task
4042
class_num = 2
@@ -43,40 +45,49 @@ feature_chns = [16, 32, 64, 128, 256]
4345
dropout = [0, 0, 0.3, 0.4, 0.5]
4446
bilinear = False
4547
deep_supervise= False
48+
output_mode = first
4649

4750
[training]
4851
# list of gpus
49-
gpus = [0]
52+
gpus = [1]
5053

51-
loss_type = NoiseRobustDiceLoss
52-
NoiseRobustDiceLoss_gamma = 1.5
54+
loss_type = CrossEntropyLoss
5355

5456
# for optimizers
5557
optimizer = Adam
5658
learning_rate = 1e-3
5759
momentum = 0.9
5860
weight_decay = 1e-5
5961

60-
# for lr scheduler
62+
# for lr scheduler (ReduceLROnPlateau)
6163
lr_scheduler = ReduceLROnPlateau
6264
lr_gamma = 0.5
6365
ReduceLROnPlateau_patience = 2000
6466

65-
ckpt_save_dir = model/unet_nrdice
67+
ckpt_save_dir = model/unet_dast
6668

6769
# start iter
6870
iter_start = 0
6971
iter_max = 10000
7072
iter_valid = 100
7173
iter_save = [10000]
7274

75+
[noisy_label_learning]
76+
nll_method = DAST
77+
dast_dbc_w = 0.1
78+
dast_st_w = 0.1
79+
dast_rank_length = 20
80+
dast_select_ratio = 0.2
81+
rampup_start = 1000
82+
rampup_end = 8000
83+
7384
[testing]
7485
# list of gpus
7586
gpus = [0]
7687

7788
# checkpoint mode can be [0-latest, 1-best, 2-specified]
7889
ckpt_mode = 0
79-
output_dir = result/unet_nrdice
90+
output_dir = result/unet_dast
8091

8192
# convert the label of prediction output
8293
label_source = [0, 1]

seg_wsl/ACDC/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Weakly supervised segmentation demo using PyMIC
22

33
In this example, we show scribble-supervised learning methods implemented in PyMIC.
4-
Currently, the following are available in PyMIC:
4+
Currently, the following methods are available in PyMIC:
55
|PyMIC Method|Reference|Remarks|
66
|---|---|---|
77
|WSLEntropyMinimization|[Grandvalet et al.][em_paper], NeurIPS 2005| Entropy minimization for regularization|

0 commit comments

Comments
 (0)