-
Notifications
You must be signed in to change notification settings - Fork 0
/
all_in.sh
48 lines (45 loc) · 1.07 KB
/
all_in.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
## my best plan here:
### Stage 1: train se_resnext_3474 from scratch with label smoothing,
### SGD + warmup LR schedule, mixup and cutout augmentation.
### Writes the best checkpoint under ./Results_final
### (Results_final/se_resnext_3474_lr_0.1/model_best.pth.tar), which
### stage 2 (the KD finetune) loads via --pth — so abort the whole
### pipeline if this stage fails instead of training KD on a missing
### or stale checkpoint.
python main.py \
--lr 0.1 \
--data ./data \
--arch se_resnext_3474 \
--result ./Results_final \
--crop-size 64 \
--loss-type labelsmooth \
--optimizer SGD \
--logfile all_in.txt \
--epochs 300 \
--lr-type warmup \
--warmup-epoch 20 \
--mixup True \
--cutout True || { echo "stage 1 (base training) failed" >&2; exit 1; }
### Stage 2: finetune se_resnext_3474 with knowledge distillation.
### Student weights come from the stage-1 checkpoint (--pth); the
### resnet101_t teacher is loaded from pretrained weights (--tpth).
### Writes its best checkpoint under ./Results_final_kd, which stage 3
### loads — so abort the pipeline on failure rather than letting the
### final finetune run against a missing or stale checkpoint.
python main_kd.py \
--data ./data \
--arch se_resnext_3474 \
--kdarch resnet101_t \
--result ./Results_final_kd \
--logfile all_in_kd.txt \
--epochs 80 \
--lr-type step \
--lr 0.001 \
--mixup True \
--cutout True \
--pretrained True \
--pth Results_final/se_resnext_3474_lr_0.1/model_best.pth.tar \
--tpth pretrained/resnet101_fine_all.pth.tar || { echo "stage 2 (KD finetune) failed" >&2; exit 1; }
### Stage 3: final finetune, starting from the stage-2 KD checkpoint
### (--pth), 40 epochs at low LR with a step schedule.
### NOTE(review): the original header said "finetune with ClassBalance"
### but --loss-type is CrossEntropyLoss — confirm whether a
### class-balanced loss flag was intended here.
### Guard the exit status so a failed final stage is reported instead
### of the script ending with a silent success code.
python main.py \
--lr 0.001 \
--data ./data \
--arch se_resnext_3474 \
--result ./Results_final_finetune \
--crop-size 64 \
--loss-type CrossEntropyLoss \
--lr-type step \
--optimizer SGD \
--logfile final_finetune.txt \
--finetune True \
--epochs 40 \
--pth Results_final_kd/se_resnext_3474_lr_0.001/model_best.pth.tar || { echo "stage 3 (final finetune) failed" >&2; exit 1; }