
first run

main
Amir, 2 years ago
commit 8f3b51b1bc
5 changed files with 33 additions and 11 deletions:

  1. opt.py             +9   −7
  2. results.txt        +10  −0
  3. train.py           +9   −0
  4. utils/__init__.py  +1   −0
  5. utils/loss.py      +4   −4

opt.py  (+9 −7)

@@ -5,24 +5,26 @@ import os
 parse = argparse.ArgumentParser(description='PyTorch Polyp Segmentation')

 "-------------------data option--------------------------"
-parse.add_argument('--root', type=str, default='/scratch/krushi1992/MICCAI2021/new-med-up/CVC-frame/')
-parse.add_argument('--dataset', type=str, default='EndoScene')
-parse.add_argument('--train_data_dir', type=str, default='train')
-parse.add_argument('--valid_data_dir', type=str, default='valid')
-parse.add_argument('--test_data_dir', type=str, default='test')
+# parse.add_argument('--root', type=str, default='/media/external_10TB/10TB/pourmand/CVC-EndoSceneStill')
+# parse.add_argument('--dataset', type=str, default='EndoScene')

+parse.add_argument('--root',default='/media/external_10TB/10TB/pourmand/Kvasir-SEG/Kvasir-SEG')
+parse.add_argument('--dataset',type=str,default='kvasir_SEG')

+parse.add_argument('--train_data_dir', type=str, default='')
+parse.add_argument('--valid_data_dir', type=str, default='')
+parse.add_argument('--test_data_dir', type=str, default='')

 "-------------------training option-----------------------"
 parse.add_argument('--mode', type=str, default='train')
-parse.add_argument('--nEpoch', type=int, default=200)
+parse.add_argument('--nEpoch', type=int, default=10)
 parse.add_argument('--batch_size', type=float, default=4)
 parse.add_argument('--num_workers', type=int, default=0)
 parse.add_argument('--use_gpu', type=bool, default=True)
 parse.add_argument('--load_ckpt', type=str, default=None)
 parse.add_argument('--model', type=str, default='EUNet')
 parse.add_argument('--expID', type=int, default=1)
-parse.add_argument('--ckpt_period', type=int, default=50)
+parse.add_argument('--ckpt_period', type=int, default=0)
 parse.add_argument('--weight_const', type=float, default=0.3)

 "-------------------optimizer option-----------------------"

results.txt  (+10 −0)

@@ -0,0 +1,10 @@
+recall: 0.8576, specificity: 0.9496, precision: 0.7379, F1: 0.7466, F2: 0.7929, ACC_overall: 0.9293, IoU_poly: 0.6324, IoU_bg: 0.9147, IoU_mean: 0.7736
+recall: 0.9194, specificity: 0.9545, precision: 0.7454, F1: 0.7950, F2: 0.8533, ACC_overall: 0.9426, IoU_poly: 0.6884, IoU_bg: 0.9293, IoU_mean: 0.8089
+recall: 0.9209, specificity: 0.9635, precision: 0.8065, F1: 0.8338, F2: 0.8733, ACC_overall: 0.9514, IoU_poly: 0.7441, IoU_bg: 0.9394, IoU_mean: 0.8417
+recall: 0.9211, specificity: 0.9660, precision: 0.8323, F1: 0.8523, F2: 0.8830, ACC_overall: 0.9552, IoU_poly: 0.7704, IoU_bg: 0.9440, IoU_mean: 0.8572
+recall: 0.8685, specificity: 0.9803, precision: 0.8929, F1: 0.8590, F2: 0.8577, ACC_overall: 0.9586, IoU_poly: 0.7808, IoU_bg: 0.9483, IoU_mean: 0.8645
+recall: 0.9076, specificity: 0.9653, precision: 0.8430, F1: 0.8484, F2: 0.8723, ACC_overall: 0.9523, IoU_poly: 0.7675, IoU_bg: 0.9407, IoU_mean: 0.8541
+recall: 0.9342, specificity: 0.9678, precision: 0.8530, F1: 0.8732, F2: 0.9004, ACC_overall: 0.9605, IoU_poly: 0.8002, IoU_bg: 0.9497, IoU_mean: 0.8750
+recall: 0.9368, specificity: 0.9719, precision: 0.8748, F1: 0.8886, F2: 0.9102, ACC_overall: 0.9658, IoU_poly: 0.8212, IoU_bg: 0.9558, IoU_mean: 0.8885
+recall: 0.9421, specificity: 0.9756, precision: 0.8770, F1: 0.8940, F2: 0.9169, ACC_overall: 0.9680, IoU_poly: 0.8276, IoU_bg: 0.9585, IoU_mean: 0.8930
+recall: 0.9344, specificity: 0.9764, precision: 0.8932, F1: 0.8986, F2: 0.9143, ACC_overall: 0.9691, IoU_poly: 0.8364, IoU_bg: 0.9599, IoU_mean: 0.8982
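
Note: results.txt captures one comma-separated metrics line per validation pass; over the ten logged epochs IoU_mean climbs from 0.7736 to 0.8982. A small helper, not part of the commit, to read the file back for plotting or comparison:

# Hypothetical helper, not part of the commit: parse results.txt into a
# list of {metric: value} dicts, one per logged epoch.
def load_results(path='results.txt'):
    rows = []
    with open(path) as f:
        for line in f:
            if not line.strip():
                continue
            rows.append({k.strip(): float(v)
                         for k, v in (pair.split(':')
                                      for pair in line.split(','))})
    return rows

rows = load_results()
print(len(rows), rows[-1]['IoU_mean'])  # 10 0.8982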

train.py  (+9 −0)

@@ -43,6 +43,7 @@ def valid(model, valid_dataloader, total_batch):


 def train():
+    file_name = 'results.txt'

     model = generate_model(opt)
     #model = nn.DataParallel(model)
@@ -101,6 +102,14 @@ def train():
               metrics_result['F1'], metrics_result['F2'], metrics_result['ACC_overall'],
               metrics_result['IoU_poly'], metrics_result['IoU_bg'], metrics_result['IoU_mean']))

+        with open(file_name,'a') as f:
+            f.write('recall: %.4f, specificity: %.4f, precision: %.4f, F1: %.4f,'
+                    ' F2: %.4f, ACC_overall: %.4f, IoU_poly: %.4f, IoU_bg: %.4f, IoU_mean: %.4f'
+                    % (metrics_result['recall'], metrics_result['specificity'], metrics_result['precision'],
+                       metrics_result['F1'], metrics_result['F2'], metrics_result['ACC_overall'],
+                       metrics_result['IoU_poly'], metrics_result['IoU_bg'], metrics_result['IoU_mean'])+'\n')
+
+
         if ((epoch + 1) % opt.ckpt_period == 0):
             torch.save(model.state_dict(), './checkpoints/exp' + str(opt.expID)+"/ck_{}.pth".format(epoch + 1))
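Note: one interaction to flag with the opt.py hunk above: --ckpt_period now defaults to 0, so the checkpoint condition computes (epoch + 1) % 0 and raises ZeroDivisionError on the first epoch. A sketch of a guard, not part of the commit; opt, epoch, and model come from the surrounding train() scope:

# Sketch only, not part of the commit: skip periodic checkpoints when
# ckpt_period is 0, and create the experiment directory up front,
# since torch.save does not create missing directories.
import os
import torch

if opt.ckpt_period > 0 and (epoch + 1) % opt.ckpt_period == 0:
    ckpt_dir = './checkpoints/exp' + str(opt.expID)
    os.makedirs(ckpt_dir, exist_ok=True)
    torch.save(model.state_dict(), ckpt_dir + '/ck_{}.pth'.format(epoch + 1))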


utils/__init__.py  (+1 −0)

@@ -0,0 +1 @@
+from .transform import *
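
Note: re-exporting the transforms at package level shortens call sites; assuming utils/transform.py defines a transform class such as Resize (a hypothetical name standing in for whatever it actually exports), callers can now write:

# Hypothetical example: `Resize` stands in for whatever
# utils/transform.py actually exports.
from utils import Resize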

utils/loss.py  (+4 −4)

@@ -76,13 +76,13 @@ def DeepSupervisionLoss(pred, gt):
     criterion = BceDiceLoss()

     loss0 = criterion(d0, gt)
-    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True, recompute_scale_factor=True)
+    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True)
     loss1 = criterion(d1, gt)
-    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True, recompute_scale_factor=True)
+    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True)
     loss2 = criterion(d2, gt)
-    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True, recompute_scale_factor=True)
+    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True)
     loss3 = criterion(d3, gt)
-    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True, recompute_scale_factor=True)
+    gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear', align_corners=True)
     loss4 = criterion(d4, gt)

     return loss0 + loss1 + loss2 + loss3 + loss4
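
Note: the only change here is dropping recompute_scale_factor=True from each F.interpolate call, presumably for compatibility with the installed PyTorch version (the keyword is only understood by newer releases, and the output shape is the same either way for a plain scale_factor). Since the four downsample-and-score steps are identical, they can also be folded into a loop; an equivalent sketch, not the committed code:

# Equivalent reformulation, not the committed code: fold the four
# identical halve-gt-then-score steps into one loop.
import torch.nn.functional as F

def deep_supervision_loss(preds, gt, criterion):
    # preds = (d0, d1, d2, d3, d4); gt is halved before each deeper head.
    loss = criterion(preds[0], gt)
    for d in preds[1:]:
        gt = F.interpolate(gt, scale_factor=0.5, mode='bilinear',
                           align_corners=True)
        loss = loss + criterion(d, gt)
    return loss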
