Browse Source

moving to HPC

master
Mahdi Abdollah Pour 2 years ago
parent
commit
6026017485

+ 1
- 0
.gitignore View File

events.out.tfevents*
pretrained
YOLOX_outputs
.idea/*



+ 3
- 0
exps/example/metamot/yolox_x_mot17_on_mot20.py View File

self.train_dir = '/media/external_10TB/10TB/vision/ByteTrackData/MOT17/annotations'
onlyfiles = [f for f in listdir(self.train_dir) if isfile(join(self.train_dir, f))]
self.train_anns = [file for file in onlyfiles if file.__contains__('train') and file.__contains__('FRCNN')]
# # TODO: remove
# self.train_anns = self.train_anns[:1]

self.val_dir = '/media/external_10TB/10TB/vision/ByteTrackData/MOT20/annotations'
onlyfiles = [f for f in listdir(self.val_dir) if isfile(join(self.val_dir, f))]
self.val_anns = [file for file in onlyfiles if file.__contains__('train') and file.__contains__(

+ 3
- 1
yolox/core/meta_trainer.py View File

logger.info(
    "Model Summary: {}".format(get_model_info(model, self.exp.test_size))
)
# exit()
model.to(self.device)

# from torchsummary import summary
# summary(model, input_size=(3, 300, 300), device='cuda')
# value of epoch will be set in `resume_train`
model = self.resume_train(model)



+ 1
- 0
yolox/exp/meta_yolox_base.py View File



# ---------------- dataloader config ---------------- #
# set worker to 4 for shorter dataloader init time
# TODO: deal with this multi threading
self.data_num_workers = 4
self.input_size = (640, 640)
self.random_size = (14, 26)

Loading…
Cancel
Save