Meta Byte Track

dataloading.py 6.1KB

#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.
import torch
from torch.utils.data.dataloader import DataLoader as torchDataLoader
from torch.utils.data.dataloader import default_collate

import os
import random

from .samplers import YoloBatchSampler


def get_yolox_datadir():
    """
    Get the dataset directory of YOLOX. If the environment variable `YOLOX_DATADIR`
    is set, this function returns its value. Otherwise it falls back to the data
    directory configured below.
    """
    yolox_datadir = os.getenv("YOLOX_DATADIR", None)
    if yolox_datadir is None:
        import yolox

        yolox_path = os.path.dirname(os.path.dirname(yolox.__file__))
        yolox_datadir = os.path.join(yolox_path, "/media/external_10TB/10TB/vision/ByteTrackData")
    return yolox_datadir
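
# ---------------------------------------------------------------------------
# Illustrative note (not part of the original file): the dataset location can
# be overridden without editing this module by exporting the environment
# variable, e.g. (the path below is only an example):
#
#     export YOLOX_DATADIR=/path/to/ByteTrackData
#
# Also note that ``os.path.join`` discards ``yolox_path`` whenever its second
# argument is an absolute path, so the hard-coded
# "/media/external_10TB/10TB/vision/ByteTrackData" directory is returned
# verbatim when ``YOLOX_DATADIR`` is unset.
# ---------------------------------------------------------------------------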
class DataLoader(torchDataLoader):
    """
    Lightnet dataloader that enables on-the-fly resizing of the images.

    See :class:`torch.utils.data.DataLoader` for more information on the arguments.
    Check more on the following website:
    https://gitlab.com/EAVISE/lightnet/-/blob/master/lightnet/data/_dataloading.py

    Note:
        This dataloader only works with :class:`lightnet.data.Dataset` based datasets.

    Example:
        >>> class CustomSet(ln.data.Dataset):
        ...     def __len__(self):
        ...         return 4
        ...     @ln.data.Dataset.resize_getitem
        ...     def __getitem__(self, index):
        ...         # Should return (image, anno) but here we return (input_dim,)
        ...         return (self.input_dim,)
        >>> dl = ln.data.DataLoader(
        ...     CustomSet((200, 200)),
        ...     batch_size=2,
        ...     collate_fn=ln.data.list_collate  # We want the data to be grouped as a list
        ... )
        >>> dl.dataset.input_dim  # Default input_dim
        (200, 200)
        >>> for d in dl:
        ...     d
        [[(200, 200), (200, 200)]]
        [[(200, 200), (200, 200)]]
        >>> dl.change_input_dim(320, random_range=None)
        (320, 320)
        >>> for d in dl:
        ...     d
        [[(320, 320), (320, 320)]]
        [[(320, 320), (320, 320)]]
        >>> dl.change_input_dim((480, 320), random_range=None)
        (480, 320)
        >>> for d in dl:
        ...     d
        [[(480, 320), (480, 320)]]
        [[(480, 320), (480, 320)]]
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.__initialized = False
        shuffle = False
        sampler = None  # initialise so the checks below never hit an unbound local
        batch_sampler = None
        if len(args) > 5:
            shuffle = args[2]
            sampler = args[3]
            batch_sampler = args[4]
        elif len(args) > 4:
            shuffle = args[2]
            sampler = args[3]
            if "batch_sampler" in kwargs:
                batch_sampler = kwargs["batch_sampler"]
        elif len(args) > 3:
            shuffle = args[2]
            if "sampler" in kwargs:
                sampler = kwargs["sampler"]
            if "batch_sampler" in kwargs:
                batch_sampler = kwargs["batch_sampler"]
        else:
            if "shuffle" in kwargs:
                shuffle = kwargs["shuffle"]
            if "sampler" in kwargs:
                sampler = kwargs["sampler"]
            if "batch_sampler" in kwargs:
                batch_sampler = kwargs["batch_sampler"]

        # Use custom BatchSampler
        if batch_sampler is None:
            if sampler is None:
                if shuffle:
                    sampler = torch.utils.data.sampler.RandomSampler(self.dataset)
                    # sampler = torch.utils.data.DistributedSampler(self.dataset)
                else:
                    sampler = torch.utils.data.sampler.SequentialSampler(self.dataset)
            batch_sampler = YoloBatchSampler(
                sampler,
                self.batch_size,
                self.drop_last,
                input_dimension=self.dataset.input_dim,
            )
            # batch_sampler = IterationBasedBatchSampler(batch_sampler, num_iterations =

        self.batch_sampler = batch_sampler

        self.__initialized = True

    def close_mosaic(self):
        self.batch_sampler.mosaic = False

    def change_input_dim(self, multiple=32, random_range=(10, 19)):
        """This function will compute a new size and update it on the next mini_batch.

        Args:
            multiple (int or tuple, optional): values to multiply the randomly
                generated range by. Default **32**
            random_range (tuple, optional): This (min, max) tuple sets the range
                for the randomisation; Default **(10, 19)**

        Return:
            tuple: width, height tuple with the new dimension

        Note:
            The new size is generated as follows: |br|
            First we compute a random integer inside ``[random_range]``.
            We then multiply that number with the ``multiple`` argument,
            which gives our final new input size. |br|
            If ``multiple`` is an integer we generate a square size. If you give a tuple
            of **(width, height)**, the size is computed
            as :math:`rng * multiple[0], rng * multiple[1]`.

        Note:
            You can set the ``random_range`` argument to **None** to pass an exact
            size through ``multiple``. |br|
            See the example above for how this works.
        """
        if random_range is None:
            size = 1
        else:
            size = random.randint(*random_range)

        if isinstance(multiple, int):
            size = (size * multiple, size * multiple)
        else:
            size = (size * multiple[0], size * multiple[1])

        self.batch_sampler.new_input_dim = size

        return size
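
# ---------------------------------------------------------------------------
# Illustrative note (not part of the original file): ``change_input_dim`` only
# *schedules* a resize; ``YoloBatchSampler`` applies the new size on the next
# mini-batch.  With the defaults the arithmetic is simply
#
#     rng = random.randint(10, 19)      # random_range=(10, 19)
#     new_size = (rng * 32, rng * 32)   # multiple=32
#
# so the sampled input sizes are square multiples of 32 between 320 and 608.
# ---------------------------------------------------------------------------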

def list_collate(batch):
    """
    Function that collates lists or tuples together into one list (of lists/tuples).
    Use this as the collate function in a Dataloader, if you want to have a list of
    items as an output, as opposed to tensors (e.g. Brambox.boxes).
    """
    items = list(zip(*batch))

    for i in range(len(items)):
        if isinstance(items[i][0], (list, tuple)):
            items[i] = list(items[i])
        else:
            items[i] = default_collate(items[i])

    return items
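
# ---------------------------------------------------------------------------
# Illustrative self-test (not part of the original file): a minimal sketch of
# what ``list_collate`` returns for a toy batch in which each sample is an
# (image_tensor, box_list) pair.  Run it as a module, e.g.
# ``python -m yolox.data.dataloading`` (module path assumed), so that the
# relative import of ``YoloBatchSampler`` above resolves.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    toy_batch = [
        (torch.zeros(3, 4, 4), [[0.0, 0.0, 2.0, 2.0]]),
        (torch.ones(3, 4, 4), [[1.0, 1.0, 3.0, 3.0], [0.0, 0.0, 1.0, 1.0]]),
    ]
    images, boxes = list_collate(toy_batch)
    print(images.shape)  # torch.Size([2, 3, 4, 4]); tensors stacked by default_collate
    print(boxes)         # per-image box lists kept as plain Python lists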