Repository for the M.A.I.L system's analysis server. The Darknet detector configuration included in the repository, a YOLOv4-tiny style network set up for a single class, is reproduced below.

[net]
# Testing
#batch=1
#subdivisions=1
# Training
batch=64
subdivisions=16
width=1920
height=1056
channels=3
momentum=0.9
decay=0.0005
angle=0
saturation = 1.5
exposure = 1.5
hue=.1

learning_rate=0.00261
burn_in=1000
max_batches = 100000
policy=steps
steps=1600,1800
scales=.1,.1

#weights_reject_freq=1001
#ema_alpha=0.9998
#equidistant_point=1000
#num_sigmas_reject_badlabels=3
#badlabels_rejection_percentage=0.2

[convolutional]
batch_normalize=1
filters=32
size=3
stride=2
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=2
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[route]
layers=-1
groups=2
group_id=1

[convolutional]
batch_normalize=1
filters=32
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=32
size=3
stride=1
pad=1
activation=leaky

[route]
layers = -1,-2

[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky

[route]
layers = -6,-1

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[route]
layers=-1
groups=2
group_id=1

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[route]
layers = -1,-2

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[route]
layers = -6,-1

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[route]
layers=-1
groups=2
group_id=1

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[route]
layers = -1,-2

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[route]
layers = -6,-1

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

##################################

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
size=1
stride=1
pad=1
filters=18
activation=linear

[yolo]
mask = 3,4,5
anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319
classes=1
num=6
jitter=.3
scale_x_y = 1.05
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
ignore_thresh = .7
truth_thresh = 1
random=0
resize=1.5
nms_kind=greedynms
beta_nms=0.6
#new_coords=1
#scale_x_y = 2.0

[route]
layers = -4

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[upsample]
stride=2

[route]
layers = -1, 23

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
size=1
stride=1
pad=1
filters=18
activation=linear

[yolo]
mask = 1,2,3
anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319
classes=1
num=6
jitter=.3
scale_x_y = 1.05
cls_normalizer=1.0
iou_normalizer=0.07
iou_loss=ciou
ignore_thresh = .7
truth_thresh = 1
random=0
resize=1.5
nms_kind=greedynms
beta_nms=0.6
#new_coords=1
#scale_x_y = 2.0
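
The configuration above follows the YOLOv4-tiny layout from the AlexeyAB Darknet fork, adapted to a single class (classes=1, hence filters=18 before each [yolo] layer) and a 1920x1056 network input. As a minimal sketch of how such a config can be consumed outside of Darknet itself, the Python snippet below loads it together with a trained weights file through OpenCV's DNN module and runs detection on one image. The file names, input image, and thresholds are assumptions for illustration, not artifacts of this repository.

    # Minimal inference sketch, assuming the config above is saved as
    # "yolov4-tiny-custom.cfg" and that trained Darknet weights exist as
    # "yolov4-tiny-custom_best.weights" (both names are hypothetical).
    import cv2
    import numpy as np

    CFG_PATH = "yolov4-tiny-custom.cfg"               # hypothetical path to the config above
    WEIGHTS_PATH = "yolov4-tiny-custom_best.weights"  # hypothetical trained weights
    CONF_THRESHOLD = 0.25
    NMS_THRESHOLD = 0.45

    # Load the Darknet network definition and weights.
    net = cv2.dnn.readNetFromDarknet(CFG_PATH, WEIGHTS_PATH)

    # DetectionModel decodes the [yolo] outputs and applies NMS for us.
    model = cv2.dnn_DetectionModel(net)
    # Input size must match width/height in the [net] section (1920x1056 here).
    model.setInputParams(size=(1920, 1056), scale=1 / 255.0, swapRB=True)

    image = cv2.imread("example.jpg")                 # hypothetical input image
    if image is None:
        raise FileNotFoundError("example.jpg not found")

    class_ids, scores, boxes = model.detect(image, CONF_THRESHOLD, NMS_THRESHOLD)

    # Normalize shapes across OpenCV versions and print each detection.
    class_ids = np.array(class_ids).reshape(-1)
    scores = np.array(scores).reshape(-1)
    for class_id, score, box in zip(class_ids, scores, boxes):
        x, y, w, h = box
        print(f"class_id={int(class_id)}, score={float(score):.2f}, box=({x}, {y}, {w}, {h})")

For training, the usual AlexeyAB Darknet workflow (a .data file listing the class names and train/valid image lists, plus pretrained convolutional weights passed to `darknet detector train`) would apply, but the exact paths and data files are not part of this section.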