# enet-coco.cfg

[net]
# Testing
#batch=1
#subdivisions=1
# Training
batch=64
subdivisions=8
width=416
height=416
channels=3
momentum=0.9
decay=0.0005
angle=0
saturation = 1.5
exposure = 1.5
hue=.1
learning_rate=0.001
burn_in=1000
max_batches = 500200
policy=steps
steps=400000,450000
scales=.1,.1
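
# Schedule: with policy=steps the learning rate is multiplied by the matching
# entry of scales at each value in steps (x0.1 at 400k and again at 450k
# iterations); burn_in ramps the rate up over the first 1000 iterations.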

### CONV1 - 1 (1)
# conv1
[convolutional]
filters=32
size=3
pad=1
stride=2
batch_normalize=1
activation=swish

### CONV2 - MBConv1 - 1 (1)
# conv2_1_expand
[convolutional]
filters=32
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv2_1_dwise
[convolutional]
groups=32
filters=32
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=4 (recommended r=16)
[convolutional]
filters=8
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=32
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4
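
# Squeeze-and-excitation: [avgpool] averages each channel, the two 1x1
# convolutions squeeze (swish) and re-expand (logistic) the channel
# descriptor, and [scale_channels] from=-4 multiplies the depthwise
# output four layers back by these per-channel weights.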

# conv2_1_linear
[convolutional]
filters=16
size=1
stride=1
pad=0
batch_normalize=1
activation=linear
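
# Every block below repeats this MBConv layout: 1x1 expand, depthwise
# conv (groups = filters), squeeze-and-excitation, then a 1x1 linear
# projection without activation.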

### CONV3 - MBConv6 - 1 (2)
# conv2_2_expand
[convolutional]
filters=96
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv2_2_dwise
[convolutional]
groups=96
filters=96
size=3
pad=1
stride=2
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=8 (recommended r=16)
[convolutional]
filters=16
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=96
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv2_2_linear
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV3 - MBConv6 - 2 (2)
# conv3_1_expand
[convolutional]
filters=144
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv3_1_dwise
[convolutional]
groups=144
filters=144
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=8
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=144
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv3_1_linear
[convolutional]
filters=24
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV4 - MBConv6 - 1 (2)
# dropout only before residual connection
[dropout]
probability=.0

# block_3_1
[shortcut]
from=-9
activation=linear
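
# Residual connection: from=-9 reaches back over the dropout, linear
# projection, SE block, depthwise and expand layers to the previous
# block's output (same resolution and channel count). probability=.0
# keeps the dropout disabled; presumably a placeholder for per-block
# drop-connect.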

# conv_3_2_expand
[convolutional]
filters=144
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_3_2_dwise
[convolutional]
groups=144
filters=144
size=5
pad=1
stride=2
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=8
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=144
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_3_2_linear
[convolutional]
filters=40
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV4 - MBConv6 - 2 (2)
# conv_4_1_expand
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_4_1_dwise
[convolutional]
groups=192
filters=192
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=16
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=192
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_4_1_linear
[convolutional]
filters=40
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV5 - MBConv6 - 1 (3)
# dropout only before residual connection
[dropout]
probability=.0

# block_4_2
[shortcut]
from=-9
activation=linear

# conv_4_3_expand
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_4_3_dwise
[convolutional]
groups=192
filters=192
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=16
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=192
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_4_3_linear
[convolutional]
filters=80
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV5 - MBConv6 - 2 (3)
# conv_4_4_expand
[convolutional]
filters=384
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_4_4_dwise
[convolutional]
groups=384
filters=384
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=24
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=384
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_4_4_linear
[convolutional]
filters=80
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV5 - MBConv6 - 3 (3)
# dropout only before residual connection
[dropout]
probability=.0

# block_4_4
[shortcut]
from=-9
activation=linear

# conv_4_5_expand
[convolutional]
filters=384
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_4_5_dwise
[convolutional]
groups=384
filters=384
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=24
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=384
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_4_5_linear
[convolutional]
filters=80
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV6 - MBConv6 - 1 (3)
# dropout only before residual connection
[dropout]
probability=.0

# block_4_6
[shortcut]
from=-9
activation=linear

# conv_4_7_expand
[convolutional]
filters=384
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_4_7_dwise
[convolutional]
groups=384
filters=384
size=5
pad=1
stride=2
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=24
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=384
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_4_7_linear
[convolutional]
filters=112
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV6 - MBConv6 - 2 (3)
# conv_5_1_expand
[convolutional]
filters=576
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_5_1_dwise
[convolutional]
groups=576
filters=576
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=32
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=576
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_5_1_linear
[convolutional]
filters=112
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV6 - MBConv6 - 3 (3)
# dropout only before residual connection
[dropout]
probability=.0

# block_5_1
[shortcut]
from=-9
activation=linear

# conv_5_2_expand
[convolutional]
filters=576
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_5_2_dwise
[convolutional]
groups=576
filters=576
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=32
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=576
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_5_2_linear
[convolutional]
filters=112
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV7 - MBConv6 - 1 (4)
# dropout only before residual connection
[dropout]
probability=.0

# block_5_2
[shortcut]
from=-9
activation=linear

# conv_5_3_expand
[convolutional]
filters=576
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_5_3_dwise
[convolutional]
groups=576
filters=576
size=5
pad=1
stride=2
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=32
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=576
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_5_3_linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV7 - MBConv6 - 2 (4)
# conv_6_1_expand
[convolutional]
filters=960
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_6_1_dwise
[convolutional]
groups=960
filters=960
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=64
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=960
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_6_1_linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV7 - MBConv6 - 3 (4)
# dropout only before residual connection
[dropout]
probability=.0

# block_6_1
[shortcut]
from=-9
activation=linear

# conv_6_2_expand
[convolutional]
filters=960
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_6_2_dwise
[convolutional]
groups=960
filters=960
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=64
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=960
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_6_2_linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV7 - MBConv6 - 4 (4)
# dropout only before residual connection
[dropout]
probability=.0

# block_6_1
[shortcut]
from=-9
activation=linear

# conv_6_2_expand
[convolutional]
filters=960
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_6_2_dwise
[convolutional]
groups=960
filters=960
size=5
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=64
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=960
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_6_2_linear
[convolutional]
filters=192
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV8 - MBConv6 - 1 (1)
# dropout only before residual connection
[dropout]
probability=.0

# block_6_2
[shortcut]
from=-9
activation=linear

# conv_6_3_expand
[convolutional]
filters=960
size=1
stride=1
pad=0
batch_normalize=1
activation=swish

# conv_6_3_dwise
[convolutional]
groups=960
filters=960
size=3
stride=1
pad=1
batch_normalize=1
activation=swish

#squeeze-n-excitation
[avgpool]

# squeeze ratio r=16 (recommended r=16)
[convolutional]
filters=64
size=1
stride=1
activation=swish

# excitation
[convolutional]
filters=960
size=1
stride=1
activation=logistic

# multiply channels
[scale_channels]
from=-4

# conv_6_3_linear
[convolutional]
filters=320
size=1
stride=1
pad=0
batch_normalize=1
activation=linear

### CONV9 - Conv2d 1x1
# conv_6_4
[convolutional]
filters=1280
size=1
stride=1
pad=0
batch_normalize=1
activation=swish
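
# End of the EfficientNet-style backbone (1280-channel feature map).
# The two-scale YOLO detection head follows.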

##########################

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[shortcut]
activation=leaky
from=-2

[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear

[yolo]
mask = 3,4,5
anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319
classes=80
num=6
jitter=.3
ignore_thresh = .7
truth_thresh = 1
random=0
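
# First detection scale: mask = 3,4,5 picks the three largest of the six
# anchors; the preceding 1x1 conv has filters = (classes + 5) * 3 = 255.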

[route]
layers = -4

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[upsample]
stride=2

[shortcut]
activation=leaky
from=90
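
# Second branch: [route] takes the 256-channel map four layers back,
# the 1x1 conv reduces it to 128 channels, [upsample] doubles the
# resolution, and [shortcut] adds an earlier backbone feature map
# (positive from values are absolute, zero-based layer indices).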

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[shortcut]
activation=leaky
from=-3

[shortcut]
activation=leaky
from=90

[convolutional]
size=1
stride=1
pad=1
filters=255
activation=linear

[yolo]
mask = 1,2,3
anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319
classes=80
num=6
jitter=.3
ignore_thresh = .7
truth_thresh = 1
random=0
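
# Second detection scale (higher resolution): mask = 1,2,3 selects the
# mid-size anchors of the same six-anchor set; filters = (80 + 5) * 3 = 255
# again on the preceding 1x1 conv.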