# lstm.train.cfg

[net]
subdivisions=8
inputs=256
batch = 128
momentum=0.9
decay=0.001
max_batches = 2000
time_steps=576
learning_rate=0.5
policy=steps
burn_in=10
steps=1000,1500
scales=.1,.1

[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024

[connected]
output=256
activation=leaky

[softmax]

[cost]
type=sse