CharacterRecognization.prototxt

input: "data"
input_dim: 1
input_dim: 1
input_dim: 30
input_dim: 14
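# Input: a single-image batch in (N, C, H, W) order, i.e. one 1-channel
# (grayscale) character patch of 30x14 pixels.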
layer {
  name: "conv2d_1"
  type: "Convolution"
  bottom: "data"
  top: "conv2d_1"
  convolution_param {
    num_output: 32
    bias_term: true
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "activation_1"
  type: "ReLU"
  bottom: "conv2d_1"
  top: "activation_1"
}
layer {
  name: "max_pooling2d_1"
  type: "Pooling"
  bottom: "activation_1"
  top: "max_pooling2d_1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    pad: 0
  }
}
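# conv2d_1 (3x3, stride 1, no padding): 1x30x14 -> 32x28x12;
# max_pooling2d_1 (2x2, stride 2) halves that to 32x14x6.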
layer {
  name: "conv2d_2"
  type: "Convolution"
  bottom: "max_pooling2d_1"
  top: "conv2d_2"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "activation_2"
  type: "ReLU"
  bottom: "conv2d_2"
  top: "activation_2"
}
layer {
  name: "max_pooling2d_2"
  type: "Pooling"
  bottom: "activation_2"
  top: "max_pooling2d_2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    pad: 0
  }
}
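# conv2d_2 (3x3): 32x14x6 -> 64x12x4; max_pooling2d_2 -> 64x6x2.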
layer {
  name: "conv2d_3"
  type: "Convolution"
  bottom: "max_pooling2d_2"
  top: "conv2d_3"
  convolution_param {
    num_output: 128
    bias_term: true
    pad: 0
    kernel_size: 2
    stride: 1
  }
}
layer {
  name: "activation_3"
  type: "ReLU"
  bottom: "conv2d_3"
  top: "activation_3"
}
layer {
  name: "flatten_1"
  type: "Flatten"
  bottom: "activation_3"
  top: "flatten_1"
}
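# conv2d_3 (2x2): 64x6x2 -> 128x5x1; flatten_1 turns that into a 640-d vector.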
layer {
  name: "dense_1"
  type: "InnerProduct"
  bottom: "flatten_1"
  top: "dense_1"
  inner_product_param {
    num_output: 256
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "dense_1"
  top: "relu2"
}
layer {
  name: "dense2"
  type: "InnerProduct"
  bottom: "relu2"
  top: "dense2"
  inner_product_param {
    num_output: 65
  }
}
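# dense_1 maps the 640-d vector to 256 hidden units; dense2 produces 65 logits,
# presumably one per recognizable character class.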
layer {
  name: "prob"
  type: "Softmax"
  bottom: "dense2"
  top: "prob"
}
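
Since this is a deploy-style definition (inputs declared with the legacy input/input_dim fields, no training layers), it can be run without a full Caffe install; OpenCV's dnn module reads this prototxt format directly. Below is a minimal inference sketch. The weights filename (CharacterRecognization.caffemodel), the input image path (char.png), and the 1/255 input scaling are all illustrative assumptions, since none of them are specified by the prototxt itself.

import cv2
import numpy as np

# Load the network definition plus trained weights (the .caffemodel name is
# an assumption; pair the prototxt with whatever weights file ships with it).
net = cv2.dnn.readNetFromCaffe("CharacterRecognization.prototxt",
                               "CharacterRecognization.caffemodel")

# Build a 1x1x30x14 blob from a grayscale character patch. Note that
# blobFromImage takes size as (width, height), hence (14, 30). The 1/255
# scaling is an assumed preprocessing step, not mandated by the prototxt.
img = cv2.imread("char.png", cv2.IMREAD_GRAYSCALE)
blob = cv2.dnn.blobFromImage(img, scalefactor=1.0 / 255.0, size=(14, 30))

net.setInput(blob)           # feed the "data" input
prob = net.forward("prob")   # shape (1, 65): output of the softmax layer

class_id = int(np.argmax(prob))
print("class:", class_id, "confidence:", float(prob[0, class_id]))

Because the network ends in a Softmax layer, prob already holds normalized class probabilities, so no extra normalization is needed before taking the argmax.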