# Segmentation.prototxt — Caffe network definition (protobuf text format).
# NOTE(review): restored from a numbered web-scrape dump; page-header lines
# and per-line "N." ordinal prefixes were removed so the file parses again.
# All layer names, types, and parameter values are unchanged.
#
# Topology: 1x1x22x22 input
#   -> conv 3x3 (16 out) -> ReLU -> maxpool 2x2
#   -> conv 3x3 (16 out) -> ReLU -> maxpool 2x2
#   -> flatten -> FC 256 -> dropout 0.5 -> ReLU -> FC 3 -> softmax ("prob")

# Deprecated-but-still-supported 4-D input declaration: N, C, H, W.
input: "data"
input_dim: 1
input_dim: 1
input_dim: 22
input_dim: 22

layer {
  name: "conv2d_12"
  type: "Convolution"
  bottom: "data"
  top: "conv2d_12"
  convolution_param {
    num_output: 16
    bias_term: true
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "activation_18"
  type: "ReLU"
  bottom: "conv2d_12"
  top: "activation_18"
}
layer {
  name: "max_pooling2d_10"
  type: "Pooling"
  bottom: "activation_18"
  top: "max_pooling2d_10"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    pad: 0
  }
}
layer {
  name: "conv2d_13"
  type: "Convolution"
  bottom: "max_pooling2d_10"
  top: "conv2d_13"
  convolution_param {
    num_output: 16
    bias_term: true
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "activation_19"
  type: "ReLU"
  bottom: "conv2d_13"
  top: "activation_19"
}
layer {
  name: "max_pooling2d_11"
  type: "Pooling"
  bottom: "activation_19"
  top: "max_pooling2d_11"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    pad: 0
  }
}
layer {
  name: "flatten_6"
  type: "Flatten"
  bottom: "max_pooling2d_11"
  top: "flatten_6"
}
layer {
  name: "dense_9"
  type: "InnerProduct"
  bottom: "flatten_6"
  top: "dense_9"
  inner_product_param {
    num_output: 256
  }
}
layer {
  name: "dropout_9"
  type: "Dropout"
  bottom: "dense_9"
  top: "dropout_9"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "activation_20"
  type: "ReLU"
  bottom: "dropout_9"
  top: "activation_20"
}
layer {
  name: "dense_10"
  type: "InnerProduct"
  bottom: "activation_20"
  top: "dense_10"
  inner_product_param {
    num_output: 3
  }
}
# Final class probabilities over the 3 outputs of dense_10.
layer {
  name: "prob"
  type: "Softmax"
  bottom: "dense_10"
  top: "prob"
}