# HorizonalFinemapping.prototxt
  1. input: "data"
  2. input_dim: 1
  3. input_dim: 3
  4. input_dim: 16
  5. input_dim: 66
  6. layer {
  7. name: "conv1"
  8. type: "Convolution"
  9. bottom: "data"
  10. top: "conv1"
  11. convolution_param {
  12. num_output: 10
  13. bias_term: true
  14. pad: 0
  15. kernel_size: 3
  16. stride: 1
  17. }
  18. }
  19. layer {
  20. name: "relu1"
  21. type: "ReLU"
  22. bottom: "conv1"
  23. top: "conv1"
  24. }
  25. layer {
  26. name: "max_pooling2d_3"
  27. type: "Pooling"
  28. bottom: "conv1"
  29. top: "max_pooling2d_3"
  30. pooling_param {
  31. pool: MAX
  32. kernel_size: 2
  33. stride: 2
  34. pad: 0
  35. }
  36. }
  37. layer {
  38. name: "conv2"
  39. type: "Convolution"
  40. bottom: "max_pooling2d_3"
  41. top: "conv2"
  42. convolution_param {
  43. num_output: 16
  44. bias_term: true
  45. pad: 0
  46. kernel_size: 3
  47. stride: 1
  48. }
  49. }
  50. layer {
  51. name: "relu2"
  52. type: "ReLU"
  53. bottom: "conv2"
  54. top: "conv2"
  55. }
  56. layer {
  57. name: "conv3"
  58. type: "Convolution"
  59. bottom: "conv2"
  60. top: "conv3"
  61. convolution_param {
  62. num_output: 32
  63. bias_term: true
  64. pad: 0
  65. kernel_size: 3
  66. stride: 1
  67. }
  68. }
  69. layer {
  70. name: "relu3"
  71. type: "ReLU"
  72. bottom: "conv3"
  73. top: "conv3"
  74. }
  75. layer {
  76. name: "flatten_2"
  77. type: "Flatten"
  78. bottom: "conv3"
  79. top: "flatten_2"
  80. }
  81. layer {
  82. name: "dense"
  83. type: "InnerProduct"
  84. bottom: "flatten_2"
  85. top: "dense"
  86. inner_product_param {
  87. num_output: 2
  88. }
  89. }
  90. layer {
  91. name: "relu4"
  92. type: "ReLU"
  93. bottom: "dense"
  94. top: "dense"
  95. }