Python* API Reference for Intel® Data Analytics Acceleration Library 2020 Update 1

spat_stoch_pool2d_layer_dense_batch.py

# file: spat_stoch_pool2d_layer_dense_batch.py
#===============================================================================
# Copyright 2014-2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================

#
# ! Content:
# !    Python example of neural network forward and backward two-dimensional
# !    spatial pyramid stochastic pooling layers usage
# !
# !*****************************************************************************

import os
import sys

import numpy as np

from daal.algorithms.neural_networks import layers
from daal.algorithms.neural_networks.layers import spatial_stochastic_pooling2d
from daal.data_management import HomogenTensor

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printTensor

nDim = 4
dims = [2, 3, 2, 4]
dataArray = np.array([[[[1, 2, 3, 4],
                        [5, 6, 7, 8]],
                       [[9, 10, 11, 12],
                        [13, 14, 15, 16]],
                       [[17, 18, 19, 20],
                        [21, 22, 23, 24]]],
                      [[[10, 20, 30, 40],
                        [50, 60, 70, 80]],
                       [[90, 100, 110, 120],
                        [130, 140, 150, 160]],
                       [[170, 180, 190, 200],
                        [210, 220, 230, 240]]]],
                     dtype=np.float64)

if __name__ == "__main__":
    data = HomogenTensor(dataArray)

    printTensor(data, "Forward two-dimensional spatial pyramid stochastic pooling layer input (first 10 rows):", 10)

    # Create an algorithm to compute forward two-dimensional spatial pyramid stochastic pooling layer results using default method
    forwardLayer = spatial_stochastic_pooling2d.forward.Batch(2, nDim)
    forwardLayer.input.setInput(layers.forward.data, data)

    # Compute forward two-dimensional spatial pyramid stochastic pooling layer results
    forwardLayer.compute()

    # Get the computed forward two-dimensional spatial pyramid stochastic pooling layer results
    forwardResult = forwardLayer.getResult()

    printTensor(forwardResult.getResult(layers.forward.value),
                "Forward two-dimensional spatial pyramid stochastic pooling layer result (first 5 rows):", 5)
    printTensor(forwardResult.getLayerData(spatial_stochastic_pooling2d.auxSelectedIndices),
                "Forward two-dimensional spatial pyramid stochastic pooling layer selected indices (first 10 rows):", 10)

    # Create an algorithm to compute backward two-dimensional spatial pyramid stochastic pooling layer results using default method
    backwardLayer = spatial_stochastic_pooling2d.backward.Batch(2, nDim)
    backwardLayer.input.setInput(layers.backward.inputGradient, forwardResult.getResult(layers.forward.value))
    backwardLayer.input.setInputLayerData(layers.backward.inputFromForward,
                                          forwardResult.getResultLayerData(layers.forward.resultForBackward))

    # Compute backward two-dimensional spatial pyramid stochastic pooling layer results
    backwardLayer.compute()

    # Get the computed backward two-dimensional spatial pyramid stochastic pooling layer results
    backwardResult = backwardLayer.getResult()

    printTensor(backwardResult.getResult(layers.backward.gradient),
                "Backward two-dimensional spatial pyramid stochastic pooling layer result (first 10 rows):", 10)
