Python* API Reference for Intel® Data Analytics Acceleration Library 2020 Update 1

gbt_cls_dense_batch.py

1 # file: gbt_cls_dense_batch.py
2 #===============================================================================
3 # Copyright 2014-2020 Intel Corporation
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16 #===============================================================================
17 
18 
19 
20 
21 import os
22 import sys
23 
24 from daal.algorithms import gbt
25 from daal.algorithms.gbt.classification import prediction, training
26 from daal.algorithms import classifier
27 from daal.data_management import (
28  FileDataSource, DataSourceIface, NumericTableIface, HomogenNumericTable,
29  MergedNumericTable, features
30 )
31 
# Make the shared examples `utils` helper module importable no matter which
# directory the script is launched from.
_parent_dir = os.path.dirname(os.path.dirname(__file__))
utils_folder = os.path.realpath(os.path.abspath(_parent_dir))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printNumericTable, printNumericTables
36 
# Root of the example data directory shipped alongside the samples
DAAL_PREFIX = os.path.join('..', 'data')

# Input data set parameters
_batch_dir = os.path.join(DAAL_PREFIX, 'batch')
trainDatasetFileName = os.path.join(_batch_dir, 'df_classification_train.csv')
testDatasetFileName = os.path.join(_batch_dir, 'df_classification_test.csv')

nFeatures = 3  # number of feature columns in the CSV files
nClasses = 5   # number of distinct class labels

# Gradient boosted trees parameters
maxIterations = 40
minObservationsInLeafNode = 8

# Populated by trainModel()/testModel(); read by printResults()
model = None
predictionResult = None
testGroundTruth = None
54 
55 
def trainModel():
    """Train the gradient boosted trees classification model.

    Reads the training data set from trainDatasetFileName, marks the
    feature types in the table's data dictionary, trains the model, and
    stores it in the module-level `model` variable for testModel().
    """
    global model

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input data
    # from a .csv file
    trainDataSource = FileDataSource(
        trainDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for training data and labels; the data source
    # allocates them when the block is loaded
    trainData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    trainGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(trainData, trainGroundTruth)

    # Retrieve the data from the input file
    trainDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about the
    # data. (Renamed from `dict` to avoid shadowing the builtin.)
    dataDict = trainData.getDictionary()

    # Add feature types to the dictionary: the first two features are
    # continuous, the third is categorical
    dataDict[0].featureType = features.DAAL_CONTINUOUS
    dataDict[1].featureType = features.DAAL_CONTINUOUS
    dataDict[2].featureType = features.DAAL_CATEGORICAL

    # Create an algorithm object to train the gradient boosted trees
    # classification model
    algorithm = training.Batch(nClasses)
    algorithm.parameter().maxIterations = maxIterations
    algorithm.parameter().minObservationsInLeafNode = minObservationsInLeafNode
    algorithm.parameter().featuresPerNode = nFeatures

    # Pass the training data set and dependent values to the algorithm
    algorithm.input.set(classifier.training.data, trainData)
    algorithm.input.set(classifier.training.labels, trainGroundTruth)

    # Train the model and retrieve the result of the training algorithm
    trainingResult = algorithm.compute()
    model = trainingResult.get(classifier.training.model)
95 
def testModel():
    """Score the trained model on the test data set.

    Reads the test data from testDatasetFileName, stores the prediction
    output in the module-level `predictionResult`, and the labels read
    from the file in `testGroundTruth` (both consumed by printResults()).
    """
    global testGroundTruth, predictionResult

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the test data
    # from a .csv file
    testDataSource = FileDataSource(
        testDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for testing data and labels
    testData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    testGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(testData, testGroundTruth)

    # Retrieve the data from the input file
    testDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about the
    # data. (Renamed from `dict` to avoid shadowing the builtin.)
    dataDict = testData.getDictionary()

    # Feature types must match the ones declared at training time
    dataDict[0].featureType = features.DAAL_CONTINUOUS
    dataDict[1].featureType = features.DAAL_CONTINUOUS
    dataDict[2].featureType = features.DAAL_CATEGORICAL

    # Create an algorithm object for gradient boosted trees classification
    # prediction with the default method
    algorithm = prediction.Batch(nClasses)

    # Pass the testing data set and trained model to the algorithm
    algorithm.input.setTable(classifier.prediction.data, testData)
    algorithm.input.setModel(classifier.prediction.model, model)

    # Compute prediction results and retain them
    # (Result class from classifier.prediction)
    predictionResult = algorithm.compute()
132 
133 
def printResults():
    """Print the first 10 predicted labels alongside the ground truth."""
    # Fixed typo in the output caption: "Gragient" -> "Gradient"
    printNumericTable(
        predictionResult.get(classifier.prediction.prediction),
        "Gradient boosted trees prediction results (first 10 rows):", 10)
    printNumericTable(testGroundTruth, "Ground truth (first 10 rows):", 10)
138 
def _main():
    """Run the full example: train the model, score it, report results."""
    trainModel()
    testModel()
    printResults()


if __name__ == "__main__":
    _main()

For more complete information about compiler optimizations, see our Optimization Notice.