// Point Cloud Library (PCL) 1.10.1
// decision_tree_trainer.hpp
/*
 * Software License Agreement (BSD License)
 *
 * Point Cloud Library (PCL) - www.pointclouds.org
 * Copyright (c) 2010-2011, Willow Garage, Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution.
 * * Neither the name of Willow Garage, Inc. nor the names of its
 * contributors may be used to endorse or promote products derived
 * from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */

#pragma once

40 template <class FeatureType,
41  class DataSet,
42  class LabelType,
43  class ExampleIndex,
44  class NodeType>
47 : max_tree_depth_(15)
48 , num_of_features_(1000)
49 , num_of_thresholds_(10)
50 , feature_handler_(nullptr)
51 , stats_estimator_(nullptr)
52 , data_set_()
53 , label_data_()
54 , examples_()
55 , decision_tree_trainer_data_provider_()
56 , random_features_at_split_node_(false)
57 {}
58 
59 template <class FeatureType,
60  class DataSet,
61  class LabelType,
62  class ExampleIndex,
63  class NodeType>
66 {}
67 
68 template <class FeatureType,
69  class DataSet,
70  class LabelType,
71  class ExampleIndex,
72  class NodeType>
73 void
76 {
77  // create random features
78  std::vector<FeatureType> features;
79 
80  if (!random_features_at_split_node_)
81  feature_handler_->createRandomFeatures(num_of_features_, features);
82 
83  // recursively build decision tree
84  NodeType root_node;
85  tree.setRoot(root_node);
86 
87  if (decision_tree_trainer_data_provider_) {
88  std::cerr << "use decision_tree_trainer_data_provider_" << std::endl;
89 
90  decision_tree_trainer_data_provider_->getDatasetAndLabels(
91  data_set_, label_data_, examples_);
92  trainDecisionTreeNode(
93  features, examples_, label_data_, max_tree_depth_, tree.getRoot());
94  label_data_.clear();
95  data_set_.clear();
96  examples_.clear();
97  }
98  else {
99  trainDecisionTreeNode(
100  features, examples_, label_data_, max_tree_depth_, tree.getRoot());
101  }
102 }
103 
104 template <class FeatureType,
105  class DataSet,
106  class LabelType,
107  class ExampleIndex,
108  class NodeType>
109 void
111  trainDecisionTreeNode(std::vector<FeatureType>& features,
112  std::vector<ExampleIndex>& examples,
113  std::vector<LabelType>& label_data,
114  const std::size_t max_depth,
115  NodeType& node)
116 {
117  const std::size_t num_of_examples = examples.size();
118  if (num_of_examples == 0) {
119  PCL_ERROR(
120  "Reached invalid point in decision tree training: Number of examples is 0!");
121  return;
122  };
123 
124  if (max_depth == 0) {
125  stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
126  return;
127  };
128 
129  if (examples.size() < min_examples_for_split_) {
130  stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
131  return;
132  }
133 
134  if (random_features_at_split_node_) {
135  features.clear();
136  feature_handler_->createRandomFeatures(num_of_features_, features);
137  }
138 
139  std::vector<float> feature_results;
140  std::vector<unsigned char> flags;
141 
142  feature_results.reserve(num_of_examples);
143  flags.reserve(num_of_examples);
144 
145  // find best feature for split
146  int best_feature_index = -1;
147  float best_feature_threshold = 0.0f;
148  float best_feature_information_gain = 0.0f;
149 
150  const std::size_t num_of_features = features.size();
151  for (std::size_t feature_index = 0; feature_index < num_of_features;
152  ++feature_index) {
153  // evaluate features
154  feature_handler_->evaluateFeature(
155  features[feature_index], data_set_, examples, feature_results, flags);
156 
157  // get list of thresholds
158  if (!thresholds_.empty()) {
159  // compute information gain for each threshold and store threshold with highest
160  // information gain
161  for (std::size_t threshold_index = 0; threshold_index < thresholds_.size();
162  ++threshold_index) {
163 
164  const float information_gain =
165  stats_estimator_->computeInformationGain(data_set_,
166  examples,
167  label_data,
168  feature_results,
169  flags,
170  thresholds_[threshold_index]);
171 
172  if (information_gain > best_feature_information_gain) {
173  best_feature_information_gain = information_gain;
174  best_feature_index = static_cast<int>(feature_index);
175  best_feature_threshold = thresholds_[threshold_index];
176  }
177  }
178  }
179  else {
180  std::vector<float> thresholds;
181  thresholds.reserve(num_of_thresholds_);
182  createThresholdsUniform(num_of_thresholds_, feature_results, thresholds);
183 
184  // compute information gain for each threshold and store threshold with highest
185  // information gain
186  for (std::size_t threshold_index = 0; threshold_index < num_of_thresholds_;
187  ++threshold_index) {
188  const float threshold = thresholds[threshold_index];
189 
190  // compute information gain
191  const float information_gain = stats_estimator_->computeInformationGain(
192  data_set_, examples, label_data, feature_results, flags, threshold);
193 
194  if (information_gain > best_feature_information_gain) {
195  best_feature_information_gain = information_gain;
196  best_feature_index = static_cast<int>(feature_index);
197  best_feature_threshold = threshold;
198  }
199  }
200  }
201  }
202 
203  if (best_feature_index == -1) {
204  stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
205  return;
206  }
207 
208  // get branch indices for best feature and best threshold
209  std::vector<unsigned char> branch_indices;
210  branch_indices.reserve(num_of_examples);
211  {
212  feature_handler_->evaluateFeature(
213  features[best_feature_index], data_set_, examples, feature_results, flags);
214 
215  stats_estimator_->computeBranchIndices(
216  feature_results, flags, best_feature_threshold, branch_indices);
217  }
218 
219  stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
220 
221  // separate data
222  {
223  const std::size_t num_of_branches = stats_estimator_->getNumOfBranches();
224 
225  std::vector<std::size_t> branch_counts(num_of_branches, 0);
226  for (std::size_t example_index = 0; example_index < num_of_examples;
227  ++example_index) {
228  ++branch_counts[branch_indices[example_index]];
229  }
230 
231  node.feature = features[best_feature_index];
232  node.threshold = best_feature_threshold;
233  node.sub_nodes.resize(num_of_branches);
234 
235  for (std::size_t branch_index = 0; branch_index < num_of_branches; ++branch_index) {
236  if (branch_counts[branch_index] == 0) {
237  NodeType branch_node;
238  stats_estimator_->computeAndSetNodeStats(
239  data_set_, examples, label_data, branch_node);
240  // branch_node->num_of_sub_nodes = 0;
241 
242  node.sub_nodes[branch_index] = branch_node;
243 
244  continue;
245  }
246 
247  std::vector<LabelType> branch_labels;
248  std::vector<ExampleIndex> branch_examples;
249  branch_labels.reserve(branch_counts[branch_index]);
250  branch_examples.reserve(branch_counts[branch_index]);
251 
252  for (std::size_t example_index = 0; example_index < num_of_examples;
253  ++example_index) {
254  if (branch_indices[example_index] == branch_index) {
255  branch_examples.push_back(examples[example_index]);
256  branch_labels.push_back(label_data[example_index]);
257  }
258  }
259 
260  trainDecisionTreeNode(features,
261  branch_examples,
262  branch_labels,
263  max_depth - 1,
264  node.sub_nodes[branch_index]);
265  }
266  }
267 }
268 
269 template <class FeatureType,
270  class DataSet,
271  class LabelType,
272  class ExampleIndex,
273  class NodeType>
274 void
276  createThresholdsUniform(const std::size_t num_of_thresholds,
277  std::vector<float>& values,
278  std::vector<float>& thresholds)
279 {
280  // estimate range of values
281  float min_value = ::std::numeric_limits<float>::max();
282  float max_value = -::std::numeric_limits<float>::max();
283 
284  const std::size_t num_of_values = values.size();
285  for (std::size_t value_index = 0; value_index < num_of_values; ++value_index) {
286  const float value = values[value_index];
287 
288  if (value < min_value)
289  min_value = value;
290  if (value > max_value)
291  max_value = value;
292  }
293 
294  const float range = max_value - min_value;
295  const float step = range / static_cast<float>(num_of_thresholds + 2);
296 
297  // compute thresholds
298  thresholds.resize(num_of_thresholds);
299 
300  for (std::size_t threshold_index = 0; threshold_index < num_of_thresholds;
301  ++threshold_index) {
302  thresholds[threshold_index] =
303  min_value + step * (static_cast<float>(threshold_index + 1));
304  }
305 }
pcl::DecisionTree::getRoot
NodeType & getRoot()
Returns the root node of the tree.
Definition: decision_tree.h:69
pcl::DecisionTreeTrainer::DecisionTreeTrainer
DecisionTreeTrainer()
Constructor.
Definition: decision_tree_trainer.hpp:46
pcl::DecisionTreeTrainer::train
void train(DecisionTree< NodeType > &tree)
Trains a decision tree using the set training data and settings.
Definition: decision_tree_trainer.hpp:75
pcl::DecisionTreeTrainer::createThresholdsUniform
static void createThresholdsUniform(const std::size_t num_of_thresholds, std::vector< float > &values, std::vector< float > &thresholds)
Creates uniformly distributed thresholds over the range of the supplied values.
Definition: decision_tree_trainer.hpp:276
pcl::DecisionTree
Class representing a decision tree.
Definition: decision_tree.h:49
pcl::DecisionTreeTrainer::trainDecisionTreeNode
void trainDecisionTreeNode(std::vector< FeatureType > &features, std::vector< ExampleIndex > &examples, std::vector< LabelType > &label_data, std::size_t max_depth, NodeType &node)
Trains a decision tree node from the specified features, label data, and examples.
Definition: decision_tree_trainer.hpp:111
pcl::DecisionTreeTrainer::~DecisionTreeTrainer
virtual ~DecisionTreeTrainer()
Destructor.
Definition: decision_tree_trainer.hpp:65
pcl::DecisionTree::setRoot
void setRoot(const NodeType &root)
Sets the root node of the tree.
Definition: decision_tree.h:62