Point Cloud Library (PCL) 1.13.1
decision_tree_trainer.hpp
/*
 * Software License Agreement (BSD License)
 *
 * Point Cloud Library (PCL) - www.pointclouds.org
 * Copyright (c) 2010-2011, Willow Garage, Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following
 *    disclaimer in the documentation and/or other materials provided
 *    with the distribution.
 *  * Neither the name of Willow Garage, Inc. nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */

#pragma once

namespace pcl {

template <class FeatureType,
          class DataSet,
          class LabelType,
          class ExampleIndex,
          class NodeType>
DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::
    DecisionTreeTrainer()
: max_tree_depth_(15)
, num_of_features_(1000)
, num_of_thresholds_(10)
, feature_handler_(nullptr)
, stats_estimator_(nullptr)
, data_set_()
, label_data_()
, examples_()
, decision_tree_trainer_data_provider_()
, random_features_at_split_node_(false)
{}
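
// Defaults: maximum tree depth 15, 1000 sampled candidate features, and 10
// candidate thresholds per split. feature_handler_ and stats_estimator_ start
// out null and must be supplied by the caller before train() is called (see
// the usage sketch at the end of this file).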

template <class FeatureType,
          class DataSet,
          class LabelType,
          class ExampleIndex,
          class NodeType>
DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::
    ~DecisionTreeTrainer() = default;

template <class FeatureType,
          class DataSet,
          class LabelType,
          class ExampleIndex,
          class NodeType>
void
DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::train(
    DecisionTree<NodeType>& tree)
{
  // create random features
  std::vector<FeatureType> features;

  if (!random_features_at_split_node_)
    feature_handler_->createRandomFeatures(num_of_features_, features);

  // recursively build decision tree
  NodeType root_node;
  tree.setRoot(root_node);

  if (decision_tree_trainer_data_provider_) {
    std::cerr << "use decision_tree_trainer_data_provider_" << std::endl;

    decision_tree_trainer_data_provider_->getDatasetAndLabels(
        data_set_, label_data_, examples_);
    trainDecisionTreeNode(
        features, examples_, label_data_, max_tree_depth_, tree.getRoot());
    label_data_.clear();
    data_set_.clear();
    examples_.clear();
  }
  else {
    trainDecisionTreeNode(
        features, examples_, label_data_, max_tree_depth_, tree.getRoot());
  }
}
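
// Note: when a data provider is set, train() pulls the dataset, labels, and
// examples on demand and clears them again immediately after the tree is built,
// so the full training set is only held in memory for the duration of training.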

template <class FeatureType,
          class DataSet,
          class LabelType,
          class ExampleIndex,
          class NodeType>
void
DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::
    trainDecisionTreeNode(std::vector<FeatureType>& features,
                          std::vector<ExampleIndex>& examples,
                          std::vector<LabelType>& label_data,
                          const std::size_t max_depth,
                          NodeType& node)
{
  const std::size_t num_of_examples = examples.size();
  if (num_of_examples == 0) {
    PCL_ERROR(
        "Reached invalid point in decision tree training: Number of examples is 0!\n");
    return;
  }

  if (max_depth == 0) {
    stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
    return;
  }

  if (examples.size() < min_examples_for_split_) {
    stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
    return;
  }
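
  // When random_features_at_split_node_ is set, a fresh pool of candidate
  // features is drawn at every split node instead of once per tree (see train()).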
  if (random_features_at_split_node_) {
    features.clear();
    feature_handler_->createRandomFeatures(num_of_features_, features);
  }

  std::vector<float> feature_results;
  std::vector<unsigned char> flags;

  feature_results.reserve(num_of_examples);
  flags.reserve(num_of_examples);

  // find best feature for split
  int best_feature_index = -1;
  float best_feature_threshold = 0.0f;
  float best_feature_information_gain = 0.0f;

  const std::size_t num_of_features = features.size();
  for (std::size_t feature_index = 0; feature_index < num_of_features;
       ++feature_index) {
    // evaluate features
    feature_handler_->evaluateFeature(
        features[feature_index], data_set_, examples, feature_results, flags);

    // get list of thresholds
    if (!thresholds_.empty()) {
      // compute information gain for each threshold and store threshold with highest
      // information gain
      for (const float& threshold : thresholds_) {

        const float information_gain = stats_estimator_->computeInformationGain(
            data_set_, examples, label_data, feature_results, flags, threshold);

        if (information_gain > best_feature_information_gain) {
          best_feature_information_gain = information_gain;
          best_feature_index = static_cast<int>(feature_index);
          best_feature_threshold = threshold;
        }
      }
    }
    else {
      std::vector<float> thresholds;
      thresholds.reserve(num_of_thresholds_);
      createThresholdsUniform(num_of_thresholds_, feature_results, thresholds);

      // compute information gain for each threshold and store threshold with highest
      // information gain
      for (std::size_t threshold_index = 0; threshold_index < num_of_thresholds_;
           ++threshold_index) {
        const float threshold = thresholds[threshold_index];

        // compute information gain
        const float information_gain = stats_estimator_->computeInformationGain(
            data_set_, examples, label_data, feature_results, flags, threshold);

        if (information_gain > best_feature_information_gain) {
          best_feature_information_gain = information_gain;
          best_feature_index = static_cast<int>(feature_index);
          best_feature_threshold = threshold;
        }
      }
    }
  }
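
  // best_feature_index / best_feature_threshold now hold the best
  // (feature, threshold) candidate evaluated above; best_feature_index
  // stays -1 if no candidate achieved positive information gain.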
  if (best_feature_index == -1) {
    stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);
    return;
  }

  // get branch indices for best feature and best threshold
  std::vector<unsigned char> branch_indices;
  branch_indices.reserve(num_of_examples);
  {
    feature_handler_->evaluateFeature(
        features[best_feature_index], data_set_, examples, feature_results, flags);

    stats_estimator_->computeBranchIndices(
        feature_results, flags, best_feature_threshold, branch_indices);
  }

  stats_estimator_->computeAndSetNodeStats(data_set_, examples, label_data, node);

  // separate data
  {
    const std::size_t num_of_branches = stats_estimator_->getNumOfBranches();

    std::vector<std::size_t> branch_counts(num_of_branches, 0);
    for (std::size_t example_index = 0; example_index < num_of_examples;
         ++example_index) {
      ++branch_counts[branch_indices[example_index]];
    }

    node.feature = features[best_feature_index];
    node.threshold = best_feature_threshold;
    node.sub_nodes.resize(num_of_branches);

    for (std::size_t branch_index = 0; branch_index < num_of_branches; ++branch_index) {
      if (branch_counts[branch_index] == 0) {
        NodeType branch_node;
        stats_estimator_->computeAndSetNodeStats(
            data_set_, examples, label_data, branch_node);
        // branch_node->num_of_sub_nodes = 0;

        node.sub_nodes[branch_index] = branch_node;

        continue;
      }

      std::vector<LabelType> branch_labels;
      std::vector<ExampleIndex> branch_examples;
      branch_labels.reserve(branch_counts[branch_index]);
      branch_examples.reserve(branch_counts[branch_index]);

      for (std::size_t example_index = 0; example_index < num_of_examples;
           ++example_index) {
        if (branch_indices[example_index] == branch_index) {
          branch_examples.push_back(examples[example_index]);
          branch_labels.push_back(label_data[example_index]);
        }
      }

      trainDecisionTreeNode(features,
                            branch_examples,
                            branch_labels,
                            max_depth - 1,
                            node.sub_nodes[branch_index]);
    }
  }
}
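
// Recursion notes: max_depth decreases by one per level, so recursion depth is
// bounded by max_tree_depth_. A branch that receives no examples still gets a
// node, with stats computed from all examples that reached the parent.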

template <class FeatureType,
          class DataSet,
          class LabelType,
          class ExampleIndex,
          class NodeType>
void
DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::
    createThresholdsUniform(const std::size_t num_of_thresholds,
                            std::vector<float>& values,
                            std::vector<float>& thresholds)
{
  // estimate range of values
  float min_value = std::numeric_limits<float>::max();
  float max_value = -std::numeric_limits<float>::max();

  const std::size_t num_of_values = values.size();
  for (std::size_t value_index = 0; value_index < num_of_values; ++value_index) {
    const float value = values[value_index];

    if (value < min_value)
      min_value = value;
    if (value > max_value)
      max_value = value;
  }

  const float range = max_value - min_value;
  const float step = range / static_cast<float>(num_of_thresholds + 2);

  // compute thresholds
  thresholds.resize(num_of_thresholds);

  for (std::size_t threshold_index = 0; threshold_index < num_of_thresholds;
       ++threshold_index) {
    thresholds[threshold_index] =
        min_value + step * (static_cast<float>(threshold_index + 1));
  }
}
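
// Worked example (an illustration, not part of the original source): for
// feature values spanning [0, 12] and num_of_thresholds = 4, the step is
// 12 / (4 + 2) = 2, giving thresholds {2, 4, 6, 8}. Because the divisor is
// num_of_thresholds + 2 while only num_of_thresholds steps are used, the
// candidates cover [min + step, min + num_of_thresholds * step] and leave the
// top of the range without a candidate threshold.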

} // namespace pcl
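
For orientation, here is a minimal wiring sketch. It is not part of the PCL sources: MyFeature, MyDataSet, MyLabel, MyExampleIndex, MyNode, MyFeatureHandler, and MyStatsEstimator are hypothetical application types, and the setter names are assumed to match the declarations that accompany this implementation in decision_tree_trainer.h; only train(DecisionTree<NodeType>&) is confirmed by the listing above.

#include <pcl/ml/dt/decision_tree.h>
#include <pcl/ml/dt/decision_tree_trainer.h>

#include <vector>

// Hypothetical application types: MyFeature, MyDataSet, MyLabel,
// MyExampleIndex, MyNode, plus MyFeatureHandler / MyStatsEstimator
// implementing the pcl::FeatureHandler / pcl::StatsEstimator interfaces.
void
trainExampleTree(MyDataSet& data_set,
                 std::vector<MyLabel>& labels,
                 std::vector<MyExampleIndex>& examples)
{
  pcl::DecisionTreeTrainer<MyFeature, MyDataSet, MyLabel, MyExampleIndex, MyNode>
      trainer;

  MyFeatureHandler feature_handler;
  MyStatsEstimator stats_estimator;

  // Both collaborators are mandatory: train() dereferences them unconditionally.
  trainer.setFeatureHandler(feature_handler);   // assumed setter name
  trainer.setStatsEstimator(stats_estimator);   // assumed setter name

  // Hand over the training data (alternatively, set a data provider).
  trainer.setTrainingDataSet(data_set);         // assumed setter name
  trainer.setLabelData(labels);                 // assumed setter name
  trainer.setExamples(examples);                // assumed setter name

  trainer.setMaxTreeDepth(10);                  // override the default of 15

  pcl::DecisionTree<MyNode> tree;
  trainer.train(tree);                          // entry point shown above
}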