Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/dnn/src/caffe/caffe_io.cpp
16339 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of the copyright holders may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
42
/*M///////////////////////////////////////////////////////////////////////////////////////
43
//COPYRIGHT
44
//
45
//All contributions by the University of California:
46
//Copyright (c) 2014, The Regents of the University of California (Regents)
47
//All rights reserved.
48
//
49
//All other contributions:
50
//Copyright (c) 2014, the respective contributors
51
//All rights reserved.
52
//
53
//Caffe uses a shared copyright model: each contributor holds copyright over
54
//their contributions to Caffe. The project versioning records all such
55
//contribution and copyright details. If a contributor wants to further mark
56
//their specific copyright on a particular contribution, they should indicate
57
//their copyright solely in the commit message of the change when it is
58
//committed.
59
//
60
//LICENSE
61
//
62
//Redistribution and use in source and binary forms, with or without
63
//modification, are permitted provided that the following conditions are met:
64
//
65
//1. Redistributions of source code must retain the above copyright notice, this
66
// list of conditions and the following disclaimer.
67
//2. Redistributions in binary form must reproduce the above copyright notice,
68
// this list of conditions and the following disclaimer in the documentation
69
// and/or other materials provided with the distribution.
70
//
71
//THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
72
//ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
73
//WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
74
//DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
75
//ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
76
//(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
77
//LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
78
//ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
79
//(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
80
//SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
81
//
82
//CONTRIBUTION AGREEMENT
83
//
84
//By contributing to the BVLC/caffe repository through pull-request, comment,
85
//or otherwise, the contributor releases their content to the
86
//license and copyright terms herein.
87
//
88
//M*/
89
90
#include "../precomp.hpp"
91
92
#ifdef HAVE_PROTOBUF
93
#include <google/protobuf/io/coded_stream.h>
94
#include <google/protobuf/io/zero_copy_stream_impl.h>
95
#include <google/protobuf/text_format.h>
96
97
#include <opencv2/core.hpp>
98
99
#include <map>
100
#include <string>
101
#include <fstream>
102
#include <vector>
103
104
#include "caffe_io.hpp"
105
#include "glog_emulator.hpp"
106
107
namespace cv {
108
namespace dnn {
109
110
using std::string;
111
using std::map;
112
using namespace caffe;
113
using namespace ::google::protobuf;
114
using namespace ::google::protobuf::io;
115
116
// Return true iff the net is not the current version.
117
bool NetNeedsUpgrade(const NetParameter& net_param);
118
119
// Return true iff any layer contains parameters specified using
120
// deprecated V0LayerParameter.
121
bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param);
122
123
// Perform all necessary transformations to upgrade a V0NetParameter into a
124
// NetParameter (including upgrading padding layers and LayerParameters).
125
bool UpgradeV0Net(const NetParameter& v0_net_param, NetParameter* net_param);
126
127
// Upgrade NetParameter with padding layers to pad-aware conv layers.
128
// For any padding layer, remove it and put its pad parameter in any layers
129
// taking its top blob as input.
130
// Error if any of these above layers are not-conv layers.
131
void UpgradeV0PaddingLayers(const NetParameter& param,
132
NetParameter* param_upgraded_pad);
133
134
// Upgrade a single V0LayerConnection to the V1LayerParameter format.
135
bool UpgradeV0LayerParameter(V1LayerParameter* v0_layer_connection,
136
V1LayerParameter* layer_param);
137
138
V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);
139
140
// Return true iff any layer contains deprecated data transformation parameters.
141
bool NetNeedsDataUpgrade(const NetParameter& net_param);
142
143
// Perform all necessary transformations to upgrade old transformation fields
144
// into a TransformationParameter.
145
void UpgradeNetDataTransformation(NetParameter* net_param);
146
147
// Return true iff the Net contains any layers specified as V1LayerParameters.
148
bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param);
149
150
// Perform all necessary transformations to upgrade a NetParameter with
151
// deprecated V1LayerParameters.
152
bool UpgradeV1Net(NetParameter* net_param);
153
154
bool UpgradeV1LayerParameter(V1LayerParameter* v1_layer_param,
155
LayerParameter* layer_param);
156
157
const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);
158
159
bool NetNeedsBatchNormUpgrade(const NetParameter& net_param);
160
161
void UpgradeNetBatchNorm(NetParameter* net_param);
162
163
// Check for deprecations and upgrade the NetParameter as needed.
164
bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param);
165
166
167
bool NetNeedsUpgrade(const NetParameter& net_param) {
168
return NetNeedsV0ToV1Upgrade(net_param) || NetNeedsV1ToV2Upgrade(net_param) ||
169
NetNeedsBatchNormUpgrade(net_param);
170
}
171
172
bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param) {
173
for (int i = 0; i < net_param.layers_size(); ++i) {
174
if (net_param.layers(i).has_layer()) {
175
return true;
176
}
177
}
178
return false;
179
}
180
181
bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param) {
182
return net_param.layers_size() > 0;
183
}
184
185
bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
186
NetParameter* net_param) {
187
// First upgrade padding layers to padded conv layers.
188
NetParameter v0_net_param;
189
UpgradeV0PaddingLayers(v0_net_param_padding_layers, &v0_net_param);
190
// Now upgrade layer parameters.
191
bool is_fully_compatible = true;
192
net_param->Clear();
193
if (v0_net_param.has_name()) {
194
net_param->set_name(v0_net_param.name());
195
}
196
for (int i = 0; i < v0_net_param.layers_size(); ++i) {
197
is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.mutable_layers(i),
198
net_param->add_layers());
199
}
200
for (int i = 0; i < v0_net_param.input_size(); ++i) {
201
net_param->add_input(v0_net_param.input(i));
202
}
203
for (int i = 0; i < v0_net_param.input_dim_size(); ++i) {
204
net_param->add_input_dim(v0_net_param.input_dim(i));
205
}
206
if (v0_net_param.has_force_backward()) {
207
net_param->set_force_backward(v0_net_param.force_backward());
208
}
209
return is_fully_compatible;
210
}
211
212
// Removes explicit V0 "padding" layers from 'param', writing the result to
// 'param_upgraded_pad'.  Each padding layer's 'pad' value is pushed into the
// conv/pool layer(s) that consume its top blob, and those consumers are
// rewired to read the padding layer's own bottom blob directly.
// Aborts via LOG(FATAL)/CHECK* on nets where this rewrite is ill-defined.
void UpgradeV0PaddingLayers(const NetParameter& param,
                            NetParameter* param_upgraded_pad) {
  // Copy everything other than the layers from the original param.
  param_upgraded_pad->Clear();
  param_upgraded_pad->CopyFrom(param);
  param_upgraded_pad->clear_layers();
  // Figure out which layer each bottom blob comes from.
  // Net-level inputs are recorded with index -1 so they can be told apart
  // from blobs produced by a layer.
  map<string, int> blob_name_to_last_top_idx;
  for (int i = 0; i < param.input_size(); ++i) {
    const string& blob_name = param.input(i);
    blob_name_to_last_top_idx[blob_name] = -1;
  }
  for (int i = 0; i < param.layers_size(); ++i) {
    const V1LayerParameter& layer_connection = param.layers(i);
    const V0LayerParameter& layer_param = layer_connection.layer();
    // Add the layer to the new net, unless it's a padding layer.
    if (layer_param.type() != "padding") {
      param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
    }
    for (int j = 0; j < layer_connection.bottom_size(); ++j) {
      const string& blob_name = layer_connection.bottom(j);
      if (blob_name_to_last_top_idx.find(blob_name) ==
          blob_name_to_last_top_idx.end()) {
        LOG(FATAL) << "Unknown blob input " << blob_name << " to layer " << j;
      }
      const int top_idx = blob_name_to_last_top_idx[blob_name];
      if (top_idx == -1) {
        // Bottom is a net input, not a layer output -- nothing to fold.
        continue;
      }
      const V1LayerParameter& source_layer = param.layers(top_idx);
      if (source_layer.layer().type() == "padding") {
        // This layer has a padding layer as input -- check that it is a conv
        // layer or a pooling layer and takes only one input. Also check that
        // the padding layer input has only one input and one output. Other
        // cases have undefined behavior in Caffe.
        CHECK((layer_param.type() == "conv") || (layer_param.type() == "pool"))
            << "Padding layer input to "
               "non-convolutional / non-pooling layer type "
            << layer_param.type();
        CHECK_EQ(layer_connection.bottom_size(), 1)
            << "Conv Layer takes a single blob as input.";
        CHECK_EQ(source_layer.bottom_size(), 1)
            << "Padding Layer takes a single blob as input.";
        CHECK_EQ(source_layer.top_size(), 1)
            << "Padding Layer produces a single blob as output.";
        // The consumer was just appended above, so it is the last layer in
        // the upgraded net: give it the pad and bypass the padding layer.
        int layer_index = param_upgraded_pad->layers_size() - 1;
        param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
            ->set_pad(source_layer.layer().pad());
        param_upgraded_pad->mutable_layers(layer_index)
            ->set_bottom(j, source_layer.bottom(0));
      }
    }
    // Record this layer as the most recent producer of each of its tops.
    for (int j = 0; j < layer_connection.top_size(); ++j) {
      const string& blob_name = layer_connection.top(j);
      blob_name_to_last_top_idx[blob_name] = i;
    }
  }
}
270
271
// Translates one V0-style layer (a V1LayerParameter whose 'layer' field holds
// a V0LayerParameter) into a proper V1LayerParameter.  Each deprecated flat
// V0 field is routed into the matching typed sub-message of the output, with
// the destination chosen by the layer's type string.  Fields that do not make
// sense for the given type are logged and cause a 'false' return, but the
// conversion continues for the remaining fields.
// Note: the input is taken as a non-const pointer only so the blobs can be
// Swap()ed into the output instead of copied.
bool UpgradeV0LayerParameter(V1LayerParameter* v0_layer_connection_,
                             V1LayerParameter* layer_param) {
  CV_Assert(v0_layer_connection_ != NULL);
  const V1LayerParameter& v0_layer_connection = *v0_layer_connection_;
  bool is_fully_compatible = true;
  layer_param->Clear();
  // Connectivity (bottom/top blob names) transfers unchanged.
  for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
    layer_param->add_bottom(v0_layer_connection.bottom(i));
  }
  for (int i = 0; i < v0_layer_connection.top_size(); ++i) {
    layer_param->add_top(v0_layer_connection.top(i));
  }
  if (v0_layer_connection.has_layer()) {
    const V0LayerParameter& v0_layer_param = v0_layer_connection.layer();
    if (v0_layer_param.has_name()) {
      layer_param->set_name(v0_layer_param.name());
    }
    const string& type = v0_layer_param.type();
    if (v0_layer_param.has_type()) {
      layer_param->set_type(UpgradeV0LayerType(type));
    }
    // Move (not copy) the learned blobs into the upgraded layer.
    layer_param->mutable_blobs()->Swap(v0_layer_connection_->mutable_blobs());
    for (int i = 0; i < v0_layer_param.blobs_lr_size(); ++i) {
      layer_param->add_blobs_lr(v0_layer_param.blobs_lr(i));
    }
    for (int i = 0; i < v0_layer_param.weight_decay_size(); ++i) {
      layer_param->add_weight_decay(v0_layer_param.weight_decay(i));
    }
    // --- conv / inner-product fields ---
    if (v0_layer_param.has_num_output()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->set_num_output(
            v0_layer_param.num_output());
      } else if (type == "innerproduct") {
        layer_param->mutable_inner_product_param()->set_num_output(
            v0_layer_param.num_output());
      } else {
        LOG(ERROR) << "Unknown parameter num_output for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_biasterm()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->set_bias_term(
            v0_layer_param.biasterm());
      } else if (type == "innerproduct") {
        layer_param->mutable_inner_product_param()->set_bias_term(
            v0_layer_param.biasterm());
      } else {
        LOG(ERROR) << "Unknown parameter biasterm for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_weight_filler()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->
            mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
      } else if (type == "innerproduct") {
        layer_param->mutable_inner_product_param()->
            mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
      } else {
        LOG(ERROR) << "Unknown parameter weight_filler for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_bias_filler()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->
            mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
      } else if (type == "innerproduct") {
        layer_param->mutable_inner_product_param()->
            mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
      } else {
        LOG(ERROR) << "Unknown parameter bias_filler for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // --- conv / pool spatial fields ---
    if (v0_layer_param.has_pad()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->add_pad(v0_layer_param.pad());
      } else if (type == "pool") {
        layer_param->mutable_pooling_param()->set_pad(v0_layer_param.pad());
      } else {
        LOG(ERROR) << "Unknown parameter pad for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_kernelsize()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->add_kernel_size(
            v0_layer_param.kernelsize());
      } else if (type == "pool") {
        layer_param->mutable_pooling_param()->set_kernel_size(
            v0_layer_param.kernelsize());
      } else {
        LOG(ERROR) << "Unknown parameter kernelsize for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_group()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->set_group(
            v0_layer_param.group());
      } else {
        LOG(ERROR) << "Unknown parameter group for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_stride()) {
      if (type == "conv") {
        layer_param->mutable_convolution_param()->add_stride(
            v0_layer_param.stride());
      } else if (type == "pool") {
        layer_param->mutable_pooling_param()->set_stride(
            v0_layer_param.stride());
      } else {
        LOG(ERROR) << "Unknown parameter stride for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // Pool method enum translates value-by-value to the V1 enum.
    if (v0_layer_param.has_pool()) {
      if (type == "pool") {
        V0LayerParameter_PoolMethod pool = v0_layer_param.pool();
        switch (pool) {
        case V0LayerParameter_PoolMethod_MAX:
          layer_param->mutable_pooling_param()->set_pool(
              PoolingParameter_PoolMethod_MAX);
          break;
        case V0LayerParameter_PoolMethod_AVE:
          layer_param->mutable_pooling_param()->set_pool(
              PoolingParameter_PoolMethod_AVE);
          break;
        case V0LayerParameter_PoolMethod_STOCHASTIC:
          layer_param->mutable_pooling_param()->set_pool(
              PoolingParameter_PoolMethod_STOCHASTIC);
          break;
        default:
          LOG(ERROR) << "Unknown pool method " << pool;
          is_fully_compatible = false;
        }
      } else {
        LOG(ERROR) << "Unknown parameter pool for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_dropout_ratio()) {
      if (type == "dropout") {
        layer_param->mutable_dropout_param()->set_dropout_ratio(
            v0_layer_param.dropout_ratio());
      } else {
        LOG(ERROR) << "Unknown parameter dropout_ratio for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // --- LRN fields ---
    if (v0_layer_param.has_local_size()) {
      if (type == "lrn") {
        layer_param->mutable_lrn_param()->set_local_size(
            v0_layer_param.local_size());
      } else {
        LOG(ERROR) << "Unknown parameter local_size for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_alpha()) {
      if (type == "lrn") {
        layer_param->mutable_lrn_param()->set_alpha(v0_layer_param.alpha());
      } else {
        LOG(ERROR) << "Unknown parameter alpha for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_beta()) {
      if (type == "lrn") {
        layer_param->mutable_lrn_param()->set_beta(v0_layer_param.beta());
      } else {
        LOG(ERROR) << "Unknown parameter beta for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_k()) {
      if (type == "lrn") {
        layer_param->mutable_lrn_param()->set_k(v0_layer_param.k());
      } else {
        LOG(ERROR) << "Unknown parameter k for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // --- data-source fields: routed by which data layer type this is ---
    if (v0_layer_param.has_source()) {
      if (type == "data") {
        layer_param->mutable_data_param()->set_source(v0_layer_param.source());
      } else if (type == "hdf5_data") {
        layer_param->mutable_hdf5_data_param()->set_source(
            v0_layer_param.source());
      } else if (type == "images") {
        layer_param->mutable_image_data_param()->set_source(
            v0_layer_param.source());
      } else if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_source(
            v0_layer_param.source());
      } else if (type == "infogain_loss") {
        layer_param->mutable_infogain_loss_param()->set_source(
            v0_layer_param.source());
      } else {
        LOG(ERROR) << "Unknown parameter source for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // scale/meanfile/cropsize/mirror always go to transform_param,
    // regardless of layer type.
    if (v0_layer_param.has_scale()) {
      layer_param->mutable_transform_param()->
          set_scale(v0_layer_param.scale());
    }
    if (v0_layer_param.has_meanfile()) {
      layer_param->mutable_transform_param()->
          set_mean_file(v0_layer_param.meanfile());
    }
    if (v0_layer_param.has_batchsize()) {
      if (type == "data") {
        layer_param->mutable_data_param()->set_batch_size(
            v0_layer_param.batchsize());
      } else if (type == "hdf5_data") {
        layer_param->mutable_hdf5_data_param()->set_batch_size(
            v0_layer_param.batchsize());
      } else if (type == "images") {
        layer_param->mutable_image_data_param()->set_batch_size(
            v0_layer_param.batchsize());
      } else if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_batch_size(
            v0_layer_param.batchsize());
      } else {
        LOG(ERROR) << "Unknown parameter batchsize for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_cropsize()) {
      layer_param->mutable_transform_param()->
          set_crop_size(v0_layer_param.cropsize());
    }
    if (v0_layer_param.has_mirror()) {
      layer_param->mutable_transform_param()->
          set_mirror(v0_layer_param.mirror());
    }
    if (v0_layer_param.has_rand_skip()) {
      if (type == "data") {
        layer_param->mutable_data_param()->set_rand_skip(
            v0_layer_param.rand_skip());
      } else if (type == "images") {
        layer_param->mutable_image_data_param()->set_rand_skip(
            v0_layer_param.rand_skip());
      } else {
        LOG(ERROR) << "Unknown parameter rand_skip for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_shuffle_images()) {
      if (type == "images") {
        layer_param->mutable_image_data_param()->set_shuffle(
            v0_layer_param.shuffle_images());
      } else {
        LOG(ERROR) << "Unknown parameter shuffle for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_new_height()) {
      if (type == "images") {
        layer_param->mutable_image_data_param()->set_new_height(
            v0_layer_param.new_height());
      } else {
        LOG(ERROR) << "Unknown parameter new_height for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_new_width()) {
      if (type == "images") {
        layer_param->mutable_image_data_param()->set_new_width(
            v0_layer_param.new_width());
      } else {
        LOG(ERROR) << "Unknown parameter new_width for layer type " << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_concat_dim()) {
      if (type == "concat") {
        layer_param->mutable_concat_param()->set_concat_dim(
            v0_layer_param.concat_dim());
      } else {
        LOG(ERROR) << "Unknown parameter concat_dim for layer type " << type;
        is_fully_compatible = false;
      }
    }
    // --- window-data detection fields ---
    if (v0_layer_param.has_det_fg_threshold()) {
      if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_fg_threshold(
            v0_layer_param.det_fg_threshold());
      } else {
        LOG(ERROR) << "Unknown parameter det_fg_threshold for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_det_bg_threshold()) {
      if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_bg_threshold(
            v0_layer_param.det_bg_threshold());
      } else {
        LOG(ERROR) << "Unknown parameter det_bg_threshold for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_det_fg_fraction()) {
      if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_fg_fraction(
            v0_layer_param.det_fg_fraction());
      } else {
        LOG(ERROR) << "Unknown parameter det_fg_fraction for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_det_context_pad()) {
      if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_context_pad(
            v0_layer_param.det_context_pad());
      } else {
        LOG(ERROR) << "Unknown parameter det_context_pad for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_det_crop_mode()) {
      if (type == "window_data") {
        layer_param->mutable_window_data_param()->set_crop_mode(
            v0_layer_param.det_crop_mode());
      } else {
        LOG(ERROR) << "Unknown parameter det_crop_mode for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
    if (v0_layer_param.has_hdf5_output_param()) {
      if (type == "hdf5_output") {
        layer_param->mutable_hdf5_output_param()->CopyFrom(
            v0_layer_param.hdf5_output_param());
      } else {
        LOG(ERROR) << "Unknown parameter hdf5_output_param for layer type "
                   << type;
        is_fully_compatible = false;
      }
    }
  }
  return is_fully_compatible;
}
622
623
V1LayerParameter_LayerType UpgradeV0LayerType(const string& type) {
624
if (type == "accuracy") {
625
return V1LayerParameter_LayerType_ACCURACY;
626
} else if (type == "bnll") {
627
return V1LayerParameter_LayerType_BNLL;
628
} else if (type == "concat") {
629
return V1LayerParameter_LayerType_CONCAT;
630
} else if (type == "conv") {
631
return V1LayerParameter_LayerType_CONVOLUTION;
632
} else if (type == "data") {
633
return V1LayerParameter_LayerType_DATA;
634
} else if (type == "dropout") {
635
return V1LayerParameter_LayerType_DROPOUT;
636
} else if (type == "euclidean_loss") {
637
return V1LayerParameter_LayerType_EUCLIDEAN_LOSS;
638
} else if (type == "flatten") {
639
return V1LayerParameter_LayerType_FLATTEN;
640
} else if (type == "hdf5_data") {
641
return V1LayerParameter_LayerType_HDF5_DATA;
642
} else if (type == "hdf5_output") {
643
return V1LayerParameter_LayerType_HDF5_OUTPUT;
644
} else if (type == "im2col") {
645
return V1LayerParameter_LayerType_IM2COL;
646
} else if (type == "images") {
647
return V1LayerParameter_LayerType_IMAGE_DATA;
648
} else if (type == "infogain_loss") {
649
return V1LayerParameter_LayerType_INFOGAIN_LOSS;
650
} else if (type == "innerproduct") {
651
return V1LayerParameter_LayerType_INNER_PRODUCT;
652
} else if (type == "lrn") {
653
return V1LayerParameter_LayerType_LRN;
654
} else if (type == "multinomial_logistic_loss") {
655
return V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
656
} else if (type == "pool") {
657
return V1LayerParameter_LayerType_POOLING;
658
} else if (type == "relu") {
659
return V1LayerParameter_LayerType_RELU;
660
} else if (type == "sigmoid") {
661
return V1LayerParameter_LayerType_SIGMOID;
662
} else if (type == "softmax") {
663
return V1LayerParameter_LayerType_SOFTMAX;
664
} else if (type == "softmax_loss") {
665
return V1LayerParameter_LayerType_SOFTMAX_LOSS;
666
} else if (type == "split") {
667
return V1LayerParameter_LayerType_SPLIT;
668
} else if (type == "tanh") {
669
return V1LayerParameter_LayerType_TANH;
670
} else if (type == "window_data") {
671
return V1LayerParameter_LayerType_WINDOW_DATA;
672
} else {
673
LOG(FATAL) << "Unknown layer name: " << type;
674
return V1LayerParameter_LayerType_NONE;
675
}
676
}
677
678
bool NetNeedsDataUpgrade(const NetParameter& net_param) {
679
for (int i = 0; i < net_param.layers_size(); ++i) {
680
if (net_param.layers(i).type() == V1LayerParameter_LayerType_DATA) {
681
DataParameter layer_param = net_param.layers(i).data_param();
682
if (layer_param.has_scale()) { return true; }
683
if (layer_param.has_mean_file()) { return true; }
684
if (layer_param.has_crop_size()) { return true; }
685
if (layer_param.has_mirror()) { return true; }
686
}
687
if (net_param.layers(i).type() == V1LayerParameter_LayerType_IMAGE_DATA) {
688
ImageDataParameter layer_param = net_param.layers(i).image_data_param();
689
if (layer_param.has_scale()) { return true; }
690
if (layer_param.has_mean_file()) { return true; }
691
if (layer_param.has_crop_size()) { return true; }
692
if (layer_param.has_mirror()) { return true; }
693
}
694
if (net_param.layers(i).type() == V1LayerParameter_LayerType_WINDOW_DATA) {
695
WindowDataParameter layer_param = net_param.layers(i).window_data_param();
696
if (layer_param.has_scale()) { return true; }
697
if (layer_param.has_mean_file()) { return true; }
698
if (layer_param.has_crop_size()) { return true; }
699
if (layer_param.has_mirror()) { return true; }
700
}
701
}
702
return false;
703
}
704
705
// Helper for UpgradeNetDataTransformation: if layer 'i' (the loop index of
// the expansion site) has type TYPE, moves each deprecated transformation
// field (scale, mean_file, crop_size, mirror) that is set on the layer's
// Name##Parameter into its TransformationParameter, clearing the old field.
// Comments cannot go inside the macro body because of the line continuations.
#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE, Name, param_name) \
  do { \
    if (net_param->layers(i).type() == V1LayerParameter_LayerType_##TYPE) { \
      Name##Parameter* layer_param = \
          net_param->mutable_layers(i)->mutable_##param_name##_param(); \
      TransformationParameter* transform_param = \
          net_param->mutable_layers(i)->mutable_transform_param(); \
      if (layer_param->has_scale()) { \
        transform_param->set_scale(layer_param->scale()); \
        layer_param->clear_scale(); \
      } \
      if (layer_param->has_mean_file()) { \
        transform_param->set_mean_file(layer_param->mean_file()); \
        layer_param->clear_mean_file(); \
      } \
      if (layer_param->has_crop_size()) { \
        transform_param->set_crop_size(layer_param->crop_size()); \
        layer_param->clear_crop_size(); \
      } \
      if (layer_param->has_mirror()) { \
        transform_param->set_mirror(layer_param->mirror()); \
        layer_param->clear_mirror(); \
      } \
    } \
  } while (0)
730
731
// Moves the deprecated in-layer transformation fields of every
// DATA / IMAGE_DATA / WINDOW_DATA layer into that layer's transform_param.
// The macro invocations expand against the loop index 'i', so the variable
// name must stay 'i'.
void UpgradeNetDataTransformation(NetParameter* net_param) {
  for (int i = 0; i < net_param->layers_size(); ++i) {
    CONVERT_LAYER_TRANSFORM_PARAM(DATA, Data, data);
    CONVERT_LAYER_TRANSFORM_PARAM(IMAGE_DATA, ImageData, image_data);
    CONVERT_LAYER_TRANSFORM_PARAM(WINDOW_DATA, WindowData, window_data);
  }
}
738
739
// Runs every applicable upgrade pass on 'param', in order:
//   V0 -> V1 layers, deprecated data-transform fields, V1 -> V2 layers,
//   and old-style BatchNorm params.
// 'param_file' is used only for log messages.  Returns false if any pass
// reported an incompatibility, but always upgrades as far as possible.
bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
  bool success = true;
  if (NetNeedsV0ToV1Upgrade(*param)) {
    // NetParameter was specified using the old style (V0LayerParameter); try to
    // upgrade it.
    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
               << "V0LayerParameter: " << param_file;
    // Upgrade from a copy so UpgradeV0Net can rebuild 'param' in place.
    NetParameter original_param(*param);
    if (!UpgradeV0Net(original_param, param)) {
      success = false;
      LOG(ERROR) << "Warning: had one or more problems upgrading "
          << "V0NetParameter to NetParameter (see above); continuing anyway.";
    } else {
      LOG(INFO) << "Successfully upgraded file specified using deprecated "
                << "V0LayerParameter";
    }
    LOG(ERROR) << "Note that future Caffe releases will not support "
        << "V0NetParameter; use ./build/tools/upgrade_net_proto_text for "
        << "prototxt and ./build/tools/upgrade_net_proto_binary for model "
        << "weights upgrade this and any other net protos to the new format.";
  }
  // NetParameter uses old style data transformation fields; try to upgrade it.
  if (NetNeedsDataUpgrade(*param)) {
    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
               << "transformation parameters: " << param_file;
    UpgradeNetDataTransformation(param);
    LOG(INFO) << "Successfully upgraded file specified using deprecated "
              << "data transformation parameters.";
    LOG(ERROR) << "Note that future Caffe releases will only support "
               << "transform_param messages for transformation fields.";
  }
  if (NetNeedsV1ToV2Upgrade(*param)) {
    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
               << "V1LayerParameter: " << param_file;
    if (!UpgradeV1Net(param)) {
      success = false;
      LOG(ERROR) << "Warning: had one or more problems upgrading "
                 << "V1LayerParameter (see above); continuing anyway.";
    } else {
      LOG(INFO) << "Successfully upgraded file specified using deprecated "
                << "V1LayerParameter";
    }
  }
  // NetParameter uses old style batch norm layers; try to upgrade it.
  if (NetNeedsBatchNormUpgrade(*param)) {
    LOG(INFO) << "Attempting to upgrade batch norm layers using deprecated "
              << "params: " << param_file;
    UpgradeNetBatchNorm(param);
    LOG(INFO) << "Successfully upgraded batch norm layers using deprecated "
              << "params.";
  }
  return success;
}
792
793
// Rewrites the deprecated repeated 'layers' field (V1LayerParameter) as
// modern 'layer' (LayerParameter) entries.  Any pre-existing 'layer' entries
// are discarded (with an error) before conversion.  Returns false if anything
// was discarded or any layer failed to convert cleanly.
bool UpgradeV1Net(NetParameter* net_param) {
  // V1LayerParameter layers -> LayerParameter layer
  CV_Assert(net_param != NULL);
  bool all_ok = true;
  if (net_param->layer_size() > 0) {
    LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
               << "fields; these will be ignored for the upgrade.";
    all_ok = false;
  }
  net_param->clear_layer();
  const int num_v1_layers = net_param->layers_size();
  for (int idx = 0; idx < num_v1_layers; ++idx) {
    LayerParameter* upgraded = net_param->add_layer();
    if (!UpgradeV1LayerParameter(net_param->mutable_layers(idx), upgraded)) {
      LOG(ERROR) << "Upgrade of input layer " << idx << " failed.";
      all_ok = false;
    }
  }
  net_param->clear_layers();
  return all_ok;
}
813
814
bool NetNeedsBatchNormUpgrade(const NetParameter& net_param) {
815
for (int i = 0; i < net_param.layer_size(); ++i) {
816
// Check if BatchNorm layers declare three parameters, as required by
817
// the previous BatchNorm layer definition.
818
if (net_param.layer(i).type() == "BatchNorm"
819
&& net_param.layer(i).param_size() == 3) {
820
return true;
821
}
822
}
823
return false;
824
}
825
826
// Strips the three param specs that the previous BatchNorm layer
// definition required, converting old-style BatchNorm layers in place.
void UpgradeNetBatchNorm(NetParameter* net_param) {
  for (int idx = 0; idx < net_param->layer_size(); ++idx) {
    const LayerParameter& layer = net_param->layer(idx);
    // Only layers matching the legacy three-param signature are touched.
    if (layer.type() == "BatchNorm" && layer.param_size() == 3) {
      net_param->mutable_layer(idx)->clear_param();
    }
  }
}
836
837
bool UpgradeV1LayerParameter(V1LayerParameter* v1_layer_param_,
838
LayerParameter* layer_param) {
839
CV_Assert(v1_layer_param_ != NULL);
840
const V1LayerParameter& v1_layer_param = *v1_layer_param_;
841
layer_param->Clear();
842
bool is_fully_compatible = true;
843
for (int i = 0; i < v1_layer_param.bottom_size(); ++i) {
844
layer_param->add_bottom(v1_layer_param.bottom(i));
845
}
846
for (int i = 0; i < v1_layer_param.top_size(); ++i) {
847
layer_param->add_top(v1_layer_param.top(i));
848
}
849
if (v1_layer_param.has_name()) {
850
layer_param->set_name(v1_layer_param.name());
851
}
852
for (int i = 0; i < v1_layer_param.include_size(); ++i) {
853
layer_param->add_include()->CopyFrom(v1_layer_param.include(i));
854
}
855
for (int i = 0; i < v1_layer_param.exclude_size(); ++i) {
856
layer_param->add_exclude()->CopyFrom(v1_layer_param.exclude(i));
857
}
858
if (v1_layer_param.has_type()) {
859
layer_param->set_type(UpgradeV1LayerType(v1_layer_param.type()));
860
}
861
layer_param->mutable_blobs()->Swap(v1_layer_param_->mutable_blobs());
862
for (int i = 0; i < v1_layer_param.param_size(); ++i) {
863
while (layer_param->param_size() <= i) { layer_param->add_param(); }
864
layer_param->mutable_param(i)->set_name(v1_layer_param.param(i));
865
}
866
ParamSpec_DimCheckMode mode;
867
for (int i = 0; i < v1_layer_param.blob_share_mode_size(); ++i) {
868
while (layer_param->param_size() <= i) { layer_param->add_param(); }
869
switch (v1_layer_param.blob_share_mode(i)) {
870
case V1LayerParameter_DimCheckMode_STRICT:
871
mode = ParamSpec_DimCheckMode_STRICT;
872
break;
873
case V1LayerParameter_DimCheckMode_PERMISSIVE:
874
mode = ParamSpec_DimCheckMode_PERMISSIVE;
875
break;
876
default:
877
LOG(FATAL) << "Unknown blob_share_mode: "
878
<< v1_layer_param.blob_share_mode(i);
879
break;
880
}
881
layer_param->mutable_param(i)->set_share_mode(mode);
882
}
883
for (int i = 0; i < v1_layer_param.blobs_lr_size(); ++i) {
884
while (layer_param->param_size() <= i) { layer_param->add_param(); }
885
layer_param->mutable_param(i)->set_lr_mult(v1_layer_param.blobs_lr(i));
886
}
887
for (int i = 0; i < v1_layer_param.weight_decay_size(); ++i) {
888
while (layer_param->param_size() <= i) { layer_param->add_param(); }
889
layer_param->mutable_param(i)->set_decay_mult(
890
v1_layer_param.weight_decay(i));
891
}
892
for (int i = 0; i < v1_layer_param.loss_weight_size(); ++i) {
893
layer_param->add_loss_weight(v1_layer_param.loss_weight(i));
894
}
895
if (v1_layer_param.has_accuracy_param()) {
896
layer_param->mutable_accuracy_param()->CopyFrom(
897
v1_layer_param.accuracy_param());
898
}
899
if (v1_layer_param.has_argmax_param()) {
900
layer_param->mutable_argmax_param()->CopyFrom(
901
v1_layer_param.argmax_param());
902
}
903
if (v1_layer_param.has_concat_param()) {
904
layer_param->mutable_concat_param()->CopyFrom(
905
v1_layer_param.concat_param());
906
}
907
if (v1_layer_param.has_contrastive_loss_param()) {
908
layer_param->mutable_contrastive_loss_param()->CopyFrom(
909
v1_layer_param.contrastive_loss_param());
910
}
911
if (v1_layer_param.has_convolution_param()) {
912
layer_param->mutable_convolution_param()->CopyFrom(
913
v1_layer_param.convolution_param());
914
}
915
if (v1_layer_param.has_data_param()) {
916
layer_param->mutable_data_param()->CopyFrom(
917
v1_layer_param.data_param());
918
}
919
if (v1_layer_param.has_dropout_param()) {
920
layer_param->mutable_dropout_param()->CopyFrom(
921
v1_layer_param.dropout_param());
922
}
923
if (v1_layer_param.has_dummy_data_param()) {
924
layer_param->mutable_dummy_data_param()->CopyFrom(
925
v1_layer_param.dummy_data_param());
926
}
927
if (v1_layer_param.has_eltwise_param()) {
928
layer_param->mutable_eltwise_param()->CopyFrom(
929
v1_layer_param.eltwise_param());
930
}
931
if (v1_layer_param.has_exp_param()) {
932
layer_param->mutable_exp_param()->CopyFrom(
933
v1_layer_param.exp_param());
934
}
935
if (v1_layer_param.has_hdf5_data_param()) {
936
layer_param->mutable_hdf5_data_param()->CopyFrom(
937
v1_layer_param.hdf5_data_param());
938
}
939
if (v1_layer_param.has_hdf5_output_param()) {
940
layer_param->mutable_hdf5_output_param()->CopyFrom(
941
v1_layer_param.hdf5_output_param());
942
}
943
if (v1_layer_param.has_hinge_loss_param()) {
944
layer_param->mutable_hinge_loss_param()->CopyFrom(
945
v1_layer_param.hinge_loss_param());
946
}
947
if (v1_layer_param.has_image_data_param()) {
948
layer_param->mutable_image_data_param()->CopyFrom(
949
v1_layer_param.image_data_param());
950
}
951
if (v1_layer_param.has_infogain_loss_param()) {
952
layer_param->mutable_infogain_loss_param()->CopyFrom(
953
v1_layer_param.infogain_loss_param());
954
}
955
if (v1_layer_param.has_inner_product_param()) {
956
layer_param->mutable_inner_product_param()->CopyFrom(
957
v1_layer_param.inner_product_param());
958
}
959
if (v1_layer_param.has_lrn_param()) {
960
layer_param->mutable_lrn_param()->CopyFrom(
961
v1_layer_param.lrn_param());
962
}
963
if (v1_layer_param.has_memory_data_param()) {
964
layer_param->mutable_memory_data_param()->CopyFrom(
965
v1_layer_param.memory_data_param());
966
}
967
if (v1_layer_param.has_mvn_param()) {
968
layer_param->mutable_mvn_param()->CopyFrom(
969
v1_layer_param.mvn_param());
970
}
971
if (v1_layer_param.has_pooling_param()) {
972
layer_param->mutable_pooling_param()->CopyFrom(
973
v1_layer_param.pooling_param());
974
}
975
if (v1_layer_param.has_power_param()) {
976
layer_param->mutable_power_param()->CopyFrom(
977
v1_layer_param.power_param());
978
}
979
if (v1_layer_param.has_relu_param()) {
980
layer_param->mutable_relu_param()->CopyFrom(
981
v1_layer_param.relu_param());
982
}
983
if (v1_layer_param.has_sigmoid_param()) {
984
layer_param->mutable_sigmoid_param()->CopyFrom(
985
v1_layer_param.sigmoid_param());
986
}
987
if (v1_layer_param.has_softmax_param()) {
988
layer_param->mutable_softmax_param()->CopyFrom(
989
v1_layer_param.softmax_param());
990
}
991
if (v1_layer_param.has_slice_param()) {
992
layer_param->mutable_slice_param()->CopyFrom(
993
v1_layer_param.slice_param());
994
}
995
if (v1_layer_param.has_tanh_param()) {
996
layer_param->mutable_tanh_param()->CopyFrom(
997
v1_layer_param.tanh_param());
998
}
999
if (v1_layer_param.has_threshold_param()) {
1000
layer_param->mutable_threshold_param()->CopyFrom(
1001
v1_layer_param.threshold_param());
1002
}
1003
if (v1_layer_param.has_window_data_param()) {
1004
layer_param->mutable_window_data_param()->CopyFrom(
1005
v1_layer_param.window_data_param());
1006
}
1007
if (v1_layer_param.has_transform_param()) {
1008
layer_param->mutable_transform_param()->CopyFrom(
1009
v1_layer_param.transform_param());
1010
}
1011
if (v1_layer_param.has_loss_param()) {
1012
layer_param->mutable_loss_param()->CopyFrom(
1013
v1_layer_param.loss_param());
1014
}
1015
if (v1_layer_param.has_layer()) {
1016
LOG(ERROR) << "Input NetParameter has V0 layer -- ignoring.";
1017
is_fully_compatible = false;
1018
}
1019
return is_fully_compatible;
1020
}
1021
1022
const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type) {
1023
switch (type) {
1024
case V1LayerParameter_LayerType_NONE:
1025
return "";
1026
case V1LayerParameter_LayerType_ABSVAL:
1027
return "AbsVal";
1028
case V1LayerParameter_LayerType_ACCURACY:
1029
return "Accuracy";
1030
case V1LayerParameter_LayerType_ARGMAX:
1031
return "ArgMax";
1032
case V1LayerParameter_LayerType_BNLL:
1033
return "BNLL";
1034
case V1LayerParameter_LayerType_CONCAT:
1035
return "Concat";
1036
case V1LayerParameter_LayerType_CONTRASTIVE_LOSS:
1037
return "ContrastiveLoss";
1038
case V1LayerParameter_LayerType_CONVOLUTION:
1039
return "Convolution";
1040
case V1LayerParameter_LayerType_DECONVOLUTION:
1041
return "Deconvolution";
1042
case V1LayerParameter_LayerType_DATA:
1043
return "Data";
1044
case V1LayerParameter_LayerType_DROPOUT:
1045
return "Dropout";
1046
case V1LayerParameter_LayerType_DUMMY_DATA:
1047
return "DummyData";
1048
case V1LayerParameter_LayerType_EUCLIDEAN_LOSS:
1049
return "EuclideanLoss";
1050
case V1LayerParameter_LayerType_ELTWISE:
1051
return "Eltwise";
1052
case V1LayerParameter_LayerType_EXP:
1053
return "Exp";
1054
case V1LayerParameter_LayerType_FLATTEN:
1055
return "Flatten";
1056
case V1LayerParameter_LayerType_HDF5_DATA:
1057
return "HDF5Data";
1058
case V1LayerParameter_LayerType_HDF5_OUTPUT:
1059
return "HDF5Output";
1060
case V1LayerParameter_LayerType_HINGE_LOSS:
1061
return "HingeLoss";
1062
case V1LayerParameter_LayerType_IM2COL:
1063
return "Im2col";
1064
case V1LayerParameter_LayerType_IMAGE_DATA:
1065
return "ImageData";
1066
case V1LayerParameter_LayerType_INFOGAIN_LOSS:
1067
return "InfogainLoss";
1068
case V1LayerParameter_LayerType_INNER_PRODUCT:
1069
return "InnerProduct";
1070
case V1LayerParameter_LayerType_LRN:
1071
return "LRN";
1072
case V1LayerParameter_LayerType_MEMORY_DATA:
1073
return "MemoryData";
1074
case V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
1075
return "MultinomialLogisticLoss";
1076
case V1LayerParameter_LayerType_MVN:
1077
return "MVN";
1078
case V1LayerParameter_LayerType_POOLING:
1079
return "Pooling";
1080
case V1LayerParameter_LayerType_POWER:
1081
return "Power";
1082
case V1LayerParameter_LayerType_RELU:
1083
return "ReLU";
1084
case V1LayerParameter_LayerType_SIGMOID:
1085
return "Sigmoid";
1086
case V1LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS:
1087
return "SigmoidCrossEntropyLoss";
1088
case V1LayerParameter_LayerType_SILENCE:
1089
return "Silence";
1090
case V1LayerParameter_LayerType_SOFTMAX:
1091
return "Softmax";
1092
case V1LayerParameter_LayerType_SOFTMAX_LOSS:
1093
return "SoftmaxWithLoss";
1094
case V1LayerParameter_LayerType_SPLIT:
1095
return "Split";
1096
case V1LayerParameter_LayerType_SLICE:
1097
return "Slice";
1098
case V1LayerParameter_LayerType_TANH:
1099
return "TanH";
1100
case V1LayerParameter_LayerType_WINDOW_DATA:
1101
return "WindowData";
1102
case V1LayerParameter_LayerType_THRESHOLD:
1103
return "Threshold";
1104
default:
1105
LOG(FATAL) << "Unknown V1LayerParameter layer type: " << type;
1106
return "";
1107
}
1108
}
1109
1110
// Byte limit handed to protobuf's CodedInputStream below; INT_MAX permits
// messages up to protobuf's hard cap of 2 GB minus 1 byte.
const int kProtoReadBytesLimit = INT_MAX; // Max size of 2 GB minus 1 byte.
1111
1112
// Parses a binary-encoded protobuf message from 'input' into 'proto',
// raising the default size limit so large serialized nets can be read.
bool ReadProtoFromBinary(ZeroCopyInputStream* input, Message *proto) {
  CodedInputStream coded_stream(input);
  // Allow up to kProtoReadBytesLimit total bytes; the second argument is
  // the warning threshold (512 MB).
  coded_stream.SetTotalBytesLimit(kProtoReadBytesLimit, 536870912);
  return proto->ParseFromCodedStream(&coded_stream);
}
1118
1119
bool ReadProtoFromTextFile(const char* filename, Message* proto) {
1120
std::ifstream fs(filename, std::ifstream::in);
1121
CHECK(fs.is_open()) << "Can't open \"" << filename << "\"";
1122
IstreamInputStream input(&fs);
1123
#ifndef OPENCV_DNN_EXTERNAL_PROTOBUF
1124
return google::protobuf::TextFormat::Parser(true).Parse(&input, proto);
1125
#else
1126
return google::protobuf::TextFormat::Parser().Parse(&input, proto);
1127
#endif
1128
}
1129
1130
bool ReadProtoFromBinaryFile(const char* filename, Message* proto) {
1131
std::ifstream fs(filename, std::ifstream::in | std::ifstream::binary);
1132
CHECK(fs.is_open()) << "Can't open \"" << filename << "\"";
1133
IstreamInputStream raw_input(&fs);
1134
1135
return ReadProtoFromBinary(&raw_input, proto);
1136
}
1137
1138
bool ReadProtoFromTextBuffer(const char* data, size_t len, Message* proto) {
1139
ArrayInputStream input(data, len);
1140
return google::protobuf::TextFormat::Parse(&input, proto);
1141
}
1142
1143
1144
bool ReadProtoFromBinaryBuffer(const char* data, size_t len, Message* proto) {
1145
ArrayInputStream raw_input(data, len);
1146
return ReadProtoFromBinary(&raw_input, proto);
1147
}
1148
1149
void ReadNetParamsFromTextFileOrDie(const char* param_file,
1150
NetParameter* param) {
1151
CHECK(ReadProtoFromTextFile(param_file, param))
1152
<< "Failed to parse NetParameter file: " << param_file;
1153
UpgradeNetAsNeeded(param_file, param);
1154
}
1155
1156
void ReadNetParamsFromTextBufferOrDie(const char* data, size_t len,
1157
NetParameter* param) {
1158
CHECK(ReadProtoFromTextBuffer(data, len, param))
1159
<< "Failed to parse NetParameter buffer";
1160
UpgradeNetAsNeeded("memory buffer", param);
1161
}
1162
1163
void ReadNetParamsFromBinaryFileOrDie(const char* param_file,
1164
NetParameter* param) {
1165
CHECK(ReadProtoFromBinaryFile(param_file, param))
1166
<< "Failed to parse NetParameter file: " << param_file;
1167
UpgradeNetAsNeeded(param_file, param);
1168
}
1169
1170
void ReadNetParamsFromBinaryBufferOrDie(const char* data, size_t len,
1171
NetParameter* param) {
1172
CHECK(ReadProtoFromBinaryBuffer(data, len, param))
1173
<< "Failed to parse NetParameter buffer";
1174
UpgradeNetAsNeeded("memory buffer", param);
1175
}
1176
1177
}
1178
}
1179
#endif
1180
1181