Skip to content

Commit

Permalink
Merge pull request #6461 from open-cv/patch_1
Browse files Browse the repository at this point in the history
fix typos and add passing by reference
  • Loading branch information
Noiredd authored Aug 21, 2018
2 parents 8e97b8a + 828dd10 commit 128797e
Show file tree
Hide file tree
Showing 10 changed files with 20 additions and 20 deletions.
2 changes: 1 addition & 1 deletion cmake/Modules/FindMKL.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
# This module defines the following variables:
#
# MKL_FOUND : True mkl is found
# MKL_INCLUDE_DIR : unclude directory
# MKL_INCLUDE_DIR : include directory
# MKL_LIBRARIES : the libraries to link against.


Expand Down
6 changes: 3 additions & 3 deletions include/caffe/net.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,9 @@ class Net {
* another Net.
*/
void CopyTrainedLayersFrom(const NetParameter& param);
void CopyTrainedLayersFrom(const string trained_filename);
void CopyTrainedLayersFromBinaryProto(const string trained_filename);
void CopyTrainedLayersFromHDF5(const string trained_filename);
void CopyTrainedLayersFrom(const string& trained_filename);
void CopyTrainedLayersFromBinaryProto(const string& trained_filename);
void CopyTrainedLayersFromHDF5(const string& trained_filename);
/// @brief Writes the net to a proto.
void ToProto(NetParameter* param, bool write_diff = false) const;
/// @brief Writes the net to an HDF5 file.
Expand Down
4 changes: 2 additions & 2 deletions include/caffe/solver.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ class Solver {
// The main entry of the solver function. In default, iter will be zero. Pass
// in a non-zero iter number to resume training for a pre-trained net.
virtual void Solve(const char* resume_file = NULL);
inline void Solve(const string resume_file) { Solve(resume_file.c_str()); }
inline void Solve(const string& resume_file) { Solve(resume_file.c_str()); }
void Step(int iters);
// The Restore method simply dispatches to one of the
// RestoreSolverStateFrom___ protected methods. You should implement these
Expand Down Expand Up @@ -98,7 +98,7 @@ class Solver {
virtual void ApplyUpdate() = 0;

protected:
string SnapshotFilename(const string extension);
string SnapshotFilename(const string& extension);
string SnapshotToBinaryProto();
string SnapshotToHDF5();
// The test routine
Expand Down
2 changes: 1 addition & 1 deletion include/caffe/util/signal_handler.h
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ namespace caffe {

class SignalHandler {
public:
// Contructor. Specify what action to take when a signal is received.
// Constructor. Specify what action to take when a signal is received.
SignalHandler(SolverAction::Enum SIGINT_action,
SolverAction::Enum SIGHUP_action);
~SignalHandler();
Expand Down
2 changes: 1 addition & 1 deletion python/caffe/_caffe.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,7 @@ BOOST_PYTHON_MODULE(_caffe) {
.def("reshape", &Net<Dtype>::Reshape)
.def("clear_param_diffs", &Net<Dtype>::ClearParamDiffs)
// The cast is to select a particular overload.
.def("copy_from", static_cast<void (Net<Dtype>::*)(const string)>(
.def("copy_from", static_cast<void (Net<Dtype>::*)(const string&)>(
&Net<Dtype>::CopyTrainedLayersFrom))
.def("share_with", &Net<Dtype>::ShareTrainedLayersWith)
.add_property("_blob_loss_weights", bp::make_function(
Expand Down
2 changes: 1 addition & 1 deletion src/caffe/layers/pooling_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ void PoolingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const int top_count = top[0]->count();
// We'll output the mask to top[1] if it's of size >1.
const bool use_top_mask = top.size() > 1;
int* mask = NULL; // suppress warnings about uninitalized variables
int* mask = NULL; // suppress warnings about uninitialized variables
Dtype* top_mask = NULL;
// Different pooling methods. We explicitly do the switch outside the for
// loop to save time, although this results in more code.
Expand Down
8 changes: 4 additions & 4 deletions src/caffe/net.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
// loss. We can skip backward computation for blobs that don't contribute
// to the loss.
// Also checks if all bottom blobs don't need backward computation (possible
// because the skip_propagate_down param) and so we can skip bacward
// because the skip_propagate_down param) and so we can skip backward
// computation for the entire layer
set<string> blobs_under_loss;
set<string> blobs_skip_backp;
Expand Down Expand Up @@ -770,7 +770,7 @@ void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
}

template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {
void Net<Dtype>::CopyTrainedLayersFrom(const string& trained_filename) {
if (H5Fis_hdf5(trained_filename.c_str())) {
CopyTrainedLayersFromHDF5(trained_filename);
} else {
Expand All @@ -780,14 +780,14 @@ void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {

template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFromBinaryProto(
const string trained_filename) {
const string& trained_filename) {
NetParameter param;
ReadNetParamsFromBinaryFileOrDie(trained_filename, &param);
CopyTrainedLayersFrom(param);
}

template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFromHDF5(const string trained_filename) {
void Net<Dtype>::CopyTrainedLayersFromHDF5(const string& trained_filename) {
#ifdef USE_HDF5
hid_t file_hid = H5Fopen(trained_filename.c_str(), H5F_ACC_RDONLY,
H5P_DEFAULT);
Expand Down
6 changes: 3 additions & 3 deletions src/caffe/proto/caffe.proto
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ message SolverParameter {

optional int32 snapshot = 14 [default = 0]; // The snapshot interval
// The prefix for the snapshot.
// If not set then is replaced by prototxt file path without extention.
// If not set then is replaced by prototxt file path without extension.
// If is set to directory then is augmented by prototxt file name
# without extension.
optional string snapshot_prefix = 15;
Expand Down Expand Up @@ -248,8 +248,8 @@ message SolverParameter {

// Path to caffemodel file(s) with pretrained weights to initialize finetuning.
// The same as the command line --weights parameter for the caffe train command.
// If command line --weights parameter if specified, it has higher priority
// and owerwrites this one(s).
// If command line --weights parameter is specified, it has higher priority
// and overwrites this one(s).
// If --snapshot command line parameter is specified, this one(s) are ignored.
// If several model files are expected, they can be listed in a one
// weights parameter separated by ',' (like in a command string) or
Expand Down
6 changes: 3 additions & 3 deletions src/caffe/solver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ template <typename Dtype>
void Solver<Dtype>::InitTrainNet() {
const int num_train_nets = param_.has_net() + param_.has_net_param() +
param_.has_train_net() + param_.has_train_net_param();
const string& field_names = "net, net_param, train_net, train_net_param";
const string field_names = "net, net_param, train_net, train_net_param";
CHECK_GE(num_train_nets, 1) << "SolverParameter must specify a train net "
<< "using one of these fields: " << field_names;
CHECK_LE(num_train_nets, 1) << "SolverParameter must not contain more than "
Expand Down Expand Up @@ -447,13 +447,13 @@ void Solver<Dtype>::CheckSnapshotWritePermissions() {
} else {
LOG(FATAL) << "Cannot write to snapshot prefix '"
<< param_.snapshot_prefix() << "'. Make sure "
<< "that the directory exists and is writeable.";
<< "that the directory exists and is writable.";
}
}
}

template <typename Dtype>
string Solver<Dtype>::SnapshotFilename(const string extension) {
string Solver<Dtype>::SnapshotFilename(const string& extension) {
return param_.snapshot_prefix() + "_iter_" + caffe::format_int(iter_)
+ extension;
}
Expand Down
2 changes: 1 addition & 1 deletion src/caffe/util/signal_handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ namespace {
void UnhookHandler() {
if (already_hooked_up) {
struct sigaction sa;
// Setup the sighub handler
// Setup the sighup handler
sa.sa_handler = SIG_DFL;
// Restart the system call, if at all possible
sa.sa_flags = SA_RESTART;
Expand Down

0 comments on commit 128797e

Please sign in to comment.