helloworld.cpp
Ceres solver usage notes
This post is based on ์ํํ๋'s helloceres git code.
It looks like I will be using the Ceres solver before long, so I am writing this post while studying and organizing my notes.
What kind of problems does Ceres solve?
Basically, it can solve non-linear least squares problems (with optional bounds constraints).
The basic formulation that the Ceres solver deals with is the following.
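As given in the Ceres documentation, the general form is
\[
\min_{x} \; \frac{1}{2}\sum_{i} \rho_i\!\left( \left\| f_i\left(x_{i_1}, \ldots, x_{i_k}\right) \right\|^2 \right)
\quad \text{subject to} \quad l_j \le x_j \le u_j
\]
where the \(f_i\) are the residual functions, the \(\rho_i\) are optional loss functions, and \(x_{i_1},\ldots,x_{i_k}\) are the parameter blocks.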
Example code
// A simple example of using the Ceres minimizer.
//
// Minimize 0.5 (10 - x)^2 using the jacobian matrix computed using
// automatic differentiation.
#include <iostream>

#include "ceres/ceres.h"
#include "glog/logging.h"

using ceres::AutoDiffCostFunction;
using ceres::CostFunction;
using ceres::Problem;
using ceres::Solver;
using ceres::Solve;

// A templated cost functor that implements the residual r = 10 -
// x. The method operator() is templated so that we can then use an
// automatic differentiation wrapper around it to generate its
// derivatives.
struct CostFunctor {
  template <typename T>
  bool operator()(const T* const x, T* residual) const {
    residual[0] = 10.0 - x[0];
    return true;
  }
};

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);

  // The variable to solve for with its initial value.
  double initial_x = 5.0;
  double x = initial_x;

  // Build the problem.
  Problem problem;

  // Set up the only cost function (also known as residual). This uses
  // auto-differentiation to obtain the derivative (jacobian).
  CostFunction* cost_function =
      new AutoDiffCostFunction<CostFunctor, 1, 1>(new CostFunctor);
  problem.AddResidualBlock(cost_function, nullptr, &x);

  // Run the solver!
  Solver::Options options;
  options.linear_solver_type = ceres::DENSE_QR;
  options.minimizer_progress_to_stdout = true;
  Solver::Summary summary;
  Solve(options, &problem, &summary);

  std::cout << summary.BriefReport() << "\n";
  std::cout << "x : " << initial_x << " -> " << x << "\n";
  return 0;
}
Break the code
CostFunctor
The residual is declared inside the CostFunctor struct:
struct CostFunctor {
  template <typename T>
  bool operator()(const T* const x, T* residual) const {
    residual[0] = 10.0 - x[0];
    return true;
  }
};
With this declaration, we can see that the function to be minimized is \(\frac{1}{2}(10 - x)^2\).
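As an aside, the same functor pattern extends naturally to residuals built from observed data. Below is a minimal sketch loosely following the curve-fitting example in the Ceres docs; the names and the residual \(y - (mx + c)\) are illustrative, not part of the original post.
// Hypothetical functor (illustrative only): stores one observation (x_obs, y_obs)
// and computes the residual r = y_obs - (m * x_obs + c) for parameters m and c.
struct LinearResidual {
  LinearResidual(double x_obs, double y_obs) : x_obs_(x_obs), y_obs_(y_obs) {}

  template <typename T>
  bool operator()(const T* const m, const T* const c, T* residual) const {
    residual[0] = y_obs_ - (m[0] * x_obs_ + c[0]);
    return true;
  }

  double x_obs_;
  double y_obs_;
};
Passing the observation in through the constructor keeps the templated operator() free of anything but the parameters, which is the usual pattern for fitting problems.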
CostFunction/AutoDiffCostFunction
CostFunction* cost_function =
    new AutoDiffCostFunction<CostFunctor, 1, 1>(new CostFunctor);
problem.AddResidualBlock(cost_function, nullptr, &x);
This corresponds to \(f\) in the formula above.
It declares which function to use as the cost function.
In this example the residual is \(f = 10 - x\), so the quantity being minimized becomes \(\frac{1}{2}(10 - x)^2\).
AutoDiffCostFunction
Judging by the name, this seems to be something that differentiates automatically. I understand that it takes the CostFunctor as an argument and uses it, but the integer template arguments in <CostFunctor, 1, 1> need a bit more study.
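For reference, as far as I understand from the Ceres documentation, the first integer is the dimension of the residual and the second is the size of the parameter block x. A small sketch with made-up dimensions (the functor below is hypothetical, not from the original post):
// Hypothetical functor: a 2-dimensional residual that depends on a single
// 3-dimensional parameter block.
struct MyCostFunctor {
  template <typename T>
  bool operator()(const T* const x, T* residual) const {
    residual[0] = 1.0 - x[0];
    residual[1] = 2.0 - x[1] * x[2];
    return true;
  }
};

// Template arguments: <functor, number of residuals, size of the parameter block>.
ceres::CostFunction* cost =
    new ceres::AutoDiffCostFunction<MyCostFunctor, 2, 3>(new MyCostFunctor);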
AddResidualBlock
problem.AddResidualBlock(cost_function, nullptr, &x);
Judging from the way the arguments are passed, this seems to mean: since we want to find the parameter \(x\) that minimizes the cost_function \(f\) defined above, add one such residual block to the problem.
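As a side note, the second argument (nullptr here) is the loss function \(\rho\) from the formula above; nullptr means the plain squared residual is used. A robust loss could be supplied instead, for example (the scale value below is illustrative, not from the original post):
problem.AddResidualBlock(cost_function, new ceres::CauchyLoss(0.5), &x);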
Solver
Before running the solver, we first declare an options object and set the solver options in it.
Solver::Options options;
options.linear_solver_type = ceres::DENSE_QR;
options.minimizer_progress_to_stdout = true;
Solver::Summary summary;
Solve(options, &problem, &summary);
minimizer_progress_to_stdout appears to be the parameter that decides whether or not the minimization progress is printed.
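A few other Solver::Options fields that seem commonly used (the values below are illustrative, not from the original post):
options.max_num_iterations = 100;    // upper bound on the number of solver iterations
options.function_tolerance = 1e-10;  // stop when the relative change in cost drops below this
options.num_threads = 4;             // threads used to evaluate the Jacobian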
The problem is then solved by calling the Solve() function.
- the problem to which we added the residual block
- the solver options we just defined
- the summary that will summarize the minimization process
Passing these three arguments lets us solve the problem.
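As a side note, the summary object can also be inspected after Solve() returns; a small sketch (a hypothetical follow-up, not from the original post):
if (summary.IsSolutionUsable()) {
  // FullReport() prints a more detailed report than BriefReport().
  std::cout << summary.FullReport() << "\n";
}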
Result
Since the cost is \(\frac{1}{2}(10 - x)^2\), we can find \(x = 10\) by hand,
and we confirm that the Ceres solver result is the same.
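Setting the derivative of the cost to zero gives the same answer:
\[
\frac{d}{dx}\left[\frac{1}{2}(10 - x)^2\right] = -(10 - x) = 0 \;\;\Longrightarrow\;\; x = 10
\]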
iter      cost      cost_change  |gradient|   |step|    tr_ratio  tr_radius  ls_iter  iter_time  total_time
   0  1.250000e+01    0.00e+00    5.00e+00   0.00e+00   0.00e+00  1.00e+04        0    4.98e-05    1.13e-04
   1  1.249750e-07    1.25e+01    5.00e-04   5.00e+00   1.00e+00  3.00e+04        1    7.61e-05    3.02e-04
   2  1.388518e-16    1.25e-07    1.67e-08   5.00e-04   1.00e+00  9.00e+04        1    1.79e-05    3.43e-04
Ceres Solver Report: Iterations: 3, Initial cost: 1.250000e+01, Final cost: 1.388518e-16, Termination: CONVERGENCE
x : 5 -> 10
Because the problem is solved with an iterative least-squares method, several iterations are run; in this example it converged to \(x = 10\) after only 2 iterations.