#ifndef STAN_MODEL_TEST_GRADIENTS_HPP
#define STAN_MODEL_TEST_GRADIENTS_HPP

#include <stan/interface_callbacks/writer/base_writer.hpp>
#include <stan/model/finite_diff_grad.hpp>
#include <stan/model/log_prob_grad.hpp>
#include <cmath>
#include <iomanip>
#include <sstream>
#include <vector>

namespace stan {
  namespace model {

    /**
     * Test the log_prob_grad() function's ability to produce accurate
     * gradients using finite differences.  Writes a comparison table
     * through the writer callback and returns the number of components
     * whose discrepancy exceeds the allowed error (0 if all pass).
     */
    template <bool propto, bool jacobian_adjust_transform, class M>
    int test_gradients(const M& model,
                       std::vector<double>& params_r,
                       std::vector<int>& params_i,
                       double epsilon, double error,
                       stan::interface_callbacks::writer::base_writer& writer) {
      std::stringstream msg;

      // Gradient as computed by the model (e.g., by autodiff).
      std::vector<double> grad;
      double lp
          = log_prob_grad<propto, jacobian_adjust_transform>(model, params_r,
                                                             params_i, grad,
                                                             &msg);
      if (msg.str().length() > 0)
        writer(msg.str());

      // Reference gradient by finite differences with step epsilon.
      std::vector<double> grad_fd;
      finite_diff_grad<false, true, M>(model, params_r, params_i,
                                       grad_fd, epsilon, &msg);
      if (msg.str().length() > 0)
        writer(msg.str());

      int num_failed = 0;

      msg.str("");
      msg << " Log probability=" << lp;
      writer(msg.str());

      msg.str("");
      msg << std::setw(10) << "param idx"
          << std::setw(16) << "value"
          << std::setw(16) << "model"
          << std::setw(16) << "finite diff"
          << std::setw(16) << "error";
      writer(msg.str());

      for (size_t k = 0; k < params_r.size(); k++) {
        msg.str("");
        msg << std::setw(10) << k
            << std::setw(16) << params_r[k]
            << std::setw(16) << grad[k]
            << std::setw(16) << grad_fd[k]
            << std::setw(16) << (grad[k] - grad_fd[k]);
        writer(msg.str());

        if (std::fabs(grad[k] - grad_fd[k]) > error)
          num_failed++;
      }
      return num_failed;
    }

  }
}
#endif
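For a concrete picture of what this check does, here is a self-contained sketch of the same comparison outside Stan: a hand-coded gradient of a toy log density is tested against central finite differences, one row per parameter, counting the components whose discrepancy exceeds the tolerance. The density lp, the input values, and the tolerances are illustrative assumptions, not Stan API.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Toy target: standard normal log density up to a constant,
// lp(x) = -0.5 * sum_k x[k]^2, with analytic gradient -x[k].
static double lp(const std::vector<double>& x) {
  double sum = 0;
  for (std::size_t k = 0; k < x.size(); ++k)
    sum += x[k] * x[k];
  return -0.5 * sum;
}

int main() {
  std::vector<double> x = {0.3, -1.2, 2.0};  // hypothetical parameter values
  const double epsilon = 1e-6;  // finite-difference step
  const double error = 1e-6;    // allowed per-component discrepancy

  int num_failed = 0;
  for (std::size_t k = 0; k < x.size(); ++k) {
    double analytic = -x[k];  // hand-coded gradient component

    // Central difference: (lp(x + eps*e_k) - lp(x - eps*e_k)) / (2*eps).
    std::vector<double> hi = x, lo = x;
    hi[k] += epsilon;
    lo[k] -= epsilon;
    double fd = (lp(hi) - lp(lo)) / (2 * epsilon);

    std::cout << k << ": model=" << analytic
              << " finite diff=" << fd
              << " error=" << (analytic - fd) << "\n";
    if (std::fabs(analytic - fd) > error)
      ++num_failed;
  }
  return num_failed;  // 0 means every component passed
}

Because the toy density is quadratic, the central difference is exact up to rounding and all rows pass; a sign or scaling bug in the analytic gradient would surface immediately in the error column.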
stan
Probability, optimization and sampling library.

int test_gradients(const M &model, std::vector<double> &params_r, std::vector<int> &params_i, double epsilon, double error, stan::interface_callbacks::writer::base_writer &writer)
Test the log_prob_grad() function's ability to produce accurate gradients using finite differences.
void finite_diff_grad(const M &model, std::vector<double> &params_r, std::vector<int> &params_i, std::vector<double> &grad, double epsilon=1e-6, std::ostream *msgs=0)
Compute the gradient using finite differences for the specified parameters, writing the result into the specified gradient vector.
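The scheme implied here is a central difference: perturb one coordinate at a time by plus and minus epsilon, and divide the difference of log probabilities by 2 * epsilon. A minimal sketch follows, assuming a plain functor f in place of the model's log probability; finite_diff_grad_sketch and its interface are illustrative, not Stan's API.

#include <cstddef>
#include <functional>
#include <vector>

// Sketch: central-difference gradient of f at x, written into grad.
// f stands in for the model's log probability function (an assumption).
void finite_diff_grad_sketch(
    const std::function<double(const std::vector<double>&)>& f,
    std::vector<double> x,  // taken by value so it can be perturbed in place
    std::vector<double>& grad, double epsilon = 1e-6) {
  grad.resize(x.size());
  for (std::size_t k = 0; k < x.size(); ++k) {
    const double orig = x[k];
    x[k] = orig + epsilon;
    const double f_plus = f(x);
    x[k] = orig - epsilon;
    const double f_minus = f(x);
    x[k] = orig;  // restore before moving to the next coordinate
    grad[k] = (f_plus - f_minus) / (2 * epsilon);
  }
}

Central differences have truncation error of order epsilon squared, so a step near the default 1e-6 balances truncation error against floating-point cancellation in double precision.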
stan::interface_callbacks::writer::base_writer
base_writer is an abstract base class defining the interface for Stan writer callbacks.