#ifndef DOXYGEN_SHOULD_SKIP_THIS
/* parameter bundle handed to each gradient worker thread */
struct GRADIENT_THREAD_PARAM
{
	CInferenceMethod* inf;
	CMap<TParameter*, SGVector<float64_t> >* grad;
	CSGObject* obj;
	TParameter* param;
	CLock* lock;
};
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
CInferenceMethod::CInferenceMethod(CKernel* kernel, CFeatures* features,
		CMeanFunction* mean, CLabels* labels, CLikelihoodModel* model)
{
	init();

	set_kernel(kernel);
	set_features(features);
	set_labels(labels);
	set_model(model);
	set_mean(mean);
}

void CInferenceMethod::init()
float64_t CInferenceMethod::get_marginal_likelihood_estimate(
		int32_t num_importance_samples, float64_t ridge_size)
{
	/* sample from q(f|y), the Gaussian approximation of the posterior */
	SGMatrix<float64_t> cov=get_posterior_covariance();

	/* ridge the diagonal for numerical stability */
	for (index_t i=0; i<cov.num_rows; ++i)
		cov(i,i)+=ridge_size;

	SGVector<float64_t> mean=get_posterior_mean();

	CGaussianDistribution* post_approx=new CGaussianDistribution(mean, cov);
	SGMatrix<float64_t> samples=post_approx->sample(num_importance_samples);

	/* log-pdf of each sample under the posterior approximation */
	SGVector<float64_t> log_pdf_post_approx=post_approx->log_pdf_multiple(samples);
	SG_UNREF(post_approx);
	/* log-pdf under the GP prior: work on a scaled copy of the training
	 * kernel matrix so the original stays untouched */
	SGMatrix<float64_t> scaled_kernel(m_ktrtr.num_rows, m_ktrtr.num_cols);
	memcpy(scaled_kernel.matrix, m_ktrtr.matrix,
			sizeof(float64_t)*m_ktrtr.num_rows*m_ktrtr.num_cols);
	for (index_t i=0; i<m_ktrtr.num_rows*m_ktrtr.num_cols; ++i)
		scaled_kernel.matrix[i]*=CMath::sq(m_scale);

	/* ridge the diagonal here as well */
	for (index_t i=0; i<m_ktrtr.num_rows; ++i)
		scaled_kernel(i,i)+=ridge_size;

	CGaussianDistribution* prior=new CGaussianDistribution(
			m_mean->get_mean_vector(m_features), scaled_kernel);
	SGVector<float64_t> log_pdf_prior=prior->log_pdf_multiple(samples);
	SG_UNREF(prior);

	/* log-pdf of the labels under the likelihood, once per sample */
	SGVector<float64_t> log_likelihood=m_model->get_log_probability_fmatrix(
			m_labels, samples);
	/* combine the three log-pdfs into importance weights */
	ASSERT(log_likelihood.vlen==num_importance_samples);
	ASSERT(log_likelihood.vlen==log_pdf_prior.vlen);
	ASSERT(log_likelihood.vlen==log_pdf_post_approx.vlen);

	SGVector<float64_t> sum(log_likelihood.vlen);
	for (index_t i=0; i<log_likelihood.vlen; ++i)
		sum[i]=log_likelihood[i]+log_pdf_prior[i]-log_pdf_post_approx[i];

	/* average the weights in log-space with a stable log-mean-exp */
	return CMath::log_mean_exp(sum);
}
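/* Aside: CMath::log_mean_exp computes log(1/n * sum_i exp(x_i)) without
 * overflow by shifting with the maximum, since
 * log(1/n * sum_i exp(x_i)) = m + log(1/n * sum_i exp(x_i - m)), m = max_i x_i.
 * A minimal standalone sketch of the same trick (illustration only, not
 * Shogun's implementation):
 */
#include <algorithm>
#include <cmath>
#include <vector>

double log_mean_exp_sketch(const std::vector<double>& x)
{
	/* shift by the maximum so no exponential overflows */
	const double m=*std::max_element(x.begin(), x.end());

	double s=0.0;
	for (double xi : x)
		s+=std::exp(xi-m);

	return m+std::log(s/static_cast<double>(x.size()));
}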
CMap<TParameter*, SGVector<float64_t> >*
CInferenceMethod::get_negative_log_marginal_likelihood_derivatives(
		CMap<TParameter*, CSGObject*>* params)
{
	REQUIRE(params->get_num_elements(), "Number of parameters should be greater "
			"than zero\n")

	/* recompute the posterior only if some parameter changed */
	if (update_parameter_hash())
		update();

	const index_t num_deriv=params->get_num_elements();
	CMap<TParameter*, SGVector<float64_t> >* result=new
			CMap<TParameter*, SGVector<float64_t> >(num_deriv, num_deriv);
	SG_REF(result);

	/* lock protects concurrent writes into the result map */
	CLock lock;
#ifdef HAVE_PTHREAD
	if (num_deriv<2)
	{
#endif /* HAVE_PTHREAD */
		/* single parameter: compute the derivative in the calling thread */
		for (index_t i=0; i<num_deriv; i++)
		{
			CMapNode<TParameter*, CSGObject*>* node=params->get_node_ptr(i);

			GRADIENT_THREAD_PARAM thread_params;

			thread_params.inf=this;
			thread_params.obj=node->data;
			thread_params.param=node->key;
			thread_params.grad=result;
			thread_params.lock=&lock;

			get_derivative_helper((void*) &thread_params);
		}
#ifdef HAVE_PTHREAD
	}
	else
	{
		/* one worker thread per parameter */
		pthread_t* threads=SG_MALLOC(pthread_t, num_deriv);
		GRADIENT_THREAD_PARAM* thread_params=SG_MALLOC(GRADIENT_THREAD_PARAM,
				num_deriv);

		for (index_t t=0; t<num_deriv; t++)
		{
			CMapNode<TParameter*, CSGObject*>* node=params->get_node_ptr(t);

			thread_params[t].inf=this;
			thread_params[t].obj=node->data;
			thread_params[t].param=node->key;
			thread_params[t].grad=result;
			thread_params[t].lock=&lock;

			pthread_create(&threads[t], NULL, CInferenceMethod::get_derivative_helper,
					(void*)&thread_params[t]);
		}

		for (index_t t=0; t<num_deriv; t++)
			pthread_join(threads[t], NULL);

		SG_FREE(thread_params);
		SG_FREE(threads);
	}
#endif /* HAVE_PTHREAD */

	return result;
}
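/* Aside: the branch above is the classic fan-out/join pattern with a
 * lock-guarded shared container. A self-contained sketch of the same idea
 * with C++11 primitives (illustration only, not part of Shogun), where the
 * per-parameter derivative is replaced by a placeholder computation:
 */
#include <mutex>
#include <thread>
#include <vector>

void fan_out_join_sketch(int num_tasks)
{
	std::mutex lock;              /* guards the shared result container */
	std::vector<double> result;   /* stands in for the gradient map */
	std::vector<std::thread> workers;

	for (int t=0; t<num_tasks; ++t)
	{
		workers.emplace_back([t, &lock, &result]()
		{
			double gradient=2.0*t;  /* placeholder for the real derivative */

			std::lock_guard<std::mutex> guard(lock);
			result.push_back(gradient);
		});
	}

	for (std::thread& w : workers)
		w.join();
}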
void* CInferenceMethod::get_derivative_helper(void* p)
{
	GRADIENT_THREAD_PARAM* thread_param=(GRADIENT_THREAD_PARAM*)p;

	CInferenceMethod* inf=thread_param->inf;
	CSGObject* obj=thread_param->obj;
	CMap<TParameter*, SGVector<float64_t> >* grad=thread_param->grad;
	TParameter* param=thread_param->param;
	CLock* lock=thread_param->lock;

	REQUIRE(param, "Parameter should not be NULL\n");
	REQUIRE(obj, "Object of the parameter should not be NULL\n");

	/* dispatch on the object that owns the parameter */
	SGVector<float64_t> gradient;

	if (obj==inf)
		gradient=inf->get_derivative_wrt_inference_method(param);
	else if (obj==inf->m_model)
		gradient=inf->get_derivative_wrt_likelihood_model(param);
	else if (obj==inf->m_kernel)
		gradient=inf->get_derivative_wrt_kernel(param);
	else if (obj==inf->m_mean)
		gradient=inf->get_derivative_wrt_mean(param);
	else
	{
		SG_SERROR("Can't compute derivative of negative log marginal "
				"likelihood wrt %s.%s parameter\n", obj->get_name(), param->m_name)
	}

	/* serialize writes into the shared gradient map */
	lock->lock();
	grad->add(param, gradient);
	lock->unlock();

	return NULL;
}
void CInferenceMethod::update()
{
	check_members();
	update_train_kernel();
}
void CInferenceMethod::check_members() const
{
	REQUIRE(m_features, "Training features should not be NULL\n")
	REQUIRE(m_features->get_num_vectors(),
			"Number of training features must be greater than zero\n")
	REQUIRE(m_labels, "Labels should not be NULL\n")
	REQUIRE(m_labels->get_num_labels(),
			"Number of labels must be greater than zero\n")
	REQUIRE(m_labels->get_num_labels()==m_features->get_num_vectors(),
			"Number of training vectors must match number of labels, which is "
			"%d, but number of training vectors is %d\n",
			m_labels->get_num_labels(), m_features->get_num_vectors())
	REQUIRE(m_kernel, "Kernel should not be NULL\n")
	REQUIRE(m_mean, "Mean function should not be NULL\n")
}
void CInferenceMethod::update_train_kernel()
{
	m_kernel->init(m_features, m_features);
	m_ktrtr=m_kernel->get_kernel_matrix();
}
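/* Aside: a hedged usage sketch of this API, assuming the concrete subclass
 * CExactInferenceMethod and the standard building blocks CGaussianKernel,
 * CZeroMean and CGaussianLikelihood are available; the function name and its
 * parameters are placeholders for previously created feature/label objects:
 */
float64_t nlml_example(CDenseFeatures<float64_t>* feats, CRegressionLabels* labels)
{
	CGaussianKernel* kernel=new CGaussianKernel(10, 2.0);
	CZeroMean* mean=new CZeroMean();
	CGaussianLikelihood* lik=new CGaussianLikelihood();

	CExactInferenceMethod* inf=new CExactInferenceMethod(kernel, feats, mean,
			labels, lik);

	/* should run check_members() and update_train_kernel() on demand
	 * before computing the result */
	float64_t nlml=inf->get_negative_log_marginal_likelihood();

	SG_UNREF(inf);
	return nlml;
}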