
Commit ae5a431

Gil committed

sg_add refactor on GPL branch

* Replaced the SG_ADD macro calls with the new ParameterProperties enum class.
* This was done in the same way as in PR shogun-toolbox/shogun#4417.
1 parent 79d42cb commit ae5a431

14 files changed: +62 −67 lines
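The migration pattern applied in every file below mirrors shogun-toolbox/shogun#4417: parameters that were registered with MS_AVAILABLE for model selection now carry ParameterProperties::HYPER, and the MS_NOT_AVAILABLE flag is dropped entirely. A minimal before/after sketch of the pattern, using hypothetical members m_example and m_buffer rather than code from this commit:

    // Before: the fourth SG_ADD argument was a model-selection flag.
    SG_ADD(&m_example, "example", "a tunable hyperparameter", MS_AVAILABLE);
    SG_ADD(&m_buffer, "buffer", "an internal buffer", MS_NOT_AVAILABLE);

    // After: tunable parameters are tagged via the ParameterProperties
    // enum class; everything else simply omits the flag.
    SG_ADD(&m_example, "example", "a tunable hyperparameter", ParameterProperties::HYPER);
    SG_ADD(&m_buffer, "buffer", "an internal buffer");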

shogun/classifier/FeatureBlockLogisticRegression.cpp

Lines changed: 7 additions & 7 deletions
@@ -57,13 +57,13 @@ CFeatureBlockLogisticRegression::~CFeatureBlockLogisticRegression()
 
 void CFeatureBlockLogisticRegression::register_parameters()
 {
-    SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation", MS_NOT_AVAILABLE);
-    SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
-    SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
-    SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
-    SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
-    SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
+    SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation");
+    SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
+    SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
+    SG_ADD(&m_termination, "termination", "termination");
+    SG_ADD(&m_regularization, "regularization", "regularization");
+    SG_ADD(&m_tolerance, "tolerance", "tolerance");
+    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
 }
 
 CIndexBlockRelation* CFeatureBlockLogisticRegression::get_feature_relation() const

shogun/classifier/svm/SVMLin.cpp

Lines changed: 4 additions & 4 deletions
@@ -43,12 +43,12 @@ CSVMLin::~CSVMLin()
 void CSVMLin::init()
 {
     SG_ADD(
-        &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
+        &use_bias, "use_bias", "Indicates if bias is used.");
     SG_ADD(
-        &C1, "C1", "C constant for negatively labeled examples.", MS_AVAILABLE);
+        &C1, "C1", "C constant for negatively labeled examples.", ParameterProperties::HYPER);
     SG_ADD(
-        &C2, "C2", "C constant for positively labeled examples.", MS_AVAILABLE);
-    SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
+        &C2, "C2", "C constant for positively labeled examples.", ParameterProperties::HYPER);
+    SG_ADD(&epsilon, "epsilon", "Convergence precision.");
 }
 
 bool CSVMLin::train_machine(CFeatures* data)

shogun/classifier/svm/SVMSGD.cpp

Lines changed: 9 additions & 9 deletions
@@ -213,16 +213,16 @@ void CSVMSGD::init()
     loss=new CHingeLoss();
     SG_REF(loss);
 
-    SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
-    SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
-    SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE);
-    SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE);
-    SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE);
-    SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
-    SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
+    SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
+    SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
+    SG_ADD(&wscale, "wscale", "W scale");
+    SG_ADD(&bscale, "bscale", "b scale");
+    SG_ADD(&epochs, "epochs", "epochs");
+    SG_ADD(&skip, "skip", "skip");
+    SG_ADD(&count, "count", "count");
     SG_ADD(
-        &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
+        &use_bias, "use_bias", "Indicates if bias is used.");
     SG_ADD(
         &use_regularized_bias, "use_regularized_bias",
-        "Indicates if bias is regularized.", MS_NOT_AVAILABLE);
+        "Indicates if bias is regularized.");
 }

shogun/kernel/string/DistantSegmentsKernel.cpp

Lines changed: 2 additions & 2 deletions
@@ -43,8 +43,8 @@ bool CDistantSegmentsKernel::init(CFeatures* l, CFeatures* r)
 
 void CDistantSegmentsKernel::init()
 {
-    SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", MS_AVAILABLE);
-    SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", MS_AVAILABLE);
+    SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", ParameterProperties::HYPER);
+    SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", ParameterProperties::HYPER);
 }
 
 float64_t CDistantSegmentsKernel::compute(int32_t idx_a, int32_t idx_b)

shogun/mathematics/SparseInverseCovariance.cpp

Lines changed: 5 additions & 6 deletions
@@ -31,12 +31,11 @@ CSparseInverseCovariance::~CSparseInverseCovariance()
 void CSparseInverseCovariance::register_parameters()
 {
     SG_ADD(&m_lasso_max_iter,"lasso_max_iter",
-        "maximum iteration of LASSO step",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter,"max_iter","maximum total iteration",
-        MS_NOT_AVAILABLE);
-    SG_ADD(&m_f_gap,"f_gap","f gap",MS_NOT_AVAILABLE);
-    SG_ADD(&m_x_gap,"x_gap","x gap",MS_NOT_AVAILABLE);
-    SG_ADD(&m_xtol,"xtol","xtol",MS_NOT_AVAILABLE);
+        "maximum iteration of LASSO step");
+    SG_ADD(&m_max_iter,"max_iter","maximum total iteration");
+    SG_ADD(&m_f_gap,"f_gap","f gap");
+    SG_ADD(&m_x_gap,"x_gap","x gap");
+    SG_ADD(&m_xtol,"xtol","xtol");
 }
 
 SGMatrix<float64_t> CSparseInverseCovariance::estimate(SGMatrix<float64_t> S, float64_t lambda_c)

shogun/multiclass/MulticlassLogisticRegression.cpp

Lines changed: 3 additions & 3 deletions
@@ -42,9 +42,9 @@ void CMulticlassLogisticRegression::init_defaults()
 
 void CMulticlassLogisticRegression::register_parameters()
 {
-    SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
-    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
+    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
+    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
 }
 
 CMulticlassLogisticRegression::~CMulticlassLogisticRegression()

shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp

Lines changed: 3 additions & 3 deletions
@@ -41,9 +41,9 @@ void CMulticlassTreeGuidedLogisticRegression::init_defaults()
 
 void CMulticlassTreeGuidedLogisticRegression::register_parameters()
 {
-    SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
-    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
+    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
+    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
 }
 
 CMulticlassTreeGuidedLogisticRegression::~CMulticlassTreeGuidedLogisticRegression()

shogun/optimization/NLOPTMinimizer.cpp

Lines changed: 4 additions & 4 deletions
@@ -57,13 +57,13 @@ void CNLOPTMinimizer::init()
     m_target_variable=SGVector<float64_t>();
     set_nlopt_parameters();
     SG_ADD(&m_max_iterations, "CNLOPTMinimizer__m_max_iterations",
-        "max_iterations in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "max_iterations in CNLOPTMinimizer");
     SG_ADD(&m_variable_tolerance, "CNLOPTMinimizer__m_variable_tolerance",
-        "variable_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "variable_tolerance in CNLOPTMinimizer");
     SG_ADD(&m_function_tolerance, "CNLOPTMinimizer__m_function_tolerance",
-        "function_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "function_tolerance in CNLOPTMinimizer");
     SG_ADD(&m_nlopt_algorithm_id, "CNLOPTMinimizer__m_nlopt_algorithm_id",
-        "nlopt_algorithm_id in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "nlopt_algorithm_id in CNLOPTMinimizer");
 #endif
 }
shogun/structure/DualLibQPBMSOSVM.cpp

Lines changed: 9 additions & 13 deletions
@@ -55,21 +55,17 @@ CDualLibQPBMSOSVM::~CDualLibQPBMSOSVM()
 
 void CDualLibQPBMSOSVM::init()
 {
-    SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", MS_AVAILABLE);
-    SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", MS_AVAILABLE);
-    SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", MS_AVAILABLE);
-    SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda",
-        MS_AVAILABLE);
-    SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag",
-        MS_AVAILABLE);
+    SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", ParameterProperties::HYPER);
+    SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", ParameterProperties::HYPER);
+    SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", ParameterProperties::HYPER);
+    SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda");
+    SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag");
     SG_ADD(&m_cleanAfter,
         "m_cleanAfter",
-        "Number of inactive iterations after which ICP will be removed",
-        MS_AVAILABLE);
-    SG_ADD(&m_K, "m_K", "Parameter K", MS_NOT_AVAILABLE);
-    SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", MS_AVAILABLE);
-    SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models",
-        MS_AVAILABLE);
+        "Number of inactive iterations after which ICP will be removed");
+    SG_ADD(&m_K, "m_K", "Parameter K");
+    SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", ParameterProperties::HYPER);
+    SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models");
 
     set_TolRel(0.001);
     set_TolAbs(0.0);

shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp

Lines changed: 2 additions & 2 deletions
@@ -49,8 +49,8 @@ CMultitaskL12LogisticRegression::CMultitaskL12LogisticRegression(
 
 void CMultitaskL12LogisticRegression::init()
 {
-    SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter",MS_AVAILABLE);
-    SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter",MS_AVAILABLE);
+    SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter", ParameterProperties::HYPER);
+    SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter", ParameterProperties::HYPER);
 }
 
 void CMultitaskL12LogisticRegression::set_rho1(float64_t rho1)
