@@ -68,22 +68,22 @@ gbt.model.complexity <- function(model){
6868    initial_raw_prediction  =  model $ initialPred 
6969
7070    res  <-  list (
71-       loss_function  =  loss_function ,
72-       nrounds  =  model $ get_num_trees(),
73-       learning_rate  =  model $ get_learning_rate(),
74-       initial_raw_prediction  =  initial_raw_prediction ,
75-       initial_prediction  =  transform_prediction(initial_raw_prediction , loss_function ),
71+       " loss_function" =  loss_function ,
72+       " nrounds" =  model $ get_num_trees(),
73+       " learning_rate" =  model $ get_learning_rate(),
74+       " initial_raw_prediction" =  initial_raw_prediction ,
75+       " initial_prediction" =  transform_prediction(initial_raw_prediction , loss_function ),
7676      #  # tree parameters
77-       max_depth  =  max(model $ get_tree_depths()),
78-       min_loss_reductions  =  model $ get_max_node_optimism(),
79-       sum_hessian_weights  =  model $ get_min_hessian_weights(),
80-       number_of_leaves  =  max(model $ get_num_leaves()),
77+       " max_depth" =  max(model $ get_tree_depths()),
78+       " min_loss_reductions" =  model $ get_max_node_optimism(),
79+       " sum_hessian_weights" =  model $ get_min_hessian_weights(),
80+       " number_of_leaves" =  max(model $ get_num_leaves()),
8181      #  # objective
82-       l1_regularization  =  0.0 ,
83-       l2_regularization  =  0.0 ,
82+       " l1_regularization" =  0.0 ,
83+       " l2_regularization" =  0.0 ,
8484      #  # sampling
85-       row_subsampling  =  1.0 ,
86-       column_subsampling  =  1.0 
85+       " row_subsampling" =  1.0 ,
86+       " column_subsampling" =  1.0 
8787    )
8888    return (res )
8989}
@@ -114,8 +114,9 @@ gbt.model.complexity <- function(model){
114114# ' yte <- rnorm(n, xte, 1)
115115# ' model <- gbt.train(ytr, xtr, learning_rate = 0.1)
116116# ' gbt.complexity(model, type="xgboost")
117- # ' gbt.complexity(model, type="lightgbm)
117+ # ' gbt.complexity(model, type="lightgbm" )
118118# ' ## See demo(topic="gbt-complexity", package="agtboost")
119+ # ' }
119120# ' 
120121# ' @importFrom graphics barplot mtext par
121122# ' @rdname gbt.complexity
@@ -129,51 +130,50 @@ gbt.complexity <- function(model, type){
129130    #  Setup or get agtboost implicit parameters/complexity measures
130131    model_complexity  <-  gbt.model.complexity(model )
131132
132-     
133133    #  Transform parameters/complexity into library-specific parameters
134-     attach (model_complexity ) 
135-     if (type == "xgboost"){
136-       #  Transform agtboost parameters/complexity measures to xgboost parameters
137-       parameters   =  list (
138-         #  ensemble param
139-         " base_score" =  initial_prediction ,
140-         " nrounds" =  nrounds ,
141-         " learning_rate" =  learning_rate ,
142-         #  tree param
143-         " max_depth" =  max_depth ,
144-         " gamma" =  min_loss_reductions ,
145-         " min_child_weight" =  sum_hessian_weights ,
146-         " max_leaves" =  number_of_leaves ,
147-         "grow_policy" = "lossguide",
148-         #  objective
149-         " objective" =  loss_to_xgbloss(loss_function ),
150-         " alpha" =  0.0 ,
151-         " lambda" =  0.0 ,
152-         #  subsampling
153-         " subsample" =  1.0 ,
154-         " colsample_bytree" =  1.0 
155-       )
156-     }else if (type == "lightgbm"){
157-       #  Transform agtboost parameters/complexity measures to lightgbm parameters
158-       parameters   =  list (
159-         #  ensemble param
160-         " init_score" =  initial_prediction ,
161-         " nrounds" =  nrounds ,
162-         " learning_rate" =  learning_rate ,
163-         #  tree param
164-         " max_depth" =  max_depth ,
165-         " min_gain_to_split" =  min_loss_reductions ,
166-         " min_sum_hessian_in_leaf" =  sum_hessian_weights ,
167-         " num_leaves" =  number_of_leaves ,
168-         #  objective
169-         " objective" =  loss_to_lgbloss(loss_function ),
170-         " lambda_l1" =  0.0 ,
171-         " lambda_l2" =  0.0 ,
172-         #  subsampling
173-         " bagging_fraction" =  1.0 ,
174-         " feature_fraction" =  1.0 
175-       )  
176-     }
177-     detach( model_complexity )
134+     parameters   =  with (model_complexity , 
135+           if (type == "xgboost"){
136+             #  Transform agtboost parameters/complexity measures to xgboost parameters
137+             list (
138+               #  ensemble param
139+               " base_score" =  initial_prediction ,
140+               " nrounds" =  nrounds ,
141+               " learning_rate" =  learning_rate ,
142+               #  tree param
143+               " max_depth" =  max_depth ,
144+               " gamma" =  min_loss_reductions ,
145+               " min_child_weight" =  sum_hessian_weights ,
146+               " max_leaves" =  number_of_leaves ,
147+             "grow_policy" = "lossguide",
148+               #  objective
149+               " objective" =  loss_to_xgbloss(loss_function ),
150+               " alpha" =  0.0 ,
151+               " lambda" =  0.0 ,
152+               #  subsampling
153+               " subsample" =  1.0 ,
154+               " colsample_bytree" =  1.0 
155+             )
156+           }else if (type == "lightgbm"){
157+             #  Transform agtboost parameters/complexity measures to lightgbm parameters
158+             list (
159+               #  ensemble param
160+               " init_score" =  initial_prediction ,
161+               " nrounds" =  nrounds ,
162+               " learning_rate" =  learning_rate ,
163+               #  tree param
164+               " max_depth" =  max_depth ,
165+               " min_gain_to_split" =  min_loss_reductions ,
166+               " min_sum_hessian_in_leaf" =  sum_hessian_weights ,
167+               " num_leaves" =  number_of_leaves ,
168+               #  objective
169+               " objective" =  loss_to_lgbloss(loss_function ),
170+               " lambda_l1" =  0.0 ,
171+               " lambda_l2" =  0.0 ,
172+               #  subsampling
173+               " bagging_fraction" =  1.0 ,
174+               " feature_fraction" =  1.0 
175+              ) 
176+           }
177+     )
178178    return (parameters )
179179}
0 commit comments