@@ -32,24 +32,45 @@ class GenModel(base.Generative):
         by default None
     """

-    def __init__(self,*,lambda_=1.0,h_alpha=1.0,h_beta=1.0,seed=None):
-        self.lambda_ = _check.pos_float(lambda_,'lambda_',ParameterFormatError)
-        self.h_alpha = _check.pos_float(h_alpha,'h_alpha',ParameterFormatError)
-        self.h_beta = _check.pos_float(h_beta,'h_beta',ParameterFormatError)
+    def __init__(self,lambda_=1.0,h_alpha=1.0,h_beta=1.0,seed=None):
         self.rng = np.random.default_rng(seed)

-    def set_h_params(self,h_alpha,h_beta):
+        # params
+        self.lambda_ = 1.0
+
+        # h_params
+        self.h_alpha = 1.0
+        self.h_beta = 1.0
+
+        self.set_params(lambda_)
+        self.set_h_params(h_alpha,h_beta)
+
+    def get_constants(self):
+        """Get constants of GenModel.
+
+        This model does not have any constants.
+        Therefore, this function returns an empty dict ``{}``.
+
+        Returns
+        -------
+        constants : an empty dict
+        """
+        return {}
+
+    def set_h_params(self,h_alpha=None,h_beta=None):
         """Set the hyperparameters of the prior distribution.

         Parameters
         ----------
-        h_alpha : float
-            a positive real number
-        h_beta : float
-            a positibe real number
+        h_alpha : float, optional
+            a positive real number, by default None
+        h_beta : float, optional
+            a positive real number, by default None
         """
-        self.h_alpha = _check.pos_float(h_alpha,'h_alpha',ParameterFormatError)
-        self.h_beta = _check.pos_float(h_beta,'h_beta',ParameterFormatError)
+        if h_alpha is not None:
+            self.h_alpha = _check.pos_float(h_alpha,'h_alpha',ParameterFormatError)
+        if h_beta is not None:
+            self.h_beta = _check.pos_float(h_beta,'h_beta',ParameterFormatError)
         return self

     def get_h_params(self):
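
The behavioural change in this hunk is that `set_h_params` now accepts `None` defaults and only overwrites the hyperparameters that are actually passed, and `__init__` routes everything through the setters so validation lives in one place. A minimal standalone sketch of that pattern (the class name and the `float()` validation are illustrative stand-ins, not the library's own code or `_check.pos_float`):

```python
# Minimal sketch of the None-guard setter pattern introduced above (illustrative only).
class TinyGenModel:
    def __init__(self, h_alpha=1.0, h_beta=1.0):
        self.h_alpha = 1.0
        self.h_beta = 1.0
        self.set_h_params(h_alpha, h_beta)

    def set_h_params(self, h_alpha=None, h_beta=None):
        if h_alpha is not None:       # None means "keep the current value"
            self.h_alpha = float(h_alpha)
        if h_beta is not None:
            self.h_beta = float(h_beta)
        return self

m = TinyGenModel()
m.set_h_params(h_alpha=2.0)   # only h_alpha changes
print(m.h_alpha, m.h_beta)    # 2.0 1.0
```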
@@ -71,15 +92,16 @@ def gen_params(self):
         self.lambda_ = self.rng.gamma(shape=self.h_alpha,scale=1.0/self.h_beta)
         return self

-    def set_params(self,lambda_):
+    def set_params(self,lambda_=None):
         """Set the parameter of the stochastic data generative model.

         Parameters
         ----------
-        lambda_ : float
-            a positive real number
+        lambda_ : float, optional
+            a positive real number, by default None
         """
-        self.lambda_ = _check.pos_float(lambda_,'lambda_',ParameterFormatError)
+        if lambda_ is not None:
+            self.lambda_ = _check.pos_float(lambda_,'lambda_',ParameterFormatError)
         return self

     def get_params(self):
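
For reference, `gen_params` draws `lambda_` from a gamma prior with shape `h_alpha` and rate `h_beta`; NumPy's generator is parameterised by scale, hence `scale=1.0/self.h_beta`. A quick standalone check of that parameterisation (the values are arbitrary):

```python
# lambda_ ~ Gamma(shape=h_alpha, rate=h_beta), written with NumPy's scale = 1/rate.
import numpy as np

h_alpha, h_beta = 2.0, 3.0
rng = np.random.default_rng(0)
draws = rng.gamma(shape=h_alpha, scale=1.0 / h_beta, size=100_000)
print(draws.mean())   # close to h_alpha / h_beta = 0.666..., the mean of Gamma(2, rate 3)
```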
@@ -175,34 +197,55 @@ class LearnModel(base.Posterior,base.PredictiveMixin):
     Attributes
     ----------
     hn_alpha : float
-        a positive real number, by default 1.0
+        a positive real number
     hn_beta : float
-        a positibe real number, by default 1.0
+        a positive real number
     p_r : float
-        a positive real number, by default 1.0
+        a positive real number
     p_theta : float
-        a real number in :math:`[0, 1]`, by default 0.5
+        a real number in :math:`[0, 1]`
     """

     def __init__(self,h0_alpha=1.0,h0_beta=1.0):
-        self.h0_alpha = _check.pos_float(h0_alpha,'h0_alpha',ParameterFormatError)
-        self.h0_beta = _check.pos_float(h0_beta,'h0_beta',ParameterFormatError)
-        self.hn_alpha = self.h0_alpha
-        self.hn_beta = self.h0_beta
-        self.p_r = self.hn_alpha
-        self.p_theta = 1.0/(1.0+self.hn_beta)
-
-    def set_h0_params(self,h0_alpha,h0_beta):
+        # h0_params
+        self.h0_alpha = 1.0
+        self.h0_beta = 1.0
+
+        # hn_params
+        self.hn_alpha = 1.0
+        self.hn_beta = 1.0
+
+        # p_params
+        self.p_r = 1.0
+        self.p_theta = 0.5
+
+        self.set_h0_params(h0_alpha,h0_beta)
+
+    def get_constants(self):
+        """Get constants of LearnModel.
+
+        This model does not have any constants.
+        Therefore, this function returns an empty dict ``{}``.
+
+        Returns
+        -------
+        constants : an empty dict
+        """
+        return {}
+
+    def set_h0_params(self,h0_alpha=None,h0_beta=None):
         """Set initial values of the hyperparameter of the posterior distribution.

         Parameters
         ----------
-        h0_alpha : float
-            a positive real number
-        h0_beta : float
-            a positibe real number
+        h0_alpha : float, optional
+            a positive real number, by default None
+        h0_beta : float, optional
+            a positive real number, by default None
         """
-        self.h0_alpha = _check.pos_float(h0_alpha,'h0_alpha',ParameterFormatError)
-        self.h0_beta = _check.pos_float(h0_beta,'h0_beta',ParameterFormatError)
+        if h0_alpha is not None:
+            self.h0_alpha = _check.pos_float(h0_alpha,'h0_alpha',ParameterFormatError)
+        if h0_beta is not None:
+            self.h0_beta = _check.pos_float(h0_beta,'h0_beta',ParameterFormatError)
         self.reset_hn_params()
         return self

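
The rewritten `LearnModel.__init__` follows the same delegation pattern: plain defaults first, then `set_h0_params`, which validates the inputs and calls `reset_hn_params()` so the `hn_*` and `p_*` attributes stay consistent with the prior. A compact sketch of that chain, using the formulas visible in the removed constructor (`p_r = hn_alpha`, `p_theta = 1/(1+hn_beta)`); the class and method bodies are illustrative, not the library's implementation:

```python
# Illustrative sketch of the initialisation chain: __init__ -> set_h0_params
# -> reset_hn_params -> calc_pred_dist (formulas taken from the removed code).
class TinyLearnModel:
    def __init__(self, h0_alpha=1.0, h0_beta=1.0):
        self.h0_alpha = self.h0_beta = 1.0
        self.hn_alpha = self.hn_beta = 1.0
        self.p_r, self.p_theta = 1.0, 0.5
        self.set_h0_params(h0_alpha, h0_beta)

    def set_h0_params(self, h0_alpha=None, h0_beta=None):
        if h0_alpha is not None:
            self.h0_alpha = float(h0_alpha)
        if h0_beta is not None:
            self.h0_beta = float(h0_beta)
        self.reset_hn_params()
        return self

    def reset_hn_params(self):
        self.hn_alpha, self.hn_beta = self.h0_alpha, self.h0_beta
        self.calc_pred_dist()
        return self

    def calc_pred_dist(self):
        self.p_r = self.hn_alpha
        self.p_theta = 1.0 / (1.0 + self.hn_beta)
        return self

m = TinyLearnModel(h0_alpha=2.0, h0_beta=4.0)
print(m.hn_alpha, m.hn_beta, m.p_r, m.p_theta)   # 2.0 4.0 2.0 0.2
```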
@@ -217,18 +260,20 @@ def get_h0_params(self):
         """
         return {"h0_alpha":self.h0_alpha, "h0_beta":self.h0_beta}

-    def set_hn_params(self,hn_alpha,hn_beta):
+    def set_hn_params(self,hn_alpha=None,hn_beta=None):
         """Set updated values of the hyperparameter of the posterior distribution.

         Parameters
         ----------
-        hn_alpha : float
-            a positive real number
-        hn_beta : float
-            a positibe real number
+        hn_alpha : float, optional
+            a positive real number, by default None
+        hn_beta : float, optional
+            a positive real number, by default None
         """
-        self.hn_alpha = _check.pos_float(hn_alpha,'hn_alpha',ParameterFormatError)
-        self.hn_beta = _check.pos_float(hn_beta,'hn_beta',ParameterFormatError)
+        if hn_alpha is not None:
+            self.hn_alpha = _check.pos_float(hn_alpha,'hn_alpha',ParameterFormatError)
+        if hn_beta is not None:
+            self.hn_beta = _check.pos_float(hn_beta,'hn_beta',ParameterFormatError)
         self.calc_pred_dist()
         return self

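
`set_hn_params` ends by calling `calc_pred_dist()`, so the predictive parameters are always re-derived from the current `hn_alpha` and `hn_beta`. Assuming the usual Poisson likelihood with a gamma posterior (which is what `gen_params` above suggests), the predictive distribution is negative binomial with `p_r = hn_alpha` and `p_theta = 1/(1+hn_beta)`. A quick Monte Carlo sanity check of that relationship:

```python
# Hedged sanity check: sample lambda from Gamma(hn_alpha, rate hn_beta), then x
# from Poisson(lambda), and compare with the negative binomial pmf implied by
# p_r = hn_alpha, p_theta = 1/(1+hn_beta).  Assumes a Poisson-gamma model.
import numpy as np
from scipy import stats

hn_alpha, hn_beta = 3.0, 2.0
rng = np.random.default_rng(1)

lam = rng.gamma(shape=hn_alpha, scale=1.0 / hn_beta, size=200_000)
x_new = rng.poisson(lam)

p_r = hn_alpha
p_theta = 1.0 / (1.0 + hn_beta)
# scipy's nbinom counts failures with success probability p, so p = 1 - p_theta here.
print(np.mean(x_new == 2), stats.nbinom.pmf(2, n=p_r, p=1.0 - p_theta))  # both ~0.198
```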
@@ -243,28 +288,6 @@ def get_hn_params(self):
         """
         return {"hn_alpha":self.hn_alpha, "hn_beta":self.hn_beta}

-    def overwrite_h0_params(self):
-        """Overwrite the initial values of the hyperparameters of the posterior distribution by the learned values.
-
-        They are overwritten by `self.hn_alpha` and `self.hn_beta`.
-        Note that the parameters of the predictive distribution are also calculated from `self.hn_alpha` and `self.hn_beta`.
-        """
-        self.h0_alpha = self.hn_alpha
-        self.h0_beta = self.hn_beta
-        self.calc_pred_dist()
-        return self
-
-    def reset_hn_params(self):
-        """Reset the hyperparameters of the posterior distribution to their initial values.
-
-        They are reset to `self.h0_alpha` and `self.h0_beta`.
-        Note that the parameters of the predictive distribution are also calculated from `self.h0_alpha` and `self.h0_beta`.
-        """
-        self.hn_alpha = self.h0_alpha
-        self.hn_beta = self.h0_beta
-        self.calc_pred_dist()
-        return self
-
     def update_posterior(self,x):
         """Update the hyperparameters of the posterior distribution using training data.

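
`overwrite_h0_params` and `reset_hn_params` are deleted here without a replacement in this hunk; presumably that logic now lives in shared base-class code, though the diff itself does not show where (that location is an assumption). Their removed semantics were simply to copy one set of hyperparameters onto the other and refresh the predictive distribution:

```python
# Semantics of the two removed methods, written as free functions for clarity
# (illustrative only; the diff does not show where this logic now lives).
def overwrite_h0_params(model):
    model.h0_alpha, model.h0_beta = model.hn_alpha, model.hn_beta
    model.calc_pred_dist()
    return model

def reset_hn_params(model):
    model.hn_alpha, model.hn_beta = model.h0_alpha, model.h0_beta
    model.calc_pred_dist()
    return model
```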
@@ -275,6 +298,15 @@ def update_posterior(self,x):
         """
         _check.nonneg_ints(x,'x',DataFormatError)
         self.hn_alpha += np.sum(x)
+        try:
+            self.hn_beta += x.size
+        except:
+            self.hn_beta += 1
+        return self
+
+    def _update_posterior(self,x):
+        """Update posterior without input check."""
+        self.hn_alpha += np.sum(x)
         self.hn_beta += x.size
         return self

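
The posterior update itself is the standard gamma-Poisson one: `hn_alpha` grows by the sum of the observations and `hn_beta` by the number of observations, and the `try`/`except` falls back to adding 1 when `x` is a plain Python int, which has no `.size` attribute. A standalone check of that arithmetic (written with `except AttributeError`, the specific exception the fallback is guarding against):

```python
# Check of the update rule: hn_alpha += sum(x), hn_beta += number of observations,
# with a scalar observation counting as one data point.
import numpy as np

hn_alpha, hn_beta = 1.0, 1.0

x = np.array([2, 0, 3, 1])
hn_alpha += np.sum(x)        # 1 + 6 = 7
hn_beta += x.size            # 1 + 4 = 5

x_scalar = 4                 # a bare int has no .size, hence the fallback branch
hn_alpha += np.sum(x_scalar)
try:
    hn_beta += x_scalar.size
except AttributeError:
    hn_beta += 1

print(hn_alpha, hn_beta)     # 11.0 6.0
```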