diff --git a/src/algorithms/__init__.py b/src/algorithms/__init__.py new file mode 100644 index 0000000..6c1bacd --- /dev/null +++ b/src/algorithms/__init__.py @@ -0,0 +1,6 @@ +from src.algorithms.semiparam_algorithms.nvm_semi_param_algorithms.mu_estimation import SemiParametricMuEstimation +from src.register.algorithm_purpose import AlgorithmPurpose +from src.register.register import Registry + +ALGORITHM_REGISTRY: Registry = Registry() +ALGORITHM_REGISTRY.register("mu_estimation", AlgorithmPurpose.NMV_SEMIPARAMETRIC)(SemiParametricMuEstimation) diff --git a/src/algorithms/param_algorithms/__init__.py b/src/algorithms/param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/param_algorithms/nm_param_algorithms/__init__.py b/src/algorithms/param_algorithms/nm_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/param_algorithms/nv_param_algorithms/__init__.py b/src/algorithms/param_algorithms/nv_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/param_algorithms/nvm_param_algorithms/__init__.py b/src/algorithms/param_algorithms/nvm_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/semiparam_algorithms/__init__.py b/src/algorithms/semiparam_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/semiparam_algorithms/nm_semi_param_algorithms/__init__.py b/src/algorithms/semiparam_algorithms/nm_semi_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/semiparam_algorithms/nv_semi_param_algorithms/__init__.py b/src/algorithms/semiparam_algorithms/nv_semi_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/semiparam_algorithms/nvm_semi_param_algorithms/__init__.py b/src/algorithms/semiparam_algorithms/nvm_semi_param_algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/algorithms/nvm_semi_param_algorithms/mu_estimation.py b/src/algorithms/semiparam_algorithms/nvm_semi_param_algorithms/mu_estimation.py similarity index 82% rename from src/algorithms/nvm_semi_param_algorithms/mu_estimation.py rename to src/algorithms/semiparam_algorithms/nvm_semi_param_algorithms/mu_estimation.py index 0dd74c4..472759c 100644 --- a/src/algorithms/nvm_semi_param_algorithms/mu_estimation.py +++ b/src/algorithms/semiparam_algorithms/nvm_semi_param_algorithms/mu_estimation.py @@ -1,10 +1,12 @@ import math -from typing import Callable, TypedDict, Unpack +from typing import Callable, Optional, TypedDict, Unpack import mpmath import numpy as np from numpy import _typing +from src.estimators.estimate_result import EstimateResult + M_DEFAULT_VALUE = 1000 TOLERANCE_DEFAULT_VALUE = 10**-5 OMEGA_DEFAULT_VALUE = lambda x: -1 * math.sin(x) if abs(x) <= math.pi else 0 @@ -33,7 +35,7 @@ class ParamsAnnotation(TypedDict, total=False): omega: Callable[[float], float] max_iterations: float - def __init__(self, sample: _typing.ArrayLike = None, **kwargs: Unpack[ParamsAnnotation]): + def __init__(self, sample: Optional[_typing.ArrayLike] = None, **kwargs: Unpack[ParamsAnnotation]): self.sample = np.array([]) if sample is None else sample self.m, self.tolerance, self.omega, self.max_iterations = self._validate_kwargs(**kwargs) @@ -82,7 +84,7 @@ def __w(self, p: float, sample: np._typing.NDArray) -> float: y += e * self.omega(x) return y - def algorithm(self, sample: 
np._typing.NDArray) -> float: + def algorithm(self, sample: np._typing.NDArray) -> EstimateResult: """Root of this function is an estimation of mu Args: @@ -91,25 +93,25 @@ def algorithm(self, sample: np._typing.NDArray) -> float: Returns: estimated mu value """ if self.__w(0, sample) == 0: - return 0 + return EstimateResult(value=0, success=True) if self.__w(0, sample) > 0: - return -1 * self.algorithm(-1 * sample) + second_result = self.algorithm(-1 * sample) + return EstimateResult(-1 * second_result.value, second_result.success) if self.__w(self.m, sample) < 0: - return self.m + return EstimateResult(value=self.m, success=False) left, right = 0.0, self.m iteration = 0 while left <= right: mid = (right + left) / 2 if iteration > self.max_iterations: - return mid + return EstimateResult(value=mid, success=False) iteration += 1 if abs(self.__w(mid, sample)) < self.tolerance: - return mid + return EstimateResult(value=mid, success=True) elif self.__w(mid, sample) < 0: left = mid else: right = mid - return -1 + return EstimateResult(value=-1, success=False)
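
The bisection algorithm above now reports convergence explicitly through EstimateResult instead of sentinel floats. A minimal sketch of calling it directly, assuming a pre-generated sample (the keyword arguments come from ParamsAnnotation; the data here is a placeholder, not a real NVM sample):

    import numpy as np

    from src.algorithms.semiparam_algorithms.nvm_semi_param_algorithms.mu_estimation import SemiParametricMuEstimation

    sample = np.random.standard_normal(10_000)  # placeholder data for illustration only
    estimation = SemiParametricMuEstimation(sample, m=10, tolerance=1e-6)
    result = estimation.algorithm(np.asarray(sample))
    # result.value holds the bisection root; result.success is False when the
    # iteration cap or the m bound was hit before the tolerance was reached.
    print(result.value, result.success)
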
+ """ + + self.algorithm_name = algorithm_name + if params is None: + self.params = dict() + else: + self.params = params + self.estimate_result = EstimateResult() + self._registry = ALGORITHM_REGISTRY + self._purpose = AlgorithmPurpose.DEFAULT + + def get_params(self) -> dict: + return {"algorithm_name": self.algorithm_name, "params": self.params, "estimated_result": self.estimate_result} + + def set_params(self, algorithm_name: str, params: dict | None = None) -> Self: + self.algorithm_name = algorithm_name + if params is None: + self.params = dict() + else: + self.params = params + return self + + def get_available_algorithms(self) -> list[str]: + """Get all algorithms that can be used for current estimator class""" + return [key[0] for key in self._registry.register_of_names.keys() if key[1] == self._purpose] + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + """Applies an algorithm to the given sample + + Args: + sample: sample of the analysed distribution + + """ + cls = None + if (self.algorithm_name, self._purpose) in self._registry.register_of_names: + cls = self._registry.dispatch(self.algorithm_name, self._purpose)(sample, **self.params) + if cls is None: + raise ValueError("This algorithm does not exist") + self.estimate_result = cls.algorithm(sample) + return self.estimate_result diff --git a/src/estimators/estimate_result.py b/src/estimators/estimate_result.py new file mode 100644 index 0000000..ee8f494 --- /dev/null +++ b/src/estimators/estimate_result.py @@ -0,0 +1,8 @@ +from dataclasses import dataclass + + +@dataclass +class EstimateResult: + value: float = -1 + success: bool = False + message: str = "No message" diff --git a/src/estimators/parametric/abstract_parametric_estimator.py b/src/estimators/parametric/abstract_parametric_estimator.py new file mode 100644 index 0000000..7951e4e --- /dev/null +++ b/src/estimators/parametric/abstract_parametric_estimator.py @@ -0,0 +1,12 @@ +from numpy import _typing + +from src.estimators.abstract_estimator import AbstractEstimator +from src.estimators.estimate_result import EstimateResult + + +class AbstractParametricEstimator(AbstractEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/parametric/nm_parametric_estimator.py b/src/estimators/parametric/nm_parametric_estimator.py new file mode 100644 index 0000000..c505758 --- /dev/null +++ b/src/estimators/parametric/nm_parametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result import EstimateResult +from src.estimators.parametric.abstract_parametric_estimator import AbstractParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose + + +class NMParametricEstimator(AbstractParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NM_PARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/parametric/nmv_parametric_estimator.py b/src/estimators/parametric/nmv_parametric_estimator.py new file mode 100644 index 0000000..c80183c --- /dev/null +++ b/src/estimators/parametric/nmv_parametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result 
import EstimateResult +from src.estimators.parametric.abstract_parametric_estimator import AbstractParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose + + +class NMVParametricEstimator(AbstractParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NMV_PARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/parametric/nv_parametric_estimator.py b/src/estimators/parametric/nv_parametric_estimator.py new file mode 100644 index 0000000..fc3e108 --- /dev/null +++ b/src/estimators/parametric/nv_parametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result import EstimateResult +from src.estimators.parametric.abstract_parametric_estimator import AbstractParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose + + +class NVParametricEstimator(AbstractParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NV_PARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/semiparametric/abstract_semiparametric_estimator.py b/src/estimators/semiparametric/abstract_semiparametric_estimator.py new file mode 100644 index 0000000..11b4ee2 --- /dev/null +++ b/src/estimators/semiparametric/abstract_semiparametric_estimator.py @@ -0,0 +1,14 @@ +from abc import abstractmethod + +from numpy import _typing + +from src.estimators.abstract_estimator import AbstractEstimator +from src.estimators.estimate_result import EstimateResult + + +class AbstractSemiParametricEstimator(AbstractEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/semiparametric/nm_semiparametric_estimator.py b/src/estimators/semiparametric/nm_semiparametric_estimator.py new file mode 100644 index 0000000..958ec5c --- /dev/null +++ b/src/estimators/semiparametric/nm_semiparametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result import EstimateResult +from src.estimators.semiparametric.abstract_semiparametric_estimator import AbstractSemiParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose + + +class NMSemiParametricEstimator(AbstractSemiParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NM_SEMIPARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/semiparametric/nmv_semiparametric_estimator.py b/src/estimators/semiparametric/nmv_semiparametric_estimator.py new file mode 100644 index 0000000..e1f456d --- /dev/null +++ b/src/estimators/semiparametric/nmv_semiparametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result import EstimateResult +from src.estimators.semiparametric.abstract_semiparametric_estimator import AbstractSemiParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose 
+ + +class NMVSemiParametricEstimator(AbstractSemiParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NMV_SEMIPARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/estimators/semiparametric/nv_semiparametric_estimator.py b/src/estimators/semiparametric/nv_semiparametric_estimator.py new file mode 100644 index 0000000..95f0ce5 --- /dev/null +++ b/src/estimators/semiparametric/nv_semiparametric_estimator.py @@ -0,0 +1,14 @@ +from numpy import _typing + +from src.estimators.estimate_result import EstimateResult +from src.estimators.semiparametric.abstract_semiparametric_estimator import AbstractSemiParametricEstimator +from src.register.algorithm_purpose import AlgorithmPurpose + + +class NVSemiParametricEstimator(AbstractSemiParametricEstimator): + def __init__(self, algorithm_name: str, params: dict | None = None) -> None: + super().__init__(algorithm_name, params) + self._purpose = AlgorithmPurpose.NV_SEMIPARAMETRIC + + def estimate(self, sample: _typing.ArrayLike) -> EstimateResult: + return super().estimate(sample) diff --git a/src/generators/abstract_generator.py b/src/generators/abstract_generator.py new file mode 100644 index 0000000..da0ad33 --- /dev/null +++ b/src/generators/abstract_generator.py @@ -0,0 +1,35 @@ +from abc import abstractmethod + +import numpy._typing as tpg + +from src.mixtures.abstract_mixture import AbstractMixtures + + +class AbstractGenerator: + @staticmethod + @abstractmethod + def canonical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: ... + + """Generate a sample of given size. Canonical form of Mixture + + Args: + mixture: NMM | NVM | NMVM + size: length of sample + + Returns: sample of given size + + """ + + @staticmethod + @abstractmethod + def classical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: ... + + """Generate a sample of given size. Classical form of Mixture + + Args: + mixture: NMM | NVM | NMVM + size: length of sample + + Returns: sample of given size + + """ diff --git a/src/generators/nm_generator.py b/src/generators/nm_generator.py new file mode 100644 index 0000000..4c95b0f --- /dev/null +++ b/src/generators/nm_generator.py @@ -0,0 +1,51 @@ +import numpy._typing as tpg +import scipy + +from src.generators.abstract_generator import AbstractGenerator +from src.mixtures.abstract_mixture import AbstractMixtures +from src.mixtures.nm_mixture import NormalMeanMixtures + + +class NMGenerator(AbstractGenerator): + + @staticmethod + def classical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size. Classical form of NMM + + Args: + mixture: Normal Mean Mixture + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture is not a Normal Mean Mixture + + """ + + if not isinstance(mixture, NormalMeanMixtures): + raise ValueError("Mixture must be NormalMeanMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return mixture.params.alpha + mixture.params.beta * mixing_values + mixture.params.gamma * normal_values + + @staticmethod + def canonical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size.
Canonical form of NMM + + Args: + mixture: Normal Mean Mixture + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture is not a Normal Mean Mixture + + """ + + if not isinstance(mixture, NormalMeanMixtures): + raise ValueError("Mixture must be NormalMeanMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return mixing_values + mixture.params.sigma * normal_values diff --git a/src/generators/nmv_generator.py b/src/generators/nmv_generator.py new file mode 100644 index 0000000..5ff3221 --- /dev/null +++ b/src/generators/nmv_generator.py @@ -0,0 +1,55 @@ +import numpy._typing as tpg +import scipy + +from src.generators.abstract_generator import AbstractGenerator +from src.mixtures.abstract_mixture import AbstractMixtures +from src.mixtures.nmv_mixture import NormalMeanVarianceMixtures + + +class NMVGenerator(AbstractGenerator): + + @staticmethod + def classical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size. Classical form of NMVM + + Args: + mixture: Normal Mean Variance Mixtures + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture type is not Normal Mean Variance Mixtures + + """ + + if not isinstance(mixture, NormalMeanVarianceMixtures): + raise ValueError("Mixture must be NormalMeanVarianceMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return ( + mixture.params.alpha + + mixture.params.beta * mixing_values + + mixture.params.gamma * (mixing_values**0.5) * normal_values + ) + + @staticmethod + def canonical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size. Canonical form of NMVM + + Args: + mixture: Normal Mean Variance Mixtures + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture type is not Normal Mean Variance Mixtures + + """ + + if not isinstance(mixture, NormalMeanVarianceMixtures): + raise ValueError("Mixture must be NormalMeanVarianceMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return mixture.params.alpha + mixture.params.mu * mixing_values + (mixing_values**0.5) * normal_values
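
Together with the mixture classes further below, generation is now a two-step affair: configure a mixture object, then hand it to the matching generator. A sketch under the new API, assuming scipy's expon as the mixing distribution:

    from scipy.stats import expon

    from src.generators.nmv_generator import NMVGenerator
    from src.mixtures.nmv_mixture import NormalMeanVarianceMixtures

    classical = NormalMeanVarianceMixtures("classical", alpha=0, beta=1, gamma=1, distribution=expon)
    canonical = NormalMeanVarianceMixtures("canonical", alpha=0, mu=1, distribution=expon)

    generator = NMVGenerator()
    x = generator.classical_generate(classical, 10_000)  # alpha + beta*W + gamma*sqrt(W)*N
    y = generator.canonical_generate(canonical, 10_000)  # alpha + mu*W + sqrt(W)*N
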
diff --git a/src/generators/nv_generator.py b/src/generators/nv_generator.py new file mode 100644 index 0000000..faa55f5 --- /dev/null +++ b/src/generators/nv_generator.py @@ -0,0 +1,51 @@ +import numpy._typing as tpg +import scipy + +from src.generators.abstract_generator import AbstractGenerator +from src.mixtures.abstract_mixture import AbstractMixtures +from src.mixtures.nv_mixture import NormalVarianceMixtures + + +class NVGenerator(AbstractGenerator): + + @staticmethod + def classical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size. Classical form of NVM + + Args: + mixture: Normal Variance Mixtures + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture type is not Normal Variance Mixtures + + """ + + if not isinstance(mixture, NormalVarianceMixtures): + raise ValueError("Mixture must be NormalVarianceMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return mixture.params.alpha + mixture.params.gamma * (mixing_values**0.5) * normal_values + + @staticmethod + def canonical_generate(mixture: AbstractMixtures, size: int) -> tpg.NDArray: + """Generate a sample of given size. Canonical form of NVM + + Args: + mixture: Normal Variance Mixtures + size: length of sample + + Returns: sample of given size + + Raises: + ValueError: If mixture type is not Normal Variance Mixtures + + """ + + if not isinstance(mixture, NormalVarianceMixtures): + raise ValueError("Mixture must be NormalVarianceMixtures") + mixing_values = mixture.params.distribution.rvs(size=size) + normal_values = scipy.stats.norm.rvs(size=size) + return mixture.params.alpha + (mixing_values**0.5) * normal_values diff --git a/src/mixtures/abstract_mixture.py b/src/mixtures/abstract_mixture.py index bf02793..0ffd3dc 100644 --- a/src/mixtures/abstract_mixture.py +++ b/src/mixtures/abstract_mixture.py @@ -1,71 +1,67 @@ from abc import ABCMeta, abstractmethod +from dataclasses import fields from typing import Any -import scipy -from numpy import _typing - -from src.register.register import Registry +from scipy.stats import rv_continuous +from scipy.stats.distributions import rv_frozen class AbstractMixtures(metaclass=ABCMeta): """Base class for Mixtures""" - def __init__(self) -> None: - self.param_collector: Registry = Registry() - self.semi_param_collector: Registry = Registry() + _classical_collector: Any + _canonical_collector: Any @abstractmethod - def classic_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a samples of given size. Classical form of Mixture - - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: alpha, betta, gamma for NMM - - Returns: samples of given size - + def __init__(self, mixture_form: str, **kwargs: Any) -> None: """ - @abstractmethod - def canonical_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a samples of given size. Canonical form of Mixture - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: theta for NMM - - Returns: samples of given size - + mixture_form: Form of the mixture, "classical" or "canonical" + **kwargs: Parameters of Mixture """ + if mixture_form == "classical": + self.params = self._params_validation(self._classical_collector, kwargs) + elif mixture_form == "canonical": + self.params = self._params_validation(self._canonical_collector, kwargs) + else: + raise ValueError(f"Unknown mixture form: {mixture_form}") @abstractmethod - def param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run parametric algorithm + def compute_moment(self) -> Any: ... - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm - - Returns: TODO + @abstractmethod + def compute_cdf(self) -> Any: ...
- """ + @abstractmethod + def compute_pdf(self) -> Any: ... @abstractmethod - def semi_param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run semi-parametric algorithm + def compute_logpdf(self) -> Any: ... + + def _params_validation(self, data_collector: Any, params: dict[str, float | rv_continuous | rv_frozen]) -> Any: + """Mixture Parameters Validation Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm + data_collector: Dataclass that collect parameters of Mixture + params: Input parameters - Returns: TODO + Returns: Instance of dataclass + + Raises: + ValueError: If given parameters is unexpected + ValueError: If parameter type is invalid + ValueError: If parameters age not given """ + + if len(params) == 0: + raise ValueError("Empty parameters dict") + dataclass_fields = fields(data_collector) + names_and_types = dict((field.name, field.type) for field in dataclass_fields) + for pair in params.items(): + if pair[0] not in names_and_types: + raise ValueError(f"Unexpected parameter {pair[0]}") + if not isinstance(pair[1], names_and_types[pair[0]]): + raise ValueError(f"Type missmatch: {pair[0]} should be {names_and_types[pair[0]]}, not {type(pair[1])}") + return data_collector(**params) diff --git a/src/mixtures/nm_mixture.py b/src/mixtures/nm_mixture.py index 17a4237..614244a 100644 --- a/src/mixtures/nm_mixture.py +++ b/src/mixtures/nm_mixture.py @@ -1,110 +1,65 @@ +from dataclasses import dataclass from typing import Any -import scipy -from numpy import _typing +from scipy.stats import rv_continuous +from scipy.stats.distributions import rv_frozen from src.mixtures.abstract_mixture import AbstractMixtures -class NormalMeanMixtures(AbstractMixtures): - def __init__(self) -> None: - super().__init__() - ... - - @staticmethod - def _classic_generate_params_validation(params: list[float]) -> tuple[float, float, float]: - """Validation parameters for classic generate for NMM +@dataclass +class _NMMClassicDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Args: - params: Parameters of Mixture. For example: alpha, beta, gamma for NMM + """Data Collector for parameters of classical NMM""" + alpha: float | int + beta: float | int + gamma: float | int + distribution: rv_frozen | rv_continuous - Returns: - params: alpha, beta, gamma for NMM - """ - if len(params) != 3: - raise ValueError("Expected 3 parameters") - alpha, beta, gamma = params - return alpha, beta, gamma +@dataclass +class _NMMCanonicalDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - @staticmethod - def _canonical_generate_params_validation(params: list[float]) -> float: - """Validation parameters for canonical generate for NMM + """Data Collector for parameters of canonical NMM""" + sigma: float | int + distribution: rv_frozen | rv_continuous - Args: - params: Parameters of Mixture. 
diff --git a/src/mixtures/nm_mixture.py b/src/mixtures/nm_mixture.py index 17a4237..614244a 100644 --- a/src/mixtures/nm_mixture.py +++ b/src/mixtures/nm_mixture.py @@ -1,110 +1,65 @@ +from dataclasses import dataclass from typing import Any -import scipy -from numpy import _typing +from scipy.stats import rv_continuous +from scipy.stats.distributions import rv_frozen from src.mixtures.abstract_mixture import AbstractMixtures -class NormalMeanMixtures(AbstractMixtures): - def __init__(self) -> None: - super().__init__() - ... - - @staticmethod - def _classic_generate_params_validation(params: list[float]) -> tuple[float, float, float]: - """Validation parameters for classic generate for NMM +@dataclass +class _NMMClassicDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Args: - params: Parameters of Mixture. For example: alpha, beta, gamma for NMM + """Data Collector for parameters of classical NMM""" + alpha: float | int + beta: float | int + gamma: float | int + distribution: rv_frozen | rv_continuous - Returns: - params: alpha, beta, gamma for NMM - """ - if len(params) != 3: - raise ValueError("Expected 3 parameters") - alpha, beta, gamma = params - return alpha, beta, gamma +@dataclass +class _NMMCanonicalDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - @staticmethod - def _canonical_generate_params_validation(params: list[float]) -> float: - """Validation parameters for canonical generate for NMM + """Data Collector for parameters of canonical NMM""" + sigma: float | int + distribution: rv_frozen | rv_continuous - Args: - params: Parameters of Mixture. For example: sigma for NMM - Returns: - params: sigma for NMM +class NormalMeanMixtures(AbstractMixtures): + _classical_collector = _NMMClassicDataCollector + _canonical_collector = _NMMCanonicalDataCollector + def __init__(self, mixture_form: str, **kwargs: Any) -> None: """ - if len(params) != 1: - raise ValueError("Expected 1 parameter") - sigma = params[0] - if sigma < 0: - raise ValueError("Expected parameter greater than or equal to zero") - return sigma - - def classic_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size. Classical form of NMM - - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: alpha, beta, gamma for NMM - - Returns: sample of given size - + See the parent class docstring. """ - alpha, beta, gamma = self._classic_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return alpha + beta * mixing_values + gamma * normal_values - - def canonical_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size. Canonical form of NMM - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: sigma for NMM - - Returns: sample of given size + super().__init__(mixture_form, **kwargs) + def _params_validation(self, data_collector: Any, params: dict[str, float | rv_continuous | rv_frozen]) -> Any: """ - sigma = self._canonical_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return mixing_values + sigma * normal_values - - def param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run parametric algorithm for NMM + See the parent method docstring. - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm - - Returns: TODO + Raises: + ValueError: If canonical Mixture has negative sigma parameter """ - cls = self.param_collector.dispatch(name)(sample, **params) - return cls.algorithm(sample) - def semi_param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run semi-parametric algorithm for NMM + data_class = super()._params_validation(data_collector, params) + if hasattr(data_class, "sigma") and data_class.sigma < 0: + raise ValueError("Sigma is negative") + return data_class - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm + def compute_moment(self) -> Any: + raise NotImplementedError("Must implement compute_moment") - Returns: TODO + def compute_cdf(self) -> Any: + raise NotImplementedError("Must implement cdf") - """ - cls = self.semi_param_collector.dispatch(name)(sample, **params) - return cls.algorithm(sample) + def compute_pdf(self) -> Any: + raise NotImplementedError("Must implement pdf") + + def compute_logpdf(self) -> Any: + raise NotImplementedError("Must implement logpdf") diff --git a/src/mixtures/nmv_mixture.py b/src/mixtures/nmv_mixture.py index e69b1dc..37a7d8e 100644 --- a/src/mixtures/nmv_mixture.py +++ b/src/mixtures/nmv_mixture.py @@ -1,113 +1,48 @@ +from dataclasses import dataclass from typing import Any -import scipy -from numpy import _typing +from scipy.stats import rv_continuous +from
scipy.stats.distributions import rv_frozen -from src.algorithms.nvm_semi_param_algorithms.mu_estimation import SemiParametricMuEstimation from src.mixtures.abstract_mixture import AbstractMixtures -class NormalMeanVarianceMixtures(AbstractMixtures): - - def __init__(self) -> None: - super().__init__() - self.semi_param_collector.register("mu_estimation")(SemiParametricMuEstimation) - ... - - @staticmethod - def _classic_generate_params_validation(params: list[float]) -> tuple[float, float, float]: - """Validation parameters for classic generate for NMVM - - Args: - params: Parameters of Mixture. For example: alpha, beta, gamma for NMVM - - Returns: - params: alpha, beta, gamma for NMVM - - """ - if len(params) != 3: - raise ValueError("Expected 3 parameters") - alpha, beta, gamma = params - return alpha, beta, gamma - - @staticmethod - def _canonical_generate_params_validation(params: list[float]) -> tuple[float, float]: - """Validation parameters for canonical generate for NMVM - - Args: - params: Parameters of Mixture. For example: alpha, mu for NMVM +@dataclass +class _NMVMClassicDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Returns: - params: alpha, mu for NMVM + """Data Collector for parameters of classical NMVM""" + alpha: float | int + beta: float | int + gamma: float | int + distribution: rv_frozen | rv_continuous - """ - if len(params) != 2: - raise ValueError("Expected 2 parameters") - alpha, mu = params - return alpha, mu - def classic_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size. Classical form of NMVM +@dataclass +class _NMVMCanonicalDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: alpha, beta, gamma for NMVM + """Data Collector for parameters of canonical NMVM""" + alpha: float | int + mu: float | int + distribution: rv_frozen | rv_continuous - Returns: sample of given size - """ - alpha, beta, gamma = self._classic_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return alpha + beta * mixing_values + gamma * (mixing_values**0.5) * normal_values - - def canonical_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size. Canonical form of NMVM - - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. 
For example: alpha, mu for NMVM - - Returns: sample of given size - - """ - alpha, mu = self._canonical_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return alpha + mu * mixing_values + (mixing_values**0.5) * normal_values - - def param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run parametric algorithm for NMVM - - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm - - Returns: TODO +class NormalMeanVarianceMixtures(AbstractMixtures): + _classical_collector = _NMVMClassicDataCollector + _canonical_collector = _NMVMCanonicalDataCollector - """ - cls = self.param_collector.dispatch(name)(sample, **params) - return cls.algorithm(sample) + def __init__(self, mixture_form: str, **kwargs: Any) -> None: + super().__init__(mixture_form, **kwargs) - def semi_param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict = None) -> Any: - """Select and run semi-parametric algorithm for NMVM + def compute_moment(self) -> Any: + raise NotImplementedError("Must implement compute_moment") - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm + def compute_cdf(self) -> Any: + raise NotImplementedError("Must implement cdf") - Returns: TODO + def compute_pdf(self) -> Any: + raise NotImplementedError("Must implement pdf") - """ - if params is None: - params = {} - cls = self.semi_param_collector.dispatch(name)(sample, **params) - return cls.algorithm(sample) + def compute_logpdf(self) -> Any: + raise NotImplementedError("Must implement logpdf") diff --git a/src/mixtures/nv_mixture.py b/src/mixtures/nv_mixture.py index 6855b2a..dcbedb9 100644 --- a/src/mixtures/nv_mixture.py +++ b/src/mixtures/nv_mixture.py @@ -1,109 +1,47 @@ +from dataclasses import dataclass from typing import Any -import scipy -from numpy import _typing +from scipy.stats import rv_continuous +from scipy.stats.distributions import rv_frozen from src.mixtures.abstract_mixture import AbstractMixtures -class NormalVarianceMixtures(AbstractMixtures): - - def __init__(self) -> None: - super().__init__() - ... - - @staticmethod - def _classic_generate_params_validation(params: list[float]) -> tuple[float, float]: - """Validation parameters for classic generate for NVM - - Args: - params: Parameters of Mixture. For example: alpha, gamma for NVM - - Returns: - params: alpha, gamma for NVM - - """ - if len(params) != 2: - raise ValueError("Expected 2 parameters") - alpha, gamma = params - return alpha, gamma - - @staticmethod - def _canonical_generate_params_validation(params: list[float]) -> float: - """Validation parameters for canonical generate for NVM - - Args: - params: Parameters of Mixture. For example: alpha for NVM +@dataclass +class _NVMClassicDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Returns: - params: alpha for NVM + """Data Collector for parameters of classical NVM""" + alpha: float | int + gamma: float | int + distribution: rv_frozen | rv_continuous - """ - if len(params) != 1: - raise ValueError("Expected 1 parameter") - alpha = params[0] - return alpha - def classic_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size.
Classical form of NVM +@dataclass +class _NVMCanonicalDataCollector: + """TODO: Change typing from float | int | etc to Protocol with __addition__ __multiplication__ __subtraction__""" - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: alpha, gamma for NVM + """Data Collector for parameters of canonical NVM""" + alpha: float | int + distribution: rv_frozen | rv_continuous - Returns: sample of given size - """ - alpha, gamma = self._classic_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return alpha + gamma * (mixing_values**0.5) * normal_values - - def canonical_generate( - self, size: int, w_distribution: scipy.stats.rv_continuous, params: list[float] - ) -> _typing.ArrayLike: - """Generate a sample of given size. Canonical form of NVM - - Args: - size: length of sample - w_distribution: Distribution of random value w - params: Parameters of Mixture. For example: alpha for NVM - - Returns: sample of given size - - """ - alpha = self._canonical_generate_params_validation(params) - mixing_values = w_distribution.rvs(size=size) - normal_values = scipy.stats.norm.rvs(size=size) - return alpha + (mixing_values**0.5) * normal_values - - def param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run parametric algorithm for NVM - - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm +class NormalVarianceMixtures(AbstractMixtures): - Returns: TODO + _classical_collector = _NVMClassicDataCollector + _canonical_collector = _NVMCanonicalDataCollector - """ - cls = self.param_collector.dispatch(name)(sample, params) - return cls.algorithm(sample) + def __init__(self, mixture_form: str, **kwargs: Any) -> None: + super().__init__(mixture_form, **kwargs) - def semi_param_algorithm(self, name: str, sample: _typing.ArrayLike, params: dict) -> Any: - """Select and run semi-parametric algorithm for NVM + def compute_moment(self) -> Any: + raise NotImplementedError("Must implement compute_moment") - Args: - name: Name of Algorithm - sample: Vector of random values - params: Parameters of Algorithm + def compute_cdf(self) -> Any: + raise NotImplementedError("Must implement cdf") - Returns: TODO + def compute_pdf(self) -> Any: + raise NotImplementedError("Must implement pdf") - """ - cls = self.semi_param_collector.dispatch(name)(sample, **params) - return cls.algorithm(sample) + def compute_logpdf(self) -> Any: + raise NotImplementedError("Must implement logpdf") diff --git a/src/register/algorithm_purpose.py b/src/register/algorithm_purpose.py new file mode 100644 index 0000000..9b1aaec --- /dev/null +++ b/src/register/algorithm_purpose.py @@ -0,0 +1,11 @@ +import enum + + +class AlgorithmPurpose(enum.Enum): + DEFAULT = "Any" + NM_PARAMETRIC = "Normal Mean Parametric" + NV_PARAMETRIC = "Normal Variance Parametric" + NMV_PARAMETRIC = "Normal Mean-Variance Parametric" + NM_SEMIPARAMETRIC = "Normal Mean Semiparametric" + NV_SEMIPARAMETRIC = "Normal Variance Semiparametric" + NMV_SEMIPARAMETRIC = "Normal Mean-Variance Semiparametric" diff --git a/src/register/register.py b/src/register/register.py index 0bc4949..983bba8 100644 --- a/src/register/register.py +++ b/src/register/register.py @@ -1,4 +1,6 @@ -from typing import Callable, Generic, Optional, Type, TypeVar +from typing import Callable, Generic, Optional, Tuple, Type, TypeVar + +from 
src.register.algorithm_purpose import AlgorithmPurpose T = TypeVar("T") @@ -14,13 +16,15 @@ def __init__(self, default: Optional[Type[T]] = None) -> None: """ self.default = default - self.register_of_names: dict[str, Type[T]] = {} + self.register_of_names: dict[Tuple[str, AlgorithmPurpose], Type[T]] = {} - def register(self, name: str) -> Callable: + def register(self, name: str, purpose: AlgorithmPurpose) -> Callable: """Register new object Args: - name: Name of lass + name: Class name + purpose: Purpose of the algorithm, an AlgorithmPurpose member + (e.g. AlgorithmPurpose.NMV_SEMIPARAMETRIC) Returns: Decorator function @@ -32,16 +36,20 @@ def decorator(cls: Type[T]) -> Type[T]: - if name in self.register_of_names: - raise ValueError("This name is already registered") - self.register_of_names[name] = cls + if (name, purpose) in self.register_of_names: + raise ValueError("This name and purpose pair is already registered") + if not isinstance(purpose, AlgorithmPurpose): + raise ValueError("Unexpected purpose value") + self.register_of_names[(name, purpose)] = cls return cls return decorator - def dispatch(self, name: str) -> Type[T]: + def dispatch(self, name: str, purpose: AlgorithmPurpose) -> Type[T]: """Find object by name Args: - name: Name of class + name: Class name + purpose: Purpose of the algorithm, an AlgorithmPurpose member + (e.g. AlgorithmPurpose.NMV_SEMIPARAMETRIC) Returns: object @@ -49,8 +57,8 @@ ValueError: When object with this name was not found """ - if name in self.register_of_names: - return self.register_of_names[name] + if (name, purpose) in self.register_of_names: + return self.register_of_names[(name, purpose)] if self.default is None: - raise ValueError(f"{name} realisation not registered") + raise ValueError(f"{name}, {purpose} realisation not registered") return self.default
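
With purposes folded into the key, one algorithm name can resolve to a different implementation per mixture family. A quick sketch of the intended use; DemoAlgorithm is hypothetical:

    from src.register.algorithm_purpose import AlgorithmPurpose
    from src.register.register import Registry

    registry: Registry = Registry()

    @registry.register("demo", AlgorithmPurpose.NMV_SEMIPARAMETRIC)
    class DemoAlgorithm:
        def __init__(self, sample=None, **kwargs): ...

        def algorithm(self, sample): ...

    registry.dispatch("demo", AlgorithmPurpose.NMV_SEMIPARAMETRIC)   # -> DemoAlgorithm
    # registry.dispatch("demo", AlgorithmPurpose.NM_PARAMETRIC)      # -> ValueError, not registered
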
diff --git a/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_semiparametric_mu_estimation.py b/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_semiparametric_mu_estimation.py index 783e1bf..51ebd15 100644 --- a/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_semiparametric_mu_estimation.py +++ b/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_semiparametric_mu_estimation.py @@ -3,112 +3,129 @@ import pytest from scipy.stats import expon, gamma, halfnorm, pareto +from src.estimators.semiparametric.nmv_semiparametric_estimator import NMVSemiParametricEstimator +from src.generators.nmv_generator import NMVGenerator from src.mixtures.nmv_mixture import NormalMeanVarianceMixtures class TestSemiParametricMuEstimation: + generator = NMVGenerator() + @pytest.mark.parametrize("real_mu", [i for i in range(-3, 3)]) def test_mu_estimation_expon_no_parameters(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("real_mu", [10**i for i in range(0, -10, -2)]) def test_mu_estimation_expon_no_parameters_small(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("real_mu", [50, 100]) def test_mu_estimation_expon_no_parameters_huge(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(1000000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < real_mu / 2 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 1000000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < real_mu / 2 and est.success is True @pytest.mark.parametrize("real_mu", [i for i in range(-3, 3)]) def test_mu_estimation_pareto_no_parameters(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(50000, pareto(2.62), [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=pareto(2.62)) + sample = self.generator.canonical_generate(mixture, 50000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("real_mu", [10**i for i in range(0, -10, -2)]) def test_mu_estimation_pareto_no_parameters_small(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(50000, pareto(2.62), [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=pareto(2.62)) + sample = self.generator.canonical_generate(mixture, 50000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("real_mu", [i for i in range(-3, 3)]) def test_mu_estimation_halfnorm_no_parameters(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, halfnorm, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=halfnorm) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("real_mu", [i for i in range(-3, 3)]) def test_mu_estimation_gamma_no_parameters(self, real_mu: float) -> None: - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, gamma(2), [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=gamma(2))
+ sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation") + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("params", [{"m": m} for m in range(5, 10)]) def test_mu_estimation_expon_1_parameter_m_positive(self, params: dict) -> None: real_mu = 1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("params", [{"m": m} for m in range(5, 10)]) def test_mu_estimation_expon_1_parameter_m_negative(self, params: dict) -> None: real_mu = -1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", [{"max_iterations": max_iterations} for max_iterations in (10**3, 10**4, 10**10)] ) def test_mu_estimation_expon_1_parameter_max_iterations(self, params: dict) -> None: real_mu = 1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize("params", [{"m": m} for m in range(1, 5)]) def test_mu_estimation_expon_1_parameter_m_is_best_estimation(self, params: dict) -> None: real_mu = 10 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(est_mu == params["m"]) + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert est.value == params["m"] and est.success is False @pytest.mark.parametrize( "params", [{"m": 10, "tolerance": tol} for tol in (1 / 10**6, 1 / 10**7, 1 / 10**8, 1 / 10**9, 1 / 10**10)] ) def test_mu_estimation_expon_2_parameters_tol_positive(self, params: dict) -> None: real_mu = 1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture =
NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", [{"m": 10, "tolerance": tol} for tol in (1 / 10**6, 1 / 10**7, 1 / 10**8, 1 / 10**9, 1 / 10**10)] ) def test_mu_estimation_expon_2_parameters_tol_negative(self, params: dict) -> None: real_mu = -1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", @@ -121,10 +138,11 @@ def test_mu_estimation_expon_2_parameters_tol_negative(self, params: dict) -> No ) def test_mu_estimation_expon_3_parameters_omega_positive(self, params: dict) -> None: real_mu = 1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", @@ -137,10 +155,11 @@ def test_mu_estimation_expon_3_parameters_omega_positive(self, params: dict) -> ) def test_mu_estimation_expon_3_parameters_omega_negative(self, params: dict) -> None: real_mu = -1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", @@ -155,10 +174,11 @@ def test_mu_estimation_expon_3_parameters_omega_negative(self, params: dict) -> ) def test_mu_estimation_expon_3_parameters_all_positive(self, params: dict) -> None: real_mu = 1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True @pytest.mark.parametrize( "params", @@ -173,7 +193,8 @@ def test_mu_estimation_expon_3_parameters_all_positive(self, params: dict) -> No ) def 
test_mu_estimation_expon_3_parameters_all_negative(self, params: dict) -> None: real_mu = -1 - mixture = NormalMeanVarianceMixtures() - sample = mixture.canonical_generate(10000, expon, [0, real_mu]) - est_mu = mixture.semi_param_algorithm("mu_estimation", sample, params) - assert abs(real_mu - est_mu) < 1 + mixture = NormalMeanVarianceMixtures("canonical", alpha=0, mu=real_mu, distribution=expon) + sample = self.generator.canonical_generate(mixture, 10000) + estimator = NMVSemiParametricEstimator("mu_estimation", params) + est = estimator.estimate(sample) + assert abs(real_mu - est.value) < 1 and est.success is True diff --git a/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_validate_kwargs.py b/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_validate_kwargs.py index e673a65..5479e4c 100644 --- a/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_validate_kwargs.py +++ b/tests/algorithms/nmv_algorithms/semiparametric_mu_estimation/test_validate_kwargs.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from src.algorithms.nvm_semi_param_algorithms.mu_estimation import SemiParametricMuEstimation +from src.algorithms.semiparam_algorithms.nvm_semi_param_algorithms.mu_estimation import SemiParametricMuEstimation def _test_omega(x: float) -> float: diff --git a/tests/generators/nm_generator/test_mixing_normal.py b/tests/generators/nm_generator/test_mixing_normal.py index bf885af..ea85692 100644 --- a/tests/generators/nm_generator/test_mixing_normal.py +++ b/tests/generators/nm_generator/test_mixing_normal.py @@ -3,80 +3,90 @@ from scipy import ndimage from scipy.stats import norm +from src.generators.nm_generator import NMGenerator from src.mixtures.nm_mixture import * class TestMixingNormal: test_mixture_size = 100000 + generator = NMGenerator() @pytest.mark.parametrize( "mixing_variance, expected_variance", [(0, 1), (1, 2), (100, 101), (1.5, 2.5), (0.333, 1.333)] ) def test_classic_generate_variance_0(self, mixing_variance: float, expected_variance: float) -> None: - mixture = NormalMeanMixtures().classic_generate(self.test_mixture_size, norm, [0, mixing_variance**0.5, 1]) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("classical", alpha=0, beta=mixing_variance**0.5, gamma=1, distribution=norm) + sample = self.generator.classical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("beta", np.random.uniform(0, 100, size=50)) def test_classic_generate_variance_1(self, beta: float) -> None: expected_variance = beta**2 + 1 - mixture = NormalMeanMixtures().classic_generate(self.test_mixture_size, norm, [0, beta, 1]) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("classical", alpha=0, beta=beta, gamma=1, distribution=norm) + sample = self.generator.classical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("beta, gamma", np.random.uniform(0, 100, size=(50, 2))) def test_classic_generate_variance_2(self, beta: float, gamma: float) -> None: expected_variance = beta**2 + gamma**2 - mixture = NormalMeanMixtures().classic_generate(self.test_mixture_size, norm, [0, beta, gamma]) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("classical", alpha=0, beta=beta, gamma=gamma, distribution=norm) + sample = 
self.generator.classical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("beta, gamma", np.random.uniform(0, 10, size=(50, 2))) def test_classic_generate_mean(self, beta: float, gamma: float) -> None: expected_mean = 0 - mixture = NormalMeanMixtures().classic_generate(self.test_mixture_size, norm, [0, beta, gamma]) - actual_mean = np.mean(np.array(mixture)) + mixture = NormalMeanMixtures("classical", alpha=0, beta=beta, gamma=gamma, distribution=norm) + sample = self.generator.classical_generate(mixture, self.test_mixture_size) + actual_mean = np.mean(np.array(sample)) assert abs(actual_mean - expected_mean) < 1 @pytest.mark.parametrize("expected_size", np.random.randint(0, 100, size=50)) def test_classic_generate_size(self, expected_size: int) -> None: - mixture = NormalMeanMixtures().classic_generate(expected_size, norm, [0, 1, 1]) - actual_size = np.size(mixture) + mixture = NormalMeanMixtures("classical", alpha=0, beta=1, gamma=1, distribution=norm) + sample = self.generator.classical_generate(mixture, expected_size) + actual_size = np.size(sample) assert actual_size == expected_size @pytest.mark.parametrize( "mixing_variance, expected_variance", [(0, 1), (1, 2), (100, 101), (1.5, 2.5), (0.333, 1.333)] ) def test_canonical_generate_variance_0(self, mixing_variance: float, expected_variance: float) -> None: - mixture = NormalMeanMixtures().canonical_generate(self.test_mixture_size, norm(0, mixing_variance**0.5), [1]) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("canonical", sigma=1, distribution=norm(0, mixing_variance**0.5)) + sample = self.generator.canonical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("sigma", np.random.uniform(0, 100, size=50)) def test_canonical_generate_variance_1(self, sigma: float) -> None: expected_variance = sigma**2 + 1 - mixture = NormalMeanMixtures().canonical_generate(self.test_mixture_size, norm, [sigma]) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("canonical", sigma=sigma, distribution=norm) + sample = self.generator.canonical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("mixing_variance, sigma", np.random.uniform(0, 100, size=(50, 2))) def test_canonical_generate_variance_2(self, mixing_variance: float, sigma: float) -> None: expected_variance = mixing_variance + sigma**2 - mixture = NormalMeanMixtures().canonical_generate( - self.test_mixture_size, norm(0, mixing_variance**0.5), [sigma] - ) - actual_variance = ndimage.variance(mixture) + mixture = NormalMeanMixtures("canonical", sigma=sigma, distribution=norm(0, mixing_variance**0.5)) + sample = self.generator.canonical_generate(mixture, self.test_mixture_size) + actual_variance = ndimage.variance(sample) assert actual_variance == pytest.approx(expected_variance, 0.1) @pytest.mark.parametrize("sigma", np.random.uniform(0, 10, size=50)) def test_canonical_generate_mean(self, sigma: float) -> None: expected_mean = 0 - mixture = NormalMeanMixtures().canonical_generate(self.test_mixture_size, norm, [sigma]) - actual_mean = np.mean(np.array(mixture)) + mixture = NormalMeanMixtures("canonical", sigma=sigma, distribution=norm) + sample = 
diff --git a/tests/generators/nm_generator/test_nm_params_validator.py b/tests/generators/nm_generator/test_nm_params_validator.py
index c1766a2..5061a56 100644
--- a/tests/generators/nm_generator/test_nm_params_validator.py
+++ b/tests/generators/nm_generator/test_nm_params_validator.py
@@ -1,63 +1,38 @@
-import sys
-
-import numpy as np
 import pytest
+from scipy.stats import norm

-from src.mixtures.nm_mixture import *
+from src.mixtures.nm_mixture import NormalMeanMixtures


 class TestGenerateParamsValidators:
-    @pytest.mark.parametrize(
-        "params",
-        [[], [1], [1.111, 2.111], [1, 1, 1, 1], np.random.uniform(-100, 100, size=(100, 1))],
-    )
-    def test_classic_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 3 parameters"):
-            NormalMeanMixtures._classic_generate_params_validation(params)
-
     @pytest.mark.parametrize(
         "params",
         [
-            [sys.float_info.max, sys.float_info.max, sys.float_info.max],
-            [1, 1, 1],
-            [0, 0, 0],
-            [-1, -1, -1],
-            [0.333, 0.333, 0.666],
+            {"wrong_alpha": 1, "beta": 1, "gamma": 1, "distribution": norm},
+            {"alpha": 1, "wrong_beta": 1, "gamma": 1, "distribution": norm},
+            {"alpha": 1, "beta": 1, "wrong_gamma": 1, "distribution": norm},
+            {"alpha": 1, "beta": 1, "gamma": 1, "wrong_distribution": norm},
         ],
     )
-    def test_classic_generate_validator_correct(self, params: list[float]) -> None:
-        NormalMeanMixtures._classic_generate_params_validation(params)
-
-    @pytest.mark.parametrize("params", np.random.uniform(-100, 100, size=(50, 3)))
-    def test_classic_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalMeanMixtures._classic_generate_params_validation(params)
+    def test_classical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanMixtures("classical", **params)

-    @pytest.mark.parametrize(
-        "params",
-        [
-            [],
-            [1.111, 2.111],
-            [1, 1, 1],
-            [1, 1, 1, 1],
-            np.random.uniform(1, 100, size=(100, 1)),
-        ],
-    )
-    def test_canonical_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 1 parameter"):
-            NormalMeanMixtures._canonical_generate_params_validation(params)
+    def test_classical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanMixtures("classical", **{"alpha": 1, "beta": 1, "gamma": 1, "distribution": 1})

     @pytest.mark.parametrize(
-        "params",
-        [[-1], [-1000], [-9999]],
+        "params", [{"wrong_sigma": 1, "distribution": norm}, {"sigma": 1, "wrong_distribution": norm}]
     )
-    def test_canonical_generate_validator_value_error_sign(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected parameter greater than or equal to zero"):
-            NormalMeanMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanMixtures("canonical", **params)

-    @pytest.mark.parametrize("params", [[sys.float_info.max], [1], [0], [0.333], [10000]])
-    def test_canonical_generate_validator_correct(self, params: list[float]) -> None:
-        NormalMeanMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanMixtures("canonical", **{"sigma": 1, "distribution": 1})

-    @pytest.mark.parametrize("params", np.random.uniform(1, 100, size=(50, 1)))
-    def test_canonical_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalMeanMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_sigma_sign(self) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanMixtures("canonical", **{"sigma": -1, "distribution": norm})
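The validators are now exercised only through the public constructor, so these tests pin down observable behavior (unknown keyword names, non-distribution objects, and a negative sigma all raise ValueError) instead of calling private `_*_generate_params_validation` helpers. The validation the tests imply looks roughly like this (an illustrative sketch only, not the actual implementation in `src/mixtures/nm_mixture.py`):

```python
from typing import Any

_CLASSICAL_KEYS = {"alpha", "beta", "gamma", "distribution"}
_CANONICAL_KEYS = {"sigma", "distribution"}

def _validate_nm_kwargs(mixture_form: str, **kwargs: Any) -> None:
    expected = _CLASSICAL_KEYS if mixture_form == "classical" else _CANONICAL_KEYS
    # Wrong, missing, or extra keyword names are rejected wholesale.
    if set(kwargs) != expected:
        raise ValueError(f"Expected parameters {sorted(expected)}, got {sorted(kwargs)}")
    # Anything without an rvs() sampler is not a usable scipy.stats distribution.
    if not hasattr(kwargs["distribution"], "rvs"):
        raise ValueError("distribution must be a scipy.stats distribution")
    # The canonical form additionally requires a non-negative sigma.
    if mixture_form == "canonical" and kwargs["sigma"] < 0:
        raise ValueError("Expected sigma greater than or equal to zero")
```

Testing through the constructor rather than the private helpers means the validator can be refactored freely without touching these tests.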
diff --git a/tests/generators/nmv_generator/test_nmv_params_validator.py b/tests/generators/nmv_generator/test_nmv_params_validator.py
index 4edf6e3..598fadb 100644
--- a/tests/generators/nmv_generator/test_nmv_params_validator.py
+++ b/tests/generators/nmv_generator/test_nmv_params_validator.py
@@ -1,74 +1,40 @@
-import sys
-
-import numpy as np
 import pytest
+from scipy.stats import norm

-from src.mixtures.nmv_mixture import *
+from src.mixtures.nmv_mixture import NormalMeanVarianceMixtures


 class TestGenerateParamsValidators:
-    @pytest.mark.parametrize(
-        "params",
-        [
-            [],
-            [0],
-            [1.111, 2.111],
-            [1, 1, 1, 1],
-            np.random.uniform(0, 100, size=(100, 1)),
-            np.random.uniform(0, 100, size=(100, 100)),
-        ],
-    )
-    def test_classic_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 3 parameters"):
-            NormalMeanVarianceMixtures._classic_generate_params_validation(params)

     @pytest.mark.parametrize(
         "params",
         [
-            [sys.float_info.max, sys.float_info.max, sys.float_info.max],
-            [sys.float_info.min, sys.float_info.min, sys.float_info.min],
-            [1, 1, 1],
-            [0, 0, 0],
-            [-1, -1, -1],
-            [0.333, -0.333, 0.666],
+            {"wrong_alpha": 1, "beta": 1, "gamma": 1, "distribution": norm},
+            {"alpha": 1, "wrong_beta": 1, "gamma": 1, "distribution": norm},
+            {"alpha": 1, "beta": 1, "wrong_gamma": 1, "distribution": norm},
+            {"alpha": 1, "beta": 1, "gamma": 1, "wrong_distribution": norm},
         ],
     )
-    def test_classic_generate_validator_correct(self, params: list[float]) -> None:
-        NormalMeanVarianceMixtures._classic_generate_params_validation(params)
+    def test_classical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanVarianceMixtures("classical", **params)

-    @pytest.mark.parametrize("params", np.random.uniform(-100, 100, size=(50, 3)))
-    def test_classic_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalMeanVarianceMixtures._classic_generate_params_validation(params)
-
-    @pytest.mark.parametrize(
-        "params",
-        [
-            [],
-            [1.111],
-            [1, 1, 1],
-            [1, 1, 1, 1],
-            np.random.uniform(-100, 100, size=(100, 1)),
-            np.random.uniform(-100, 100, size=(100, 100)),
-        ],
-    )
-    def test_canonical_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 2 parameters"):
-            NormalMeanVarianceMixtures._canonical_generate_params_validation(params)
+    def test_classical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanVarianceMixtures("classical", **{"alpha": 1, "beta": 1, "gamma": 1, "distribution": 1})

     @pytest.mark.parametrize(
         "params",
         [
-            [sys.float_info.max, sys.float_info.max],
-            [sys.float_info.min, sys.float_info.min],
-            [1, 1],
-            [0, 0],
-            [-1, -1],
-            [0.333, 0.666],
+            {"wrong_alpha": 1, "mu": 1, "distribution": norm},
+            {"alpha": 1, "mu": 1, "wrong_distribution": norm},
+            {"alpha": 1, "wrong_mu": 1, "distribution": norm},
         ],
     )
-    def test_canonical_generate_validator_correct(self, params: list[float]) -> None:
-        NormalMeanVarianceMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanVarianceMixtures("canonical", **params)

-    @pytest.mark.parametrize("params", np.random.uniform(-100, 100, size=(50, 2)))
-    def test_canonical_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalMeanVarianceMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalMeanVarianceMixtures("canonical", **{"alpha": 1, "mu": 1, "distribution": 1})
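For reference, the two NMV parameter sets being validated correspond to the two representations of a normal mean-variance mixture: the classical form carries (alpha, beta, gamma) and the canonical form carries (alpha, mu). A short usage sketch (the formulas in the comments are the standard textbook forms, assumed rather than quoted from this codebase; `expon` keeps the mixing variable non-negative):

```python
from scipy.stats import expon

from src.mixtures.nmv_mixture import NormalMeanVarianceMixtures

# Classical form: Y = alpha + beta * xi + gamma * sqrt(xi) * N
classical = NormalMeanVarianceMixtures("classical", alpha=0, beta=1, gamma=1, distribution=expon)

# Canonical form: Y = alpha + mu * xi + sqrt(xi) * N
canonical = NormalMeanVarianceMixtures("canonical", alpha=0, mu=1, distribution=expon)
```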
diff --git a/tests/generators/nv_generator/test_nv_params_validator.py b/tests/generators/nv_generator/test_nv_params_validator.py
index a584f4b..f6c5994 100644
--- a/tests/generators/nv_generator/test_nv_params_validator.py
+++ b/tests/generators/nv_generator/test_nv_params_validator.py
@@ -1,64 +1,38 @@
-import sys
-
-import numpy as np
 import pytest
+from scipy.stats import norm

-from src.mixtures.nv_mixture import *
+from src.mixtures.nv_mixture import NormalVarianceMixtures


 class TestGenerateParamsValidators:
     @pytest.mark.parametrize(
         "params",
         [
-            [],
-            [0],
-            [1.111, 2.111, 3.111],
-            [1, 1, 1, 1],
-            np.random.uniform(0, 100, size=(100, 1)),
-            np.random.uniform(0, 100, size=(100, 100)),
+            {"wrong_alpha": 1, "beta": 1, "distribution": norm},
+            {"alpha": 1, "wrong_beta": 1, "distribution": norm},
+            {"alpha": 1, "beta": 1, "wrong_distribution": norm},
         ],
     )
-    def test_classic_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 2 parameters"):
-            NormalVarianceMixtures._classic_generate_params_validation(params)
+    def test_classical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalVarianceMixtures("classical", **params)

-    @pytest.mark.parametrize(
-        "params",
-        [
-            [sys.float_info.max, sys.float_info.max],
-            [sys.float_info.min, sys.float_info.min],
-            [1, 1],
-            [0, 0],
-            [-1, -1],
-            [0.333, -0.333],
-        ],
-    )
-    def test_classic_generate_validator_correct(self, params: list[float]) -> None:
-        NormalVarianceMixtures._classic_generate_params_validation(params)
+    def test_classical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalVarianceMixtures("classical", **{"alpha": 1, "beta": 1, "distribution": 1})

-    @pytest.mark.parametrize("params", np.random.uniform(-100, 100, size=(50, 2)))
-    def test_classic_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalVarianceMixtures._classic_generate_params_validation(params)
+    @pytest.mark.parametrize("params", [{}, {"a": 1, "b": 2, "c": 3, "d": 4, "m": 5}])
+    def test_canonical_args_validation_length_error(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalVarianceMixtures("canonical", **params)

     @pytest.mark.parametrize(
-        "params",
-        [
-            [],
-            [1.111, 2.111],
-            [1, 1, 1],
-            [1, 1, 1, 1],
-            np.random.uniform(0, 100, size=(100, 1)),
-            np.random.uniform(0, 100, size=(100, 100)),
-        ],
+        "params", [{"wrong_alpha": 1, "distribution": norm}, {"alpha": 1, "wrong_distribution": norm}]
     )
-    def test_canonical_generate_validator_value_error_length(self, params: list[float]) -> None:
-        with pytest.raises(ValueError, match="Expected 1 parameter"):
-            NormalVarianceMixtures._canonical_generate_params_validation(params)
-
-    @pytest.mark.parametrize("params", [[sys.float_info.max], [sys.float_info.min], [1], [0], [-1], [0.333]])
-    def test_canonical_generate_validator_correct(self, params: list[float]) -> None:
-        NormalVarianceMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_names(self, params: dict) -> None:
+        with pytest.raises(ValueError):
+            NormalVarianceMixtures("canonical", **params)

-    @pytest.mark.parametrize("params", np.random.uniform(-100, 100, size=(50, 1)))
-    def test_canonical_generate_validator_correct_random(self, params: list[float]) -> None:
-        NormalVarianceMixtures._canonical_generate_params_validation(params)
+    def test_canonical_wrong_distribution_type(self) -> None:
+        with pytest.raises(ValueError):
+            NormalVarianceMixtures("canonical", **{"alpha": 1, "distribution": 1})
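The NV file completes the migration: the list-based `_classic_generate_params_validation` / `_canonical_generate_params_validation` helpers are gone everywhere, and each mixture family is validated through its constructor keywords. Note that, unlike its NM and NMV counterparts, the canonical `NormalVarianceMixtures` form takes only (alpha, distribution). A closing usage sketch (the `NVGenerator` name and import path are assumptions made by analogy with `NMGenerator`, and the classical NV formula in the comment is the usual textbook parameterization, not quoted from this codebase):

```python
import numpy as np
from scipy.stats import expon

from src.generators.nv_generator import NVGenerator  # assumed, mirroring NMGenerator
from src.mixtures.nv_mixture import NormalVarianceMixtures

# Classical NV form (assumed): Y = alpha + beta * sqrt(xi) * N with xi >= 0.
# With xi ~ expon this produces a heavy-tailed, Laplace-like sample.
mixture = NormalVarianceMixtures("classical", alpha=0, beta=1, distribution=expon)
sample = NVGenerator().classical_generate(mixture, 10_000)
print(np.mean(sample), np.var(sample))
```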