Skip to content

Base Parameter Distributions

ParamDist

Bases: Distribution, ABC

Source code in src/methods/bayes/base/distribution.py
class ParamDist(D.distribution.Distribution, ABC):
    @classmethod
    @abstractmethod
    def from_parameter(self, p: nn.Parameter) -> "ParamDist":
        """
        Default initialization of ParamDist forom parameters of nn.Module

        Args:
            p (nn.Parameter): paramaters for which ParamDist should be created.
        """
        ...

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    @abstractmethod
    def get_params(self) -> dict[str, nn.Parameter]:
        """
        Returns dictionary of parameters that should be registered as parameters at nn.Module.
        """
        ...

    @abstractmethod
    def log_prob(self, weights: torch.Tensor) -> torch.Tensor:
        """
        Returns logarithm of probability density function of distibution evaluated at weights.

        Args:
            weights (torch.Tensor): the point at which probability should be evaluated.
        """
        ...

    @abstractmethod
    def log_z_test(self):
        """
        Returns parameter which is used to be compared with threshold to estimate
        wether this parameter should be pruned. By default it is logarithm of z_test
        or equivalent of it. log_z_test = log(abs(mean)) - log(variance)
        """
        return torch.log(torch.abs(self.mean)) - torch.log(self.variance)

    @abstractmethod
    def rsample(self, sample_shape: _size = torch.Size()) -> torch.Tensor:
        """
        Returns parameters sampled using reparametrization trick, so they could be used for
        gradient estimation

        Returns:
            torch.Tensor: sampled parameters
        """
        ...

    @property
    @abstractmethod
    def map(self) -> torch.Tensor:
        """
        Returns mode of the distibution. It has a sense of maximum aposteriori estimation
        for bayessian nets.

        Returns:
            torch.Tensor: MAP parameters
        """
        ...

    @property
    @abstractmethod
    def mean(self) -> torch.Tensor:
        """
        Returns mean of the distibution. It has a sense of non-bias estimation
        for bayessian nets.

        Returns:
            torch.Tensor: mean parameters
        """
        ...

    @abstractmethod
    def variance(self):
        """
        Returns variance of the distibution. It has a sense of error estimation
        for bayessian nets and assumed to be used in prunning.
        """
        ...

map: torch.Tensor abstractmethod property

Returns the mode of the distribution. It serves as a maximum a posteriori (MAP) estimate for Bayesian nets.

Returns:

Type Description
Tensor

torch.Tensor: MAP parameters

mean: torch.Tensor abstractmethod property

Returns the mean of the distribution. It serves as an unbiased estimate for Bayesian nets.

Returns:

Type Description
Tensor

torch.Tensor: mean parameters

from_parameter(p) abstractmethod classmethod

Default initialization of ParamDist from parameters of nn.Module

Parameters:

Name Type Description Default
p Parameter

parameters for which ParamDist should be created.

required
Source code in src/methods/bayes/base/distribution.py
@classmethod
@abstractmethod
def from_parameter(cls, p: nn.Parameter) -> "ParamDist":
    """
    Default initialization of ParamDist from parameters of nn.Module.

    Args:
        p (nn.Parameter): parameters for which ParamDist should be created.
    """
    ...

get_params() abstractmethod

Returns dictionary of parameters that should be registered as parameters at nn.Module.

Source code in src/methods/bayes/base/distribution.py
@abstractmethod
def get_params(self) -> dict[str, nn.Parameter]:
    """
    Returns dictionary of parameters that should be registered as parameters at nn.Module.

    Returns:
        dict[str, nn.Parameter]: mapping from parameter name to nn.Parameter.
    """
    ...

log_prob(weights) abstractmethod

Returns the logarithm of the probability density function of the distribution evaluated at weights.

Parameters:

Name Type Description Default
weights Tensor

the point at which probability should be evaluated.

required
Source code in src/methods/bayes/base/distribution.py
@abstractmethod
def log_prob(self, weights: torch.Tensor) -> torch.Tensor:
    """
    Returns logarithm of probability density function of the distribution evaluated at weights.

    Args:
        weights (torch.Tensor): the point at which probability should be evaluated.

    Returns:
        torch.Tensor: log-density evaluated at ``weights``.
    """
    ...

log_z_test() abstractmethod

Returns the statistic that is compared with a threshold to estimate whether this parameter should be pruned. By default it is the logarithm of the z-test statistic or an equivalent of it: log_z_test = log(abs(mean)) - log(variance)

Source code in src/methods/bayes/base/distribution.py
@abstractmethod
def log_z_test(self):
    """
    Returns the statistic that is compared with a threshold to estimate
    whether this parameter should be pruned. By default it is the logarithm
    of the z-test statistic or an equivalent of it:
    log_z_test = log(abs(mean)) - log(variance)
    """
    # Default implementation; subclasses may reuse it via super().log_z_test().
    return torch.log(torch.abs(self.mean)) - torch.log(self.variance)

rsample(sample_shape=torch.Size()) abstractmethod

Returns parameters sampled using reparametrization trick, so they could be used for gradient estimation

Returns:

Type Description
Tensor

torch.Tensor: sampled parameters

Source code in src/methods/bayes/base/distribution.py
@abstractmethod
def rsample(self, sample_shape: _size = torch.Size()) -> torch.Tensor:
    """
    Returns parameters sampled using the reparametrization trick, so they
    can be used for gradient estimation.

    Args:
        sample_shape (_size): shape of the sample batch to draw.

    Returns:
        torch.Tensor: sampled parameters
    """
    ...

variance() abstractmethod

Returns the variance of the distribution. It serves as an error estimate for Bayesian nets and is assumed to be used in pruning.

Source code in src/methods/bayes/base/distribution.py
@abstractmethod
def variance(self) -> torch.Tensor:
    """
    Returns variance of the distribution. It has the sense of an error
    estimate for Bayesian nets and is assumed to be used in pruning.
    """
    ...