Docstrings updates (#548)
* ale docs update

* ale some conventions updates

* ale single-under

* moving back to original return format with inconsistent italic/normal font

* first pass through anchor_base

* back to standard returns without :

* anchor_text and anchor_explanation first pass

* first pass through anchor_text

* first pass through cfproto and cem

* first pass through cfrl_base

* first pass through cfrl_tabular

* first pass through shap_wrappers

* first pass through models

* First pass through backend top

* first pass through backends pytorch

* first pass through backends tensorflow

* small updates

* minor correction

* example duplicated attribute - TabularSampler

* replaced attributes docstrings

* second pass through explainers.

* added explain fields up to cfproto (inclusive)

* add description of explanation return fields

* add links to docstrings + minor corrections

* tensor to array

* fixed duplicated target names

* first pass confidence docs

* fixed broken links in shap

* included links in defaults SHAP

* minor fixes

* fixed minor indentation and punctuation. private IG build_explanation

* Fixed IG and test_shap_wrappers build_explanation

* minor updates on defaults, interfaces, autoencoder and anchortabular

* fixed approximation_methods

* fixed data

* fixed discretizer

* fix app_methods, distance & distributed

* fix language model

* first pass through utils

* second pass through utils

* fixed mypy errors
RobertSamoilescu authored Jan 17, 2022
1 parent ce961ca commit e458ab3
Showing 46 changed files with 1,894 additions and 1,405 deletions.
6 changes: 4 additions & 2 deletions alibi/api/defaults.py
@@ -104,7 +104,8 @@
'kwargs',
]
"""
KernelShap parameters updated and return in metadata['params'].
KernelShap parameters updated and returned in ``metadata['params']``.
See :py:class:`alibi.explainers.shap_wrappers.KernelShap`.
"""

DEFAULT_META_KERNEL_SHAP = {
@@ -172,7 +173,8 @@
'kwargs'
]
"""
TreeShap parameters updated and return in metadata['params'].
TreeShap parameters updated and returned in ``metadata['params']``.
See :py:class:`alibi.explainers.shap_wrappers.TreeShap`.
"""

DEFAULT_META_TREE_SHAP = {
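To see where these default parameter keys end up, here is a minimal illustrative sketch (not part of the diff), assuming the usual constructor/fit/explain workflow of `alibi.explainers.KernelShap` and a scikit-learn style classifier; `TreeShap` records its parameter keys in `metadata['params']` the same way. The toy data and model are placeholders.

```python
import numpy as np
from sklearn.linear_model import LogisticRegression
from alibi.explainers import KernelShap

# toy data and model (illustrative only)
X = np.random.rand(100, 5)
y = (X[:, 0] > 0.5).astype(int)
clf = LogisticRegression().fit(X, y)

explainer = KernelShap(clf.predict_proba)
explainer.fit(X[:50])                  # background dataset
explanation = explainer.explain(X[:3])

# every key declared in KERNEL_SHAP_PARAMS is recorded here
print(explanation.meta['params'].keys())
```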
26 changes: 17 additions & 9 deletions alibi/api/interfaces.py
@@ -68,7 +68,7 @@ class Explainer(abc.ABC):
"""
Base class for explainer algorithms
"""
meta = attr.ib(default=attr.Factory(default_meta), repr=alibi_pformat) # type: dict
meta: dict = attr.ib(default=attr.Factory(default_meta), repr=alibi_pformat) #: Explainer meta-data.

def __attrs_post_init__(self):
# add a name and version to the metadata dictionary
@@ -102,6 +102,14 @@ def load(cls, path: Union[str, os.PathLike], predictor: Any) -> "Explainer":
return load_explainer(path, predictor)

def reset_predictor(self, predictor: Any) -> None:
"""
Resets the predictor.
Parameters
----------
predictor
New predictor.
"""
raise NotImplementedError

def save(self, path: Union[str, os.PathLike]) -> None:
@@ -118,14 +126,14 @@ def save(self, path: Union[str, os.PathLike]) -> None:
def _update_metadata(self, data_dict: dict, params: bool = False) -> None:
"""
Updates the metadata of the explainer using the data from the `data_dict`. If the params option
is specified, then each key-value pair is added to the metadata `'params'` dictionary.
is specified, then each key-value pair is added to the metadata ``'params'`` dictionary.
Parameters
----------
data_dict
Contains the data to be stored in the metadata.
params
If True, the method updates the `'params'` attribute of the metatadata.
If ``True``, the method updates the ``'params'`` attribute of the metadata.
"""

if params:
@@ -151,34 +159,34 @@ class Explanation:

def __attrs_post_init__(self):
"""
Expose keys stored in self.meta and self.data as attributes of the class.
Expose keys stored in `self.meta` and `self.data` as attributes of the class.
"""
for key, value in ChainMap(self.meta, self.data).items():
setattr(self, key, value)

def to_json(self) -> str:
"""
Serialize the explanation data and metadata into a json format.
Serialize the explanation data and metadata into a `json` format.
Returns
-------
String containing json representation of the explanation
String containing `json` representation of the explanation.
"""
return json.dumps(attr.asdict(self), cls=NumpyEncoder)

@classmethod
def from_json(cls, jsonrepr) -> "Explanation":
"""
Create an instance of an Explanation class using a json representation of the Explanation.
Create an instance of an `Explanation` class using a `json` representation of the `Explanation`.
Parameters
----------
jsonrepr
json representation of an explanation
`json` representation of an explanation.
Returns
-------
An Explanation object
An Explanation object.
"""
dictrepr = json.loads(jsonrepr)
try:
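For the two serialization methods above, a short illustrative round trip (not part of the diff); here `explanation` is a stand-in for any `Explanation` object produced by an alibi explainer, and note that numpy arrays come back as plain lists because the encoder writes them out as json lists:

```python
from alibi.api.interfaces import Explanation

json_repr = explanation.to_json()            # str holding the json payload
restored = Explanation.from_json(json_repr)  # rebuilt Explanation instance

# meta/data keys are exposed as attributes by __attrs_post_init__
print(restored.meta.keys(), restored.data.keys())
```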
89 changes: 43 additions & 46 deletions alibi/confidence/model_linearity.py
@@ -16,15 +16,15 @@ def _linear_superposition(alphas, vecs, shape):
Parameters
----------
alphas
Coefficients of the superposition
Coefficients of the superposition.
vecs
Tensors of the superposition
Tensors of the superposition.
shape
Shape of each tensor
Shape of each tensor.
Returns
-------
Linear tensor superposition
Linear tensor superposition.
"""
input_str = string.ascii_lowercase[2: 2 + len(shape)]
einstr = 'a,ba{}->b{}'.format(input_str, input_str)
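A toy check of the contraction built above (illustrative, not from the diff): for a feature shape of length one, `input_str` is `'c'` and the string becomes `'a,bac->bc'`, i.e. a weighted sum over the sample axis. The layout `(nb_instances, nb_samples, *shape)` for `vecs` is assumed here, as suggested by the `X_samples` shape comment later in this file.

```python
import numpy as np

alphas = np.array([0.3, 0.7])               # 2 superposition coefficients
vecs = np.random.rand(4, 2, 3)              # assumed (nb_instances, nb_samples, *shape)

out = np.einsum('a,bac->bc', alphas, vecs)  # shape (4, 3)
assert np.allclose(out, 0.3 * vecs[:, 0] + 0.7 * vecs[:, 1])
```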
@@ -39,19 +39,19 @@ def _calculate_global_linearity(predict_fn: Callable, input_shape: Tuple, X_samp
Parameters
----------
predict_fn
Model prediction function
Model prediction function.
input_shape
Shape of the input
Shape of the input.
X_samples
Array of feature vectors in the linear superposition
Array of feature vectors in the linear superposition.
model_type
'classifier' or 'regressor'
Supported values: ``'classifier'`` | ``'regressor'``.
alphas
Array of coefficients in the linear superposition
Array of coefficients in the linear superposition.
Returns
-------
Linearity score
Linearity score.
"""
ss = X_samples.shape[:2] # X_samples shape=(nb_instances, nb_samples, nb_features)
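The one-line summary of this function is collapsed in the hunk above, so the following is an inferred, hedged reading based on the function name, the parameters, and the pairwise variant below: the global aggregation builds a single superposition over all sampled vectors and scores the gap between the model output of that superposition and the superposition of the individual outputs,

```latex
L_{\mathrm{global}} = \left\| \, f\Big(\sum_{a} \alpha_a x_a\Big) - \sum_{a} \alpha_a f(x_a) \, \right\|
```

with `x_a` the sampled vectors and `alphas` the coefficients; for classifiers the outputs may be compared in log-probability space.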
@@ -105,27 +105,27 @@ def _calculate_global_linearity(predict_fn: Callable, input_shape: Tuple, X_samp

def _calculate_pairwise_linearity(predict_fn: Callable, x: np.ndarray, input_shape: Tuple, X_samples: np.ndarray,
model_type: str, alphas: np.ndarray) -> np.ndarray:
"""Calculates the norm of the difference between the output of a linear superposition of a test vector x and
vectors in X_samples and the linear superposition of the outputs, averaged over all the vectors in X_samples.
"""Calculates the norm of the difference between the output of a linear superposition of a test vector `x` and
vectors in `X_samples` and the linear superposition of the outputs, averaged over all the vectors in `X_samples`.
Parameters
----------
predict_fn
Model prediction function
Model prediction function.
x
Test instance for which to calculate the linearity measure
Test instance for which to calculate the linearity measure.
input_shape
Shape of the input
Shape of the input.
X_samples
Array of feature vectors in the linear superposition
Array of feature vectors in the linear superposition.
model_type
'classifier' or 'regressor'
Supported values: ``'classifier'`` | ``'regressor'``.
alphas
Array of coefficients in the linear superposition
Array of coefficients in the linear superposition.
Returns
-------
Linearity score
Linearity score.
"""
ss = X_samples.shape[:2] # X_samples shape=(nb_instances, nb_samples, nb_features)
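Written as a formula (a direct paraphrase of the sentence above, with `x_s` the rows of `X_samples`, `N` their number and `(alpha_0, alpha_1)` the superposition coefficients):

```latex
L_{\mathrm{pairwise}}(x) = \frac{1}{N} \sum_{s=1}^{N}
\left\| \, f(\alpha_0 x + \alpha_1 x_s) - \big(\alpha_0 f(x) + \alpha_1 f(x_s)\big) \, \right\|
```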
@@ -190,15 +190,15 @@ def _sample_knn(x: np.ndarray, X_train: np.ndarray, nb_samples: int = 10) -> np.
Parameters
----------
x
Central instance for sampling
Central instance for sampling.
X_train
Training set.
nb_samples
Number of samples to generate.
Returns
-------
Sampled vectors
Sampled vectors.
"""
x = x.reshape(x.shape[0], -1)
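A hedged sketch of the sampling described here, not the library implementation (which may differ in distance metric and batching): keep the `nb_samples` training rows closest to each instance.

```python
import numpy as np

def sample_knn_sketch(x: np.ndarray, X_train: np.ndarray, nb_samples: int = 10) -> np.ndarray:
    X_flat = X_train.reshape(X_train.shape[0], -1)
    x_flat = x.reshape(x.shape[0], -1)
    out = []
    for xi in x_flat:
        dists = np.linalg.norm(X_flat - xi, axis=1)          # distance to every training row
        out.append(X_flat[np.argsort(dists)[:nb_samples]])   # nb_samples nearest neighbours
    return np.stack(out)                                     # (nb_instances, nb_samples, nb_features)
```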
@@ -221,23 +221,23 @@ def _sample_knn(x: np.ndarray, X_train: np.ndarray, nb_samples: int = 10) -> np.

def _sample_grid(x: np.ndarray, feature_range: np.ndarray, epsilon: float = 0.04,
nb_samples: int = 10, res: int = 100) -> np.ndarray:
"""Samples data points uniformly from an interval centered at x and with size epsilon * Delta,
with delta = f_max - f_min the features ranges.
"""Samples data points uniformly from an interval centered at `x` and with size `epsilon * delta`,
with `delta = f_max - f_min` the features ranges.
Parameters
----------
x
Instance of interest.
feature_range
Array with min and max values for each feature
Array with min and max values for each feature.
epsilon
Size of the sampling region around central instance as percentage of features range.
nb_samples
Number of samples to generate.
Returns
-------
Sampled vectors
Sampled vectors.
"""
nb_instances = x.shape[0]
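A hedged sketch of the behaviour the docstring describes, again not the library code: draw uniform samples from a box of width `epsilon * delta` centred on each instance.

```python
import numpy as np

def sample_grid_sketch(x: np.ndarray, feature_range: np.ndarray,
                       epsilon: float = 0.04, nb_samples: int = 10) -> np.ndarray:
    delta = feature_range[:, 1] - feature_range[:, 0]        # f_max - f_min per feature
    lo, hi = x - 0.5 * epsilon * delta, x + 0.5 * epsilon * delta
    return np.stack([
        np.random.uniform(lo[i], hi[i], size=(nb_samples, x.shape[1]))
        for i in range(x.shape[0])
    ])                                                       # (nb_instances, nb_samples, nb_features)
```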
@@ -271,7 +271,7 @@ def _linearity_measure(predict_fn: Callable,
alphas: Optional[np.ndarray] = None,
model_type: str = 'classifier',
agg: str = 'global') -> np.ndarray:
"""Calculate the linearity measure of the model around an instance of interest x.
"""Calculate the linearity measure of the model around an instance of interest `x`.
Parameters
----------
@@ -284,7 +284,7 @@
feature_range
Array with min and max values for each feature.
method
Method for sampling. Supported values 'knn' or 'grid'.
Method for sampling. Supported values: ``'knn'`` | ``'grid'``.
epsilon
Size of the sampling region around the central instance as a percentage of feature range.
nb_samples
@@ -294,13 +294,13 @@
alphas
Array of coefficients in the superposition.
model_type
Type of task. Supported values are 'regressor' or 'classifier'.
Type of task. Supported values: ``'regressor'`` | ``'classifier'``.
agg
Aggregation method. Supported values are 'global' or 'pairwise'.
Aggregation method. Supported values: ``'global'`` | ``'pairwise'``.
Returns
-------
Linearity score
Linearity score.
"""
input_shape = x.shape[1:]
Expand Down Expand Up @@ -339,11 +339,11 @@ def _infer_feature_range(X_train: np.ndarray) -> np.ndarray:
Parameters
----------
X_train
Training set
Training set.
Returns
-------
Feature range
Feature range.
"""
X_train = X_train.reshape(X_train.shape[0], -1)
return np.vstack((X_train.min(axis=0), X_train.max(axis=0))).T
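A quick numerical illustration (arithmetic only, not from the diff) of the per-feature `[min, max]` pairs returned above:

```python
import numpy as np

X_train = np.array([[0., 10.],
                    [2.,  4.],
                    [1.,  7.]])
# column-wise min/max stacked, then transposed -> one [min, max] row per feature
print(np.vstack((X_train.min(axis=0), X_train.max(axis=0))).T)
# [[ 0.  2.]
#  [ 4. 10.]]
```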
@@ -365,7 +365,7 @@ def __init__(self,
Parameters
----------
method
Method for sampling. Supported methods are 'knn' or 'grid'.
Method for sampling. Supported methods: ``'knn'`` | ``'grid'``.
epsilon
Size of the sampling region around the central instance as a percentage of the features range.
nb_samples
@@ -375,9 +375,9 @@
alphas
Coefficients in the superposition.
agg
Aggregation method. Supported values are 'global' or 'pairwise'.
Aggregation method. Supported values: ``'global'`` | ``'pairwise'``.
model_type
Type of task. Supported values are 'regressor' or 'classifier'.
Type of task. Supported values: ``'regressor'`` | ``'classifier'``.
"""
self.method = method
self.epsilon = epsilon
@@ -395,11 +395,8 @@ def fit(self, X_train: np.ndarray) -> None:
Parameters
----------
X_train
Training set
Training set.
Returns
-------
None
"""
self.X_train = X_train
self.feature_range = _infer_feature_range(X_train)
Expand All @@ -412,13 +409,13 @@ def score(self, predict_fn: Callable, x: np.ndarray) -> np.ndarray:
Parameters
----------
predict_fn
Prediction function
Prediction function.
x
Instance of interest
Instance of interest.
Returns
-------
Linearity measure
Linearity measure.
"""
input_shape = x.shape[1:]
@@ -466,7 +463,7 @@ def linearity_measure(predict_fn: Callable,
feature_range
Array with min and max values for each feature.
method
Method for sampling. Supported values 'knn' or 'grid'.
Method for sampling. Supported values: ``'knn'`` | ``'grid'``.
X_train
Training set.
epsilon
@@ -478,13 +475,13 @@
alphas
Coefficients in the superposition.
agg
Aggregation method. Supported values 'global' or 'pairwise'.
Aggregation method. Supported values: ``'global'`` | ``'pairwise'``.
model_type
Type of task. Supported values 'regressor' or 'classifier'.
Type of task. Supported values: ``'regressor'`` | ``'classifier'``.
Returns
-------
Linearity measure
Linearity measure.
"""
if method == 'knn':
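Tying the two entry points together, a usage sketch (assuming a scikit-learn style classifier with `predict_proba`; the keyword names follow the docstrings above, everything else is placeholder):

```python
import numpy as np
from sklearn.linear_model import LogisticRegression
from alibi.confidence import LinearityMeasure, linearity_measure

X_train = np.random.rand(200, 4)
y_train = (X_train[:, 0] > 0.5).astype(int)
clf = LogisticRegression().fit(X_train, y_train)
x = X_train[:2]                      # instances of interest

# stateful API: fit() stores X_train and the inferred feature_range
lm = LinearityMeasure(method='grid', agg='pairwise', model_type='classifier')
lm.fit(X_train)
print(lm.score(clf.predict_proba, x))

# functional API: training set passed explicitly so 'knn' sampling can use it
print(linearity_measure(clf.predict_proba, x, method='knn',
                        X_train=X_train, agg='global'))
```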