nirdizati_light.explanation.common

 1from enum import Enum
 2
 3from nirdizati_light.explanation.wrappers.dice_wrapper import dice_explain
 4from nirdizati_light.explanation.wrappers.ice_wrapper import ice_explain
 5from nirdizati_light.explanation.wrappers.shap_wrapper import shap_explain
 6
 7class ExplainerType(Enum):
 8    SHAP = 'shap'
 9    # ICE = 'ice'
10    DICE = 'dice'
11
12
def explain(CONF, predictive_model, encoder, test_df=None, df=None, query_instances=None, target_trace_id=None,
            method=None, optimization=None, heuristic=None, support=0.9, timestamp_col_name=None,
            model_path=None, random_seed=None, adapted=None, filtering=None):
    """Generate an explanation based on the configuration provided in ``CONF``.

    Dispatches on ``CONF['explanator']`` to the matching wrapper
    (:data:`ExplainerType.SHAP` or :data:`ExplainerType.DICE`).

    :param dict CONF: configuration dictionary; ``CONF['explanator']`` selects the explainer
    :param nirdizati_light.predictive_model.PredictiveModel predictive_model: predictive model to explain
    :param nirdizati_light.encoding.data_encoder.Encoder encoder: encoder used for encoding the log
    :param pandas.DataFrame test_df: test data to evaluate the model (used by SHAP)
    :param pandas.DataFrame df: full dataset (used by DiCE)
    :param pandas.DataFrame query_instances: instances to explain (used by DiCE)
    :param str target_trace_id: trace id to explain
    :param str method: method to use for explanation
    :param str optimization: optimization method to use for explanation
    :param str heuristic: heuristic to use for counterfactual explanation
    :param float support: support for Declare model discovery for knowledge-aware methods
    :param str timestamp_col_name: name of the timestamp column in the log
    :param str model_path: path to save the discovered Declare model
    :param int random_seed: random seed for reproducibility
    :param bool adapted: whether to use the knowledge-aware counterfactual generation method
    :param bool filtering: whether to use filtering for counterfactual explanation
    :return: a list of explanations — feature importances (SHAP) or counterfactuals (DiCE);
        ``None`` when ``CONF['explanator']`` matches no supported explainer
    """
    explainer = CONF['explanator']
    # Compare by equality, not identity: `is` on strings relies on CPython
    # interning and can silently be False for equal strings, in which case
    # no explainer would ever be dispatched.
    if explainer == ExplainerType.SHAP.value:
        return shap_explain(CONF, predictive_model, encoder, test_df, target_trace_id=target_trace_id)
    # elif explainer == ExplainerType.ICE.value:
    #     return ice_explain(CONF, predictive_model, encoder, target_df=test_df, explanation_target=column)
    if explainer == ExplainerType.DICE.value:
        return dice_explain(CONF, predictive_model, encoder=encoder, df=df, query_instances=query_instances,
                            method=method, optimization=optimization,
                            heuristic=heuristic, support=support, timestamp_col_name=timestamp_col_name,
                            model_path=model_path, random_seed=random_seed, adapted=adapted,
                            filtering=filtering)
class ExplainerType(Enum):
    """Enumeration of the explainer backends selectable via ``CONF['explanator']``."""

    SHAP = 'shap'
    # ICE = 'ice'
    DICE = 'dice'

Create a collection of name/value pairs.

Example enumeration:

>>> class Color(Enum):
...     RED = 1
...     BLUE = 2
...     GREEN = 3

Access them by:

  • attribute access::
>>> Color.RED
<Color.RED: 1>
  • value lookup:
>>> Color(1)
<Color.RED: 1>
  • name lookup:
>>> Color['RED']
<Color.RED: 1>

Enumerations can be iterated over, and know how many members they have:

>>> len(Color)
3
>>> list(Color)
[<Color.RED: 1>, <Color.BLUE: 2>, <Color.GREEN: 3>]

Methods can be added to enumerations, and members can have their own attributes -- see the documentation for details.

SHAP = <ExplainerType.SHAP: 'shap'>
DICE = <ExplainerType.DICE: 'dice'>
Inherited Members
enum.Enum
name
value
def explain(CONF, predictive_model, encoder, test_df=None, df=None, query_instances=None, target_trace_id=None,
            method=None, optimization=None, heuristic=None, support=0.9, timestamp_col_name=None,
            model_path=None, random_seed=None, adapted=None, filtering=None):
    """Generate an explanation based on the configuration provided in ``CONF``.

    :param dict CONF: configuration dictionary; ``CONF['explanator']`` selects the explainer
    :param nirdizati_light.predictive_model.PredictiveModel predictive_model: predictive model to explain
    :param nirdizati_light.encoding.data_encoder.Encoder encoder: encoder used for encoding the log
    :param pandas.DataFrame test_df: test data to evaluate the model (used by SHAP)
    :param pandas.DataFrame df: full dataset (used by DiCE)
    :param pandas.DataFrame query_instances: instances to explain (used by DiCE)
    :param str target_trace_id: trace id to explain
    :param str method: method to use for explanation
    :param str optimization: optimization method to use for explanation
    :param str heuristic: heuristic to use for counterfactual explanation
    :param float support: support for Declare model discovery for knowledge-aware methods
    :param str timestamp_col_name: name of the timestamp column in the log
    :param str model_path: path to save the discovered Declare model
    :param int random_seed: random seed for reproducibility
    :param bool adapted: whether to use the knowledge-aware counterfactual generation method
    :param bool filtering: whether to use filtering for counterfactual explanation
    :return: a list of explanations — feature importances (SHAP) or counterfactuals (DiCE);
        ``None`` when ``CONF['explanator']`` matches no supported explainer
    """
    explainer = CONF['explanator']
    # Use `==` rather than `is`: identity comparison of strings is an
    # interning accident and can make every branch silently fail.
    if explainer == ExplainerType.SHAP.value:
        return shap_explain(CONF, predictive_model, encoder, test_df, target_trace_id=target_trace_id)
    # elif explainer == ExplainerType.ICE.value:
    #     return ice_explain(CONF, predictive_model, encoder, target_df=test_df, explanation_target=column)
    if explainer == ExplainerType.DICE.value:
        return dice_explain(CONF, predictive_model, encoder=encoder, df=df, query_instances=query_instances,
                            method=method, optimization=optimization,
                            heuristic=heuristic, support=support, timestamp_col_name=timestamp_col_name,
                            model_path=model_path, random_seed=random_seed, adapted=adapted,
                            filtering=filtering)

Generate explanation based on the configuration provided in the CONF dictionary.

Parameters
  • dict CONF: dictionary for configuring the encoding
  • nirdizati_light.predictive_model.PredictiveModel predictive_model: predictive model to explain
  • nirdizati_light.encoding.data_encoder.Encoder encoder: encoder to use for encoding the log
  • pandas.DataFrame test_df: test data to evaluate the model
  • pandas.DataFrame df: full dataset
  • pandas.DataFrame query_instances: instances to explain
  • str target_trace_id: trace id to explain
  • str method: method to use for explanation
  • str optimization: optimization method to use for explanation
  • str heuristic: heuristic to use for counterfactual explanation
  • float support: support for Declare model discovery for Knowledge-Aware methods
  • str timestamp_col_name: name of the timestamp column in the log
  • str model_path: path to save the discovered Declare model
  • int random_seed: random seed for reproducibility
  • bool adapted: whether to use Knowledge aware counterfactual generation method
  • bool filtering: whether to use filtering for counterfactual explanation
  • nirdizati_light.encoding.data_encoder.Encoder encoder: if an encoder is provided, that encoder will be used instead of creating a new one
Returns

A list of explanations, either by providing the feature importance or the counterfactual explanations