forked from explainX/explainx
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexplain.py
More file actions
57 lines (32 loc) · 1.07 KB
/
explain.py
File metadata and controls
57 lines (32 loc) · 1.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from .imports import *
from .dashboard import *
from .calculate_shap import *
"""
This class calculates feature importance
Input:
"""
class explain():
    """Compute SHAP-based feature importances for a fitted model and
    launch the interactive explanation dashboard."""

    def __init__(self):
        super(explain, self).__init__()
        # Placeholder for future configuration; currently unused.
        self.param = None

    def ai(self, df, y, model, model_name="xgboost", mode=None):
        """Explain `model`'s behavior on `df` and open the dashboard.

        Computes SHAP values via `calculate_shap`, appends a prediction
        column and the true-label column to the result, then hands the
        final frame to `dashboard.find`.

        Args:
            df: feature data the model was trained/evaluated on
                (presumably a pandas DataFrame — it must support
                `.to_numpy()` for the catboost path; TODO confirm).
            y: true labels aligned row-for-row with `df`.
            model: the fitted model object.
            model_name: "xgboost" or "catboost"; selects how raw
                predictions are obtained. Any other value leaves the
                prediction column unset — NOTE(review): the dashboard
                may then fail; confirm whether other model names are
                supported upstream in `calculate_shap`.
            mode: forwarded unchanged to `dashboard.find`.

        Returns:
            True once the dashboard has been launched.
        """
        y_variable = "y_variable"
        y_variable_predict = "prediction"

        # SHAP values for every row of df.
        c = calculate_shap()
        self.df_final = c.find(model, df, model_name=model_name)

        # Prediction column. The branches are mutually exclusive, so
        # use elif (the original ran both comparisons unconditionally).
        if model_name == "xgboost":
            # xgboost Booster.predict requires a DMatrix wrapper.
            self.df_final[y_variable_predict] = model.predict(xgboost.DMatrix(df))
        elif model_name == "catboost":
            self.df_final[y_variable_predict] = model.predict(df.to_numpy())

        # Ground-truth labels alongside predictions and SHAP values.
        self.df_final[y_variable] = y

        # Launch the interactive dashboard on the assembled frame.
        d = dashboard()
        d.find(self.df_final, y_variable, y_variable_predict, mode)
        return True

    def dataset_boston(self):
        """Return the Boston housing sample dataset as (X, y).

        Also initializes SHAP's JS visualization layer for notebooks.
        NOTE(review): `shap.datasets.boston()` was removed from recent
        shap releases — confirm the pinned shap version still ships it.
        """
        # load JS visualization code to notebook
        shap.initjs()
        X, y = shap.datasets.boston()
        return X, y
explainx=explain()