From 62f3b4a288ca292a2b34737ad9af4577e754afee Mon Sep 17 00:00:00 2001 From: CoprDistGit Date: Wed, 10 May 2023 04:29:14 +0000 Subject: automatic import of python-model-inspector --- .gitignore | 1 + python-model-inspector.spec | 2158 +++++++++++++++++++++++++++++++++++++++++++ sources | 1 + 3 files changed, 2160 insertions(+) create mode 100644 python-model-inspector.spec create mode 100644 sources diff --git a/.gitignore b/.gitignore index e69de29..f73b2f8 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1 @@ +/model_inspector-0.27.4.tar.gz diff --git a/python-model-inspector.spec b/python-model-inspector.spec new file mode 100644 index 0000000..64063ff --- /dev/null +++ b/python-model-inspector.spec @@ -0,0 +1,2158 @@ +%global _empty_manifest_terminate_build 0 +Name: python-model-inspector +Version: 0.27.4 +Release: 1 +Summary: Inspect machine learning models +License: Apache Software License 2.0 +URL: https://github.com/gsganden/model_inspector/ +Source0: https://mirrors.nju.edu.cn/pypi/web/packages/b4/0a/b5d157541b43b5ddffa2802dab05149d42e76fb1db8c8e9235cd97805aa7/model_inspector-0.27.4.tar.gz +BuildArch: noarch + +Requires: python3-catboost +Requires: python3-fastcore +Requires: python3-IPython +Requires: python3-matplotlib +Requires: python3-numpy +Requires: python3-pandas +Requires: python3-scikit-learn +Requires: python3-scipy +Requires: python3-seaborn +Requires: python3-tqdm +Requires: python3-waterfallcharts +Requires: python3-black +Requires: python3-flake8 +Requires: python3-ipdb +Requires: python3-jinja2 +Requires: python3-jupyterlab +Requires: python3-nbdev +Requires: python3-nbqa +Requires: python3-twine + +%description + +`model_inspector` aims to help you train better +`scikit-learn`-compatible models by providing insights into their +behavior. +## Use +To use `model_inspector`, you create an `Inspector` object from a +`scikit-learn` model, a feature DataFrame `X`, and a target Series `y`. +Typically you will want to create it on held-out data, as shown below. +``` python +import sklearn.datasets +from sklearn.ensemble import RandomForestRegressor +from sklearn.model_selection import train_test_split +from model_inspector import get_inspector +``` +``` python +X, y = sklearn.datasets.load_diabetes(return_X_y=True, as_frame=True) +``` +``` python +X +``` +
+              age       sex       bmi        bp        s1        s2        s3        s4        s5        s6
+    0    0.038076  0.050680  0.061696  0.021872 -0.044223 -0.034821 -0.043401 -0.002592  0.019907 -0.017646
+    1   -0.001882 -0.044642 -0.051474 -0.026328 -0.008449 -0.019163  0.074412 -0.039493 -0.068332 -0.092204
+    2    0.085299  0.050680  0.044451 -0.005670 -0.045599 -0.034194 -0.032356 -0.002592  0.002861 -0.025930
+    3   -0.089063 -0.044642 -0.011595 -0.036656  0.012191  0.024991 -0.036038  0.034309  0.022688 -0.009362
+    4    0.005383 -0.044642 -0.036385  0.021872  0.003935  0.015596  0.008142 -0.002592 -0.031988 -0.046641
+    ..        ...       ...       ...       ...       ...       ...       ...       ...       ...       ...
+    437  0.041708  0.050680  0.019662  0.059744 -0.005697 -0.002566 -0.028674 -0.002592  0.031193  0.007207
+    438 -0.005515  0.050680 -0.015906 -0.067642  0.049341  0.079165 -0.028674  0.034309 -0.018114  0.044485
+    439  0.041708  0.050680 -0.015906  0.017293 -0.037344 -0.013840 -0.024993 -0.011080 -0.046883  0.015491
+    440 -0.045472 -0.044642  0.039062  0.001215  0.016318  0.015283 -0.028674  0.026560  0.044529 -0.025930
+    441 -0.045472 -0.044642 -0.073030 -0.081413  0.083740  0.027809  0.173816 -0.039493 -0.004222  0.003064
+
+    [442 rows × 10 columns]
+
+``` python +y +``` + 0 151.0 + 1 75.0 + 2 141.0 + 3 206.0 + 4 135.0 + 437 178.0 + 438 104.0 + 439 132.0 + 440 220.0 + 441 57.0 + Name: target, Length: 442, dtype: float64 +``` python +X_train, X_test, y_train, y_test = train_test_split(X, y) +``` +``` python +rfr = RandomForestRegressor().fit(X_train, y_train) +``` +``` python +rfr.score(X_test, y_test) +``` + 0.4145806969881506 +``` python +inspector = get_inspector(rfr, X_test, y_test) +``` +You can then use various methods of `inspector` to learn about how your +model behaves on that data. +The methods that are available for a given inspector depends on the +types of its estimator and its target `y`. An attribute called `methods` +tells you what they are: +``` python +inspector.methods +``` + ['plot_feature_clusters', + 'plot_partial_dependence', + 'permutation_importance', + 'plot_permutation_importance', + 'plot_pred_vs_act', + 'plot_residuals', + 'show_correlation'] +``` python +ax = inspector.plot_feature_clusters() +``` +![](index_files/figure-commonmark/cell-11-output-1.png) +``` python +most_important_features = inspector.permutation_importance().index[:2] +axes = inspector.plot_partial_dependence( + features=[*most_important_features, most_important_features] +) +axes[0, 0].get_figure().set_size_inches(12, 3) +``` +![](index_files/figure-commonmark/cell-12-output-1.png) +``` python +inspector.permutation_importance() +``` + bmi 0.241886 + s5 0.153085 + sex 0.003250 + s3 0.000734 + bp 0.000461 + s4 -0.002687 + s2 -0.004366 + s1 -0.008953 + s6 -0.018925 + age -0.022768 + dtype: float64 +``` python +ax = inspector.plot_permutation_importance() +``` +![](index_files/figure-commonmark/cell-14-output-1.png) +``` python +ax = inspector.plot_pred_vs_act() +``` +![](index_files/figure-commonmark/cell-15-output-1.png) +``` python +axes = inspector.plot_residuals() +``` +![](index_files/figure-commonmark/cell-16-output-1.png) +``` python +inspector.show_correlation() +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+             age   sex   bmi    bp    s1    s2    s3    s4    s5    s6  target
+    age     1.00  0.22  0.18  0.19  0.23  0.18 -0.04  0.19  0.28  0.32    0.13
+    sex     0.22  1.00  0.29  0.31 -0.05  0.08 -0.41  0.30  0.13  0.27    0.27
+    bmi     0.18  0.29  1.00  0.55  0.16  0.18 -0.43  0.45  0.43  0.49    0.66
+    bp      0.19  0.31  0.55  1.00  0.09  0.04 -0.20  0.19  0.36  0.44    0.51
+    s1      0.23 -0.05  0.16  0.09  1.00  0.88  0.07  0.57  0.50  0.26    0.09
+    s2      0.18  0.08  0.18  0.04  0.88  1.00 -0.16  0.66  0.23  0.18    0.09
+    s3     -0.04 -0.41 -0.43 -0.20  0.07 -0.16  1.00 -0.72 -0.37 -0.30   -0.46
+    s4      0.19  0.30  0.45  0.19  0.57  0.66 -0.72  1.00  0.60  0.41    0.41
+    s5      0.28  0.13  0.43  0.36  0.50  0.23 -0.37  0.60  1.00  0.52    0.46
+    s6      0.32  0.27  0.49  0.44  0.26  0.18 -0.30  0.41  0.52  1.00    0.35
+    target  0.13  0.27  0.66  0.51  0.09  0.09 -0.46  0.41  0.46  0.35    1.00
+
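+The same pattern carries over to classification. The sketch below is not part
+of the original walkthrough: it relies only on the documented `get_inspector`
+entry point and `methods` attribute, and the breast cancer dataset and
+`RandomForestClassifier` are illustrative choices. The exact method list for a
+classifier is not shown here and may differ from the regression example above.
+``` python
+from sklearn.datasets import load_breast_cancer
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.model_selection import train_test_split
+
+from model_inspector import get_inspector
+
+# Binary classification data instead of the regression target used above
+X_clf, y_clf = load_breast_cancer(return_X_y=True, as_frame=True)
+X_tr, X_te, y_tr, y_te = train_test_split(X_clf, y_clf, random_state=0)
+
+clf = RandomForestClassifier(random_state=0).fit(X_tr, y_tr)
+
+# The inspector exposes whichever methods suit this estimator and target type
+clf_inspector = get_inspector(clf, X_te, y_te)
+clf_inspector.methods
+```
+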
+## Scope +`model_inspector` makes some attempt to support estimators from popular +libraries other than `scikit-learn` that mimic the `scikit-learn` +interface. The following estimators are specifically supported: +- From `catboost`: + - `CatBoostClassifier` + - `CatBoostRegressor` +- From `lightgbm`: + - `LGBMClassifier` + - `LGBMRegressor` +- From `xgboost`: + - `XGBClassifier` + - `XGBRegressor` +## Install +`pip install model_inspector` +## Alternatives +### Yellowbrick +[Yellowbrick](https://www.scikit-yb.org/en/latest/) is similar to Model +Inspector in that it provides tools for visualizing the behavior of +`scikit-learn` models. +The two libraries have different designs. Yellowbrick uses `Visualizer` +objects, each class of which corresponds to a single type of +visualization. The `Visualizer` interface is similar to the +`scikit-learn` transformer and estimator interfaces. In constrast, +`model_inspector` uses `Inspector` objects that bundle together a +`scikit-learn` model, an `X` feature DataFrame, and a `y` target Series. +The `Inspector` object does the work of identifying appropriate +visualization types for the specific model and dataset in question and +exposing corresponding methods, making it easy to visualize a given +model for a given dataset in a variety of ways. +Another fundamental difference is that Yellowbrick is framed as a +machine learning *visualization* library, while Model Inspector treats +visualization as just one approach to inspecting the behavior of machine +learning models. +### SHAP +[SHAP](https://github.com/slundberg/shap) is another library that +provides a set of tools for understanding the behavior of machine +learning models. It has a somewhat similar design to Model Inspector in +that it uses `Explainer` objects to provide access to methods that are +appropriate for a given model. It has broader scope than Model Inspector +in that it supports models from frameworks such as PyTorch and +TensorFlow. It has narrower scope in that it only implements methods +based on Shapley values. +## Acknowledgments +Many aspects of this library were inspired by [FastAI +courses](https://course.fast.ai/), including bundling together a model +with data in a class and providing certain specific visualization +methods such as feature importance bar plots, feature clusters +dendrograms, tree diagrams, waterfall plots, and partial dependence +plots. Its primary contribution is to make all of these methods +available in a single convenient interface. + +%package -n python3-model-inspector +Summary: Inspect machine learning models +Provides: python-model-inspector +BuildRequires: python3-devel +BuildRequires: python3-setuptools +BuildRequires: python3-pip +%description -n python3-model-inspector + +`model_inspector` aims to help you train better +`scikit-learn`-compatible models by providing insights into their +behavior. +## Use +To use `model_inspector`, you create an `Inspector` object from a +`scikit-learn` model, a feature DataFrame `X`, and a target Series `y`. +Typically you will want to create it on held-out data, as shown below. +``` python +import sklearn.datasets +from sklearn.ensemble import RandomForestRegressor +from sklearn.model_selection import train_test_split +from model_inspector import get_inspector +``` +``` python +X, y = sklearn.datasets.load_diabetes(return_X_y=True, as_frame=True) +``` +``` python +X +``` +
+              age       sex       bmi        bp        s1        s2        s3        s4        s5        s6
+    0    0.038076  0.050680  0.061696  0.021872 -0.044223 -0.034821 -0.043401 -0.002592  0.019907 -0.017646
+    1   -0.001882 -0.044642 -0.051474 -0.026328 -0.008449 -0.019163  0.074412 -0.039493 -0.068332 -0.092204
+    2    0.085299  0.050680  0.044451 -0.005670 -0.045599 -0.034194 -0.032356 -0.002592  0.002861 -0.025930
+    3   -0.089063 -0.044642 -0.011595 -0.036656  0.012191  0.024991 -0.036038  0.034309  0.022688 -0.009362
+    4    0.005383 -0.044642 -0.036385  0.021872  0.003935  0.015596  0.008142 -0.002592 -0.031988 -0.046641
+    ..        ...       ...       ...       ...       ...       ...       ...       ...       ...       ...
+    437  0.041708  0.050680  0.019662  0.059744 -0.005697 -0.002566 -0.028674 -0.002592  0.031193  0.007207
+    438 -0.005515  0.050680 -0.015906 -0.067642  0.049341  0.079165 -0.028674  0.034309 -0.018114  0.044485
+    439  0.041708  0.050680 -0.015906  0.017293 -0.037344 -0.013840 -0.024993 -0.011080 -0.046883  0.015491
+    440 -0.045472 -0.044642  0.039062  0.001215  0.016318  0.015283 -0.028674  0.026560  0.044529 -0.025930
+    441 -0.045472 -0.044642 -0.073030 -0.081413  0.083740  0.027809  0.173816 -0.039493 -0.004222  0.003064
+
+    [442 rows × 10 columns]
+
+``` python +y +``` + 0 151.0 + 1 75.0 + 2 141.0 + 3 206.0 + 4 135.0 + 437 178.0 + 438 104.0 + 439 132.0 + 440 220.0 + 441 57.0 + Name: target, Length: 442, dtype: float64 +``` python +X_train, X_test, y_train, y_test = train_test_split(X, y) +``` +``` python +rfr = RandomForestRegressor().fit(X_train, y_train) +``` +``` python +rfr.score(X_test, y_test) +``` + 0.4145806969881506 +``` python +inspector = get_inspector(rfr, X_test, y_test) +``` +You can then use various methods of `inspector` to learn about how your +model behaves on that data. +The methods that are available for a given inspector depends on the +types of its estimator and its target `y`. An attribute called `methods` +tells you what they are: +``` python +inspector.methods +``` + ['plot_feature_clusters', + 'plot_partial_dependence', + 'permutation_importance', + 'plot_permutation_importance', + 'plot_pred_vs_act', + 'plot_residuals', + 'show_correlation'] +``` python +ax = inspector.plot_feature_clusters() +``` +![](index_files/figure-commonmark/cell-11-output-1.png) +``` python +most_important_features = inspector.permutation_importance().index[:2] +axes = inspector.plot_partial_dependence( + features=[*most_important_features, most_important_features] +) +axes[0, 0].get_figure().set_size_inches(12, 3) +``` +![](index_files/figure-commonmark/cell-12-output-1.png) +``` python +inspector.permutation_importance() +``` + bmi 0.241886 + s5 0.153085 + sex 0.003250 + s3 0.000734 + bp 0.000461 + s4 -0.002687 + s2 -0.004366 + s1 -0.008953 + s6 -0.018925 + age -0.022768 + dtype: float64 +``` python +ax = inspector.plot_permutation_importance() +``` +![](index_files/figure-commonmark/cell-14-output-1.png) +``` python +ax = inspector.plot_pred_vs_act() +``` +![](index_files/figure-commonmark/cell-15-output-1.png) +``` python +axes = inspector.plot_residuals() +``` +![](index_files/figure-commonmark/cell-16-output-1.png) +``` python +inspector.show_correlation() +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+             age   sex   bmi    bp    s1    s2    s3    s4    s5    s6  target
+    age     1.00  0.22  0.18  0.19  0.23  0.18 -0.04  0.19  0.28  0.32    0.13
+    sex     0.22  1.00  0.29  0.31 -0.05  0.08 -0.41  0.30  0.13  0.27    0.27
+    bmi     0.18  0.29  1.00  0.55  0.16  0.18 -0.43  0.45  0.43  0.49    0.66
+    bp      0.19  0.31  0.55  1.00  0.09  0.04 -0.20  0.19  0.36  0.44    0.51
+    s1      0.23 -0.05  0.16  0.09  1.00  0.88  0.07  0.57  0.50  0.26    0.09
+    s2      0.18  0.08  0.18  0.04  0.88  1.00 -0.16  0.66  0.23  0.18    0.09
+    s3     -0.04 -0.41 -0.43 -0.20  0.07 -0.16  1.00 -0.72 -0.37 -0.30   -0.46
+    s4      0.19  0.30  0.45  0.19  0.57  0.66 -0.72  1.00  0.60  0.41    0.41
+    s5      0.28  0.13  0.43  0.36  0.50  0.23 -0.37  0.60  1.00  0.52    0.46
+    s6      0.32  0.27  0.49  0.44  0.26  0.18 -0.30  0.41  0.52  1.00    0.35
+    target  0.13  0.27  0.66  0.51  0.09  0.09 -0.46  0.41  0.46  0.35    1.00
+
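+The plotting methods return standard `matplotlib` objects, so the figures above
+can be written to disk with ordinary `matplotlib` calls. The sketch below uses
+only `matplotlib`'s public API and reuses the `inspector` from the walkthrough;
+the file names are arbitrary, and no extra `model_inspector` functionality is
+assumed.
+``` python
+# Each call returns a matplotlib Axes, as in the walkthrough above
+ax = inspector.plot_permutation_importance()
+ax.get_figure().savefig("permutation_importance.png", dpi=150, bbox_inches="tight")
+
+ax = inspector.plot_pred_vs_act()
+ax.get_figure().savefig("pred_vs_act.png", dpi=150, bbox_inches="tight")
+```
+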
+## Scope +`model_inspector` makes some attempt to support estimators from popular +libraries other than `scikit-learn` that mimic the `scikit-learn` +interface. The following estimators are specifically supported: +- From `catboost`: + - `CatBoostClassifier` + - `CatBoostRegressor` +- From `lightgbm`: + - `LGBMClassifier` + - `LGBMRegressor` +- From `xgboost`: + - `XGBClassifier` + - `XGBRegressor` +## Install +`pip install model_inspector` +## Alternatives +### Yellowbrick +[Yellowbrick](https://www.scikit-yb.org/en/latest/) is similar to Model +Inspector in that it provides tools for visualizing the behavior of +`scikit-learn` models. +The two libraries have different designs. Yellowbrick uses `Visualizer` +objects, each class of which corresponds to a single type of +visualization. The `Visualizer` interface is similar to the +`scikit-learn` transformer and estimator interfaces. In constrast, +`model_inspector` uses `Inspector` objects that bundle together a +`scikit-learn` model, an `X` feature DataFrame, and a `y` target Series. +The `Inspector` object does the work of identifying appropriate +visualization types for the specific model and dataset in question and +exposing corresponding methods, making it easy to visualize a given +model for a given dataset in a variety of ways. +Another fundamental difference is that Yellowbrick is framed as a +machine learning *visualization* library, while Model Inspector treats +visualization as just one approach to inspecting the behavior of machine +learning models. +### SHAP +[SHAP](https://github.com/slundberg/shap) is another library that +provides a set of tools for understanding the behavior of machine +learning models. It has a somewhat similar design to Model Inspector in +that it uses `Explainer` objects to provide access to methods that are +appropriate for a given model. It has broader scope than Model Inspector +in that it supports models from frameworks such as PyTorch and +TensorFlow. It has narrower scope in that it only implements methods +based on Shapley values. +## Acknowledgments +Many aspects of this library were inspired by [FastAI +courses](https://course.fast.ai/), including bundling together a model +with data in a class and providing certain specific visualization +methods such as feature importance bar plots, feature clusters +dendrograms, tree diagrams, waterfall plots, and partial dependence +plots. Its primary contribution is to make all of these methods +available in a single convenient interface. + +%package help +Summary: Development documents and examples for model-inspector +Provides: python3-model-inspector-doc +%description help + +`model_inspector` aims to help you train better +`scikit-learn`-compatible models by providing insights into their +behavior. +## Use +To use `model_inspector`, you create an `Inspector` object from a +`scikit-learn` model, a feature DataFrame `X`, and a target Series `y`. +Typically you will want to create it on held-out data, as shown below. +``` python +import sklearn.datasets +from sklearn.ensemble import RandomForestRegressor +from sklearn.model_selection import train_test_split +from model_inspector import get_inspector +``` +``` python +X, y = sklearn.datasets.load_diabetes(return_X_y=True, as_frame=True) +``` +``` python +X +``` +
+              age       sex       bmi        bp        s1        s2        s3        s4        s5        s6
+    0    0.038076  0.050680  0.061696  0.021872 -0.044223 -0.034821 -0.043401 -0.002592  0.019907 -0.017646
+    1   -0.001882 -0.044642 -0.051474 -0.026328 -0.008449 -0.019163  0.074412 -0.039493 -0.068332 -0.092204
+    2    0.085299  0.050680  0.044451 -0.005670 -0.045599 -0.034194 -0.032356 -0.002592  0.002861 -0.025930
+    3   -0.089063 -0.044642 -0.011595 -0.036656  0.012191  0.024991 -0.036038  0.034309  0.022688 -0.009362
+    4    0.005383 -0.044642 -0.036385  0.021872  0.003935  0.015596  0.008142 -0.002592 -0.031988 -0.046641
+    ..        ...       ...       ...       ...       ...       ...       ...       ...       ...       ...
+    437  0.041708  0.050680  0.019662  0.059744 -0.005697 -0.002566 -0.028674 -0.002592  0.031193  0.007207
+    438 -0.005515  0.050680 -0.015906 -0.067642  0.049341  0.079165 -0.028674  0.034309 -0.018114  0.044485
+    439  0.041708  0.050680 -0.015906  0.017293 -0.037344 -0.013840 -0.024993 -0.011080 -0.046883  0.015491
+    440 -0.045472 -0.044642  0.039062  0.001215  0.016318  0.015283 -0.028674  0.026560  0.044529 -0.025930
+    441 -0.045472 -0.044642 -0.073030 -0.081413  0.083740  0.027809  0.173816 -0.039493 -0.004222  0.003064
+
+    [442 rows × 10 columns]
+
+``` python +y +``` + 0 151.0 + 1 75.0 + 2 141.0 + 3 206.0 + 4 135.0 + 437 178.0 + 438 104.0 + 439 132.0 + 440 220.0 + 441 57.0 + Name: target, Length: 442, dtype: float64 +``` python +X_train, X_test, y_train, y_test = train_test_split(X, y) +``` +``` python +rfr = RandomForestRegressor().fit(X_train, y_train) +``` +``` python +rfr.score(X_test, y_test) +``` + 0.4145806969881506 +``` python +inspector = get_inspector(rfr, X_test, y_test) +``` +You can then use various methods of `inspector` to learn about how your +model behaves on that data. +The methods that are available for a given inspector depends on the +types of its estimator and its target `y`. An attribute called `methods` +tells you what they are: +``` python +inspector.methods +``` + ['plot_feature_clusters', + 'plot_partial_dependence', + 'permutation_importance', + 'plot_permutation_importance', + 'plot_pred_vs_act', + 'plot_residuals', + 'show_correlation'] +``` python +ax = inspector.plot_feature_clusters() +``` +![](index_files/figure-commonmark/cell-11-output-1.png) +``` python +most_important_features = inspector.permutation_importance().index[:2] +axes = inspector.plot_partial_dependence( + features=[*most_important_features, most_important_features] +) +axes[0, 0].get_figure().set_size_inches(12, 3) +``` +![](index_files/figure-commonmark/cell-12-output-1.png) +``` python +inspector.permutation_importance() +``` + bmi 0.241886 + s5 0.153085 + sex 0.003250 + s3 0.000734 + bp 0.000461 + s4 -0.002687 + s2 -0.004366 + s1 -0.008953 + s6 -0.018925 + age -0.022768 + dtype: float64 +``` python +ax = inspector.plot_permutation_importance() +``` +![](index_files/figure-commonmark/cell-14-output-1.png) +``` python +ax = inspector.plot_pred_vs_act() +``` +![](index_files/figure-commonmark/cell-15-output-1.png) +``` python +axes = inspector.plot_residuals() +``` +![](index_files/figure-commonmark/cell-16-output-1.png) +``` python +inspector.show_correlation() +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+             age   sex   bmi    bp    s1    s2    s3    s4    s5    s6  target
+    age     1.00  0.22  0.18  0.19  0.23  0.18 -0.04  0.19  0.28  0.32    0.13
+    sex     0.22  1.00  0.29  0.31 -0.05  0.08 -0.41  0.30  0.13  0.27    0.27
+    bmi     0.18  0.29  1.00  0.55  0.16  0.18 -0.43  0.45  0.43  0.49    0.66
+    bp      0.19  0.31  0.55  1.00  0.09  0.04 -0.20  0.19  0.36  0.44    0.51
+    s1      0.23 -0.05  0.16  0.09  1.00  0.88  0.07  0.57  0.50  0.26    0.09
+    s2      0.18  0.08  0.18  0.04  0.88  1.00 -0.16  0.66  0.23  0.18    0.09
+    s3     -0.04 -0.41 -0.43 -0.20  0.07 -0.16  1.00 -0.72 -0.37 -0.30   -0.46
+    s4      0.19  0.30  0.45  0.19  0.57  0.66 -0.72  1.00  0.60  0.41    0.41
+    s5      0.28  0.13  0.43  0.36  0.50  0.23 -0.37  0.60  1.00  0.52    0.46
+    s6      0.32  0.27  0.49  0.44  0.26  0.18 -0.30  0.41  0.52  1.00    0.35
+    target  0.13  0.27  0.66  0.51  0.09  0.09 -0.46  0.41  0.46  0.35    1.00
+
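+The same `get_inspector` call also accepts the third-party estimators listed in
+the Scope section below. The sketch that follows is illustrative only: it
+assumes `xgboost` is installed, reuses the diabetes split from the walkthrough,
+and the `XGBRegressor` settings are arbitrary.
+``` python
+from xgboost import XGBRegressor
+
+# Any supported scikit-learn-style estimator can back an Inspector
+xgb = XGBRegressor(n_estimators=200, max_depth=3).fit(X_train, y_train)
+
+xgb_inspector = get_inspector(xgb, X_test, y_test)
+# List the inspection methods available for this estimator and target
+xgb_inspector.methods
+```
+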
+## Scope +`model_inspector` makes some attempt to support estimators from popular +libraries other than `scikit-learn` that mimic the `scikit-learn` +interface. The following estimators are specifically supported: +- From `catboost`: + - `CatBoostClassifier` + - `CatBoostRegressor` +- From `lightgbm`: + - `LGBMClassifier` + - `LGBMRegressor` +- From `xgboost`: + - `XGBClassifier` + - `XGBRegressor` +## Install +`pip install model_inspector` +## Alternatives +### Yellowbrick +[Yellowbrick](https://www.scikit-yb.org/en/latest/) is similar to Model +Inspector in that it provides tools for visualizing the behavior of +`scikit-learn` models. +The two libraries have different designs. Yellowbrick uses `Visualizer` +objects, each class of which corresponds to a single type of +visualization. The `Visualizer` interface is similar to the +`scikit-learn` transformer and estimator interfaces. In constrast, +`model_inspector` uses `Inspector` objects that bundle together a +`scikit-learn` model, an `X` feature DataFrame, and a `y` target Series. +The `Inspector` object does the work of identifying appropriate +visualization types for the specific model and dataset in question and +exposing corresponding methods, making it easy to visualize a given +model for a given dataset in a variety of ways. +Another fundamental difference is that Yellowbrick is framed as a +machine learning *visualization* library, while Model Inspector treats +visualization as just one approach to inspecting the behavior of machine +learning models. +### SHAP +[SHAP](https://github.com/slundberg/shap) is another library that +provides a set of tools for understanding the behavior of machine +learning models. It has a somewhat similar design to Model Inspector in +that it uses `Explainer` objects to provide access to methods that are +appropriate for a given model. It has broader scope than Model Inspector +in that it supports models from frameworks such as PyTorch and +TensorFlow. It has narrower scope in that it only implements methods +based on Shapley values. +## Acknowledgments +Many aspects of this library were inspired by [FastAI +courses](https://course.fast.ai/), including bundling together a model +with data in a class and providing certain specific visualization +methods such as feature importance bar plots, feature clusters +dendrograms, tree diagrams, waterfall plots, and partial dependence +plots. Its primary contribution is to make all of these methods +available in a single convenient interface. + +%prep +%autosetup -n model-inspector-0.27.4 + +%build +%py3_build + +%install +%py3_install +install -d -m755 %{buildroot}/%{_pkgdocdir} +if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi +if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi +if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi +if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi +pushd %{buildroot} +if [ -d usr/lib ]; then + find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/lib64 ]; then + find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/bin ]; then + find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/sbin ]; then + find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst +fi +touch doclist.lst +if [ -d usr/share/man ]; then + find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst +fi +popd +mv %{buildroot}/filelist.lst . +mv %{buildroot}/doclist.lst . 
+ +%files -n python3-model-inspector -f filelist.lst +%dir %{python3_sitelib}/* + +%files help -f doclist.lst +%{_docdir}/* + +%changelog +* Wed May 10 2023 Python_Bot - 0.27.4-1 +- Package Spec generated diff --git a/sources b/sources new file mode 100644 index 0000000..783e999 --- /dev/null +++ b/sources @@ -0,0 +1 @@ +eb4d27e8b63050aafa897d107e54dd7b model_inspector-0.27.4.tar.gz -- cgit v1.2.3