-rw-r--r--  .gitignore            |   1
-rw-r--r--  python-mlmodels.spec  | 182
-rw-r--r--  sources               |   1
3 files changed, 184 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/mlmodels-0.38.1.tar.gz
diff --git a/python-mlmodels.spec b/python-mlmodels.spec
new file mode 100644
index 0000000..2ac8410
--- /dev/null
+++ b/python-mlmodels.spec
@@ -0,0 +1,182 @@
+%global _empty_manifest_terminate_build 0
+Name: python-mlmodels
+Version: 0.38.1
+Release: 1
+Summary: Generic model API, Model Zoo in Tensorflow, Keras, Pytorch, Hyperparameter search
+License: Apache Software License
+URL: https://github.com/arita37/mlmodels
+Source0: https://mirrors.aliyun.com/pypi/web/packages/8a/69/23f54dc4af5166b555115d1f50b460c2f87462ab44df92d1debfcc3051d7/mlmodels-0.38.1.tar.gz
+BuildArch: noarch
+
+Requires: python3-numpy
+Requires: python3-pandas
+Requires: python3-scipy
+Requires: python3-scikit-learn
+Requires: python3-numexpr
+Requires: python3-sqlalchemy
+Requires: python3-tensorflow
+Requires: python3-pytorch
+Requires: python3-optuna
+Requires: python3-lightgbm
+Requires: python3-mlflow
+
+%description
+### AutoML example in Gluon ([Example notebook](mlmodels/example/gluon_automl.ipynb))
+```python
+# import library
+import mlmodels
+import autogluon as ag
+#### Define model and data definitions
+model_uri = "model_gluon.gluon_automl.py"
+data_pars = {"train": True, "uri_type": "amazon_aws", "dt_name": "Inc"}
+model_pars = {"model_type": "tabular",
+              "learning_rate": ag.space.Real(1e-4, 1e-2, default=5e-4, log=True),
+              "activation": ag.space.Categorical(*tuple(["relu", "softrelu", "tanh"])),
+              "layers": ag.space.Categorical(
+                  *tuple([[100], [1000], [200, 100], [300, 200, 100]])),
+              'dropout_prob': ag.space.Real(0.0, 0.5, default=0.1),
+              'num_boost_round': 10,
+              'num_leaves': ag.space.Int(lower=26, upper=30, default=36)
+              }
+compute_pars = {
+    "hp_tune": True,
+    "num_epochs": 10,
+    "time_limits": 120,
+    "num_trials": 5,
+    "search_strategy": "skopt"
+}
+out_pars = {
+    "out_path": "dataset/"
+}
+#### Load Parameters and Train
+from mlmodels.models import module_load
+module = module_load( model_uri= model_uri )                               # Load file definition
+model = module.Model(model_pars=model_pars, compute_pars=compute_pars)     # Create Model instance
+model, sess = module.fit(model, data_pars=data_pars, model_pars=model_pars, compute_pars=compute_pars, out_pars=out_pars)
+#### Inference
+ypred = module.predict(model, data_pars, compute_pars, out_pars)           # predict pipeline
+
+%package -n python3-mlmodels
+Summary: Generic model API, Model Zoo in Tensorflow, Keras, Pytorch, Hyperparameter search
+Provides: python-mlmodels
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-mlmodels
+### AutoML example in Gluon ([Example notebook](mlmodels/example/gluon_automl.ipynb))
+```python
+# import library
+import mlmodels
+import autogluon as ag
+#### Define model and data definitions
+model_uri = "model_gluon.gluon_automl.py"
+data_pars = {"train": True, "uri_type": "amazon_aws", "dt_name": "Inc"}
+model_pars = {"model_type": "tabular",
+              "learning_rate": ag.space.Real(1e-4, 1e-2, default=5e-4, log=True),
+              "activation": ag.space.Categorical(*tuple(["relu", "softrelu", "tanh"])),
+              "layers": ag.space.Categorical(
+                  *tuple([[100], [1000], [200, 100], [300, 200, 100]])),
+              'dropout_prob': ag.space.Real(0.0, 0.5, default=0.1),
+              'num_boost_round': 10,
+              'num_leaves': ag.space.Int(lower=26, upper=30, default=36)
+              }
+compute_pars = {
+    "hp_tune": True,
+    "num_epochs": 10,
+    "time_limits": 120,
+    "num_trials": 5,
+    "search_strategy": "skopt"
+}
+out_pars = {
+    "out_path": "dataset/"
+}
+#### Load Parameters and Train
+from mlmodels.models import module_load
+module = module_load( model_uri= model_uri )                               # Load file definition
+model = module.Model(model_pars=model_pars, compute_pars=compute_pars)     # Create Model instance
+model, sess = module.fit(model, data_pars=data_pars, model_pars=model_pars, compute_pars=compute_pars, out_pars=out_pars)
+#### Inference
+ypred = module.predict(model, data_pars, compute_pars, out_pars)           # predict pipeline
+
+%package help
+Summary: Development documents and examples for mlmodels
+Provides: python3-mlmodels-doc
+%description help
+### AutoML example in Gluon ([Example notebook](mlmodels/example/gluon_automl.ipynb))
+```python
+# import library
+import mlmodels
+import autogluon as ag
+#### Define model and data definitions
+model_uri = "model_gluon.gluon_automl.py"
+data_pars = {"train": True, "uri_type": "amazon_aws", "dt_name": "Inc"}
+model_pars = {"model_type": "tabular",
+              "learning_rate": ag.space.Real(1e-4, 1e-2, default=5e-4, log=True),
+              "activation": ag.space.Categorical(*tuple(["relu", "softrelu", "tanh"])),
+              "layers": ag.space.Categorical(
+                  *tuple([[100], [1000], [200, 100], [300, 200, 100]])),
+              'dropout_prob': ag.space.Real(0.0, 0.5, default=0.1),
+              'num_boost_round': 10,
+              'num_leaves': ag.space.Int(lower=26, upper=30, default=36)
+              }
+compute_pars = {
+    "hp_tune": True,
+    "num_epochs": 10,
+    "time_limits": 120,
+    "num_trials": 5,
+    "search_strategy": "skopt"
+}
+out_pars = {
+    "out_path": "dataset/"
+}
+#### Load Parameters and Train
+from mlmodels.models import module_load
+module = module_load( model_uri= model_uri )                               # Load file definition
+model = module.Model(model_pars=model_pars, compute_pars=compute_pars)     # Create Model instance
+model, sess = module.fit(model, data_pars=data_pars, model_pars=model_pars, compute_pars=compute_pars, out_pars=out_pars)
+#### Inference
+ypred = module.predict(model, data_pars, compute_pars, out_pars)           # predict pipeline
+
+%prep
+%autosetup -n mlmodels-0.38.1
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+    find usr/lib -type f -printf "\"/%h/%f\"\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+    find usr/lib64 -type f -printf "\"/%h/%f\"\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+    find usr/bin -type f -printf "\"/%h/%f\"\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+    find usr/sbin -type f -printf "\"/%h/%f\"\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+    find usr/share/man -type f -printf "\"/%h/%f.gz\"\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-mlmodels -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Tue Jun 20 2023 Python_Bot <Python_Bot@openeuler.org> - 0.38.1-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+f545272393d79fa38c62f5d030d9dc85 mlmodels-0.38.1.tar.gz