-rw-r--r-- | .gitignore                     |   1
-rw-r--r-- | python-cth-sentence-split.spec | 147
-rw-r--r-- | sources                        |   1
3 files changed, 149 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/CTH_sentence_split-0.0.24.tar.gz
diff --git a/python-cth-sentence-split.spec b/python-cth-sentence-split.spec
new file mode 100644
index 0000000..7aca3e2
--- /dev/null
+++ b/python-cth-sentence-split.spec
@@ -0,0 +1,147 @@
+%global _empty_manifest_terminate_build 0
+Name: python-CTH-sentence-split
+Version: 0.0.24
+Release: 1
+Summary: Chinese (Traditional), Taiwanese, and Hakka sentence-splitting tool.
+License: GNU Affero General Public License v3
+URL: https://github.com/eran0926/CTH_sentence_split
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/b9/8c/b55da20f545b3ebbaf88c4418fae379554c33edf796cca7704e09e476c5b/CTH_sentence_split-0.0.24.tar.gz
+BuildArch: noarch
+
+
+%description
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")
+```
+
+
+
+
+
+%package -n python3-CTH-sentence-split
+Summary: Chinese (Traditional), Taiwanese, and Hakka sentence-splitting tool.
+Provides: python-CTH-sentence-split
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-CTH-sentence-split
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")
+```
+
+
+
+
+
+%package help
+Summary: Development documents and examples for CTH-sentence-split
+Provides: python3-CTH-sentence-split-doc
+%description help
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")
+```
+
+
+
+
+
+%prep
+%autosetup -n CTH-sentence-split-0.0.24
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+	find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+	find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+	find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+	find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+	find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-CTH-sentence-split -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 10 2023 Python_Bot <Python_Bot@openeuler.org> - 0.0.24-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+56846176e1d08ca77e92d1c188076fe1 CTH_sentence_split-0.0.24.tar.gz
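For a local rebuild of this spec, the sketch below shows one possible workflow: fetch Source0, check it against the md5 recorded in the new `sources` file, and run `rpmbuild`. The `~/rpmbuild` layout, `rpmdevtools`/`rpm-build` availability, and the use of `curl` are assumptions on my part; the URL and checksum are taken from the diff above, and the actual openEuler build pipeline drives this spec differently.

```sh
# Rough local-rebuild sketch (assumes rpm-build and rpmdevtools are installed;
# paths follow the default ~/rpmbuild tree created by rpmdev-setuptree).
rpmdev-setuptree

# Fetch Source0 as named in the spec (URL copied from the diff).
curl -L -o ~/rpmbuild/SOURCES/CTH_sentence_split-0.0.24.tar.gz \
    "https://mirrors.nju.edu.cn/pypi/web/packages/b9/8c/b55da20f545b3ebbaf88c4418fae379554c33edf796cca7704e09e476c5b/CTH_sentence_split-0.0.24.tar.gz"

# Verify the tarball against the md5 recorded in the `sources` file.
echo "56846176e1d08ca77e92d1c188076fe1  $HOME/rpmbuild/SOURCES/CTH_sentence_split-0.0.24.tar.gz" | md5sum -c -

# Build source and binary packages from the spec.
cp python-cth-sentence-split.spec ~/rpmbuild/SPECS/
rpmbuild -ba ~/rpmbuild/SPECS/python-cth-sentence-split.spec
# The resulting noarch RPMs land under ~/rpmbuild/RPMS/noarch/.
```

The binary package contents come from the `filelist.lst` and `doclist.lst` that the `%install` section writes, which is why the `%files` sections above reference those lists rather than enumerating files directly.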