author     CoprDistGit <infra@openeuler.org>   2023-05-10 07:53:03 +0000
committer  CoprDistGit <infra@openeuler.org>   2023-05-10 07:53:03 +0000
commit     e45251ceaeb7a0f2477731c43a60e5a484a1a876 (patch)
tree       f03f8a8de0d689ca8baef248995de74841ad39c0
parent     5d7762735a7ad2d606889f1583f1097dfbf8182b (diff)
automatic import of python-cth-sentence-split
-rw-r--r--  .gitignore                        1
-rw-r--r--  python-cth-sentence-split.spec  147
-rw-r--r--  sources                           1
3 files changed, 149 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..0ab347c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/CTH_sentence_split-0.0.24.tar.gz
diff --git a/python-cth-sentence-split.spec b/python-cth-sentence-split.spec
new file mode 100644
index 0000000..7aca3e2
--- /dev/null
+++ b/python-cth-sentence-split.spec
@@ -0,0 +1,147 @@
+%global _empty_manifest_terminate_build 0
+Name: python-CTH-sentence-split
+Version: 0.0.24
+Release: 1
+Summary: Sentence-splitting tool for Chinese (Traditional), Taiwanese, and Hakka
+License: GNU Affero General Public License v3
+URL: https://github.com/eran0926/CTH_sentence_split
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/b9/8c/b55da20f545b3ebbaf88c4418fae379554c33edf796cca7704e09e476c5b/CTH_sentence_split-0.0.24.tar.gz
+BuildArch: noarch
+
+
+%description
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns the result as a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")  # returns a list
+```
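+
+A quick way to confirm the return type (the variable name `segments` below is only illustrative):
+
+```python
+import CTH_sentence_split as sp
+
+segments = sp.split("sentence you want to split")
+print(type(segments))  # <class 'list'>
+print(segments)        # the split pieces
+```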
+
+
+
+
+
+%package -n python3-CTH-sentence-split
+Summary: Sentence-splitting tool for Chinese (Traditional), Taiwanese, and Hakka
+Provides: python-CTH-sentence-split
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-CTH-sentence-split
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns the result as a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")  # returns a list
+```
+
+
+
+
+
+%package help
+Summary: Development documents and examples for CTH-sentence-split
+Provides: python3-CTH-sentence-split-doc
+%description help
+# CTH_sentence_split
+
+[PyPI](https://pypi.org/project/CTH-sentence-split/)
+[Github](https://github.com/eran0926/CTH_sentence_split)
+
+## Info
+This program splits a sentence using Chinese (Traditional), Taiwanese, and Hakka dictionaries.
+It returns the result as a list.
+
+## LICENSE
+This program is licensed under the GNU AGPLv3 or later.
+You should have received a copy of the GNU Affero General Public License v3.0 along with this program.
+If not, see [https://www.gnu.org/licenses/agpl-3.0-standalone.html](https://www.gnu.org/licenses/agpl-3.0-standalone.html).
+
+## Install
+Install with `pip3 install -U CTH_sentence_split`
+
+## Use
+```python
+import CTH_sentence_split as sp
+sp.split("sentence you want to split")  # returns a list
+```
+
+
+
+
+
+%prep
+%autosetup -n CTH-sentence-split-0.0.24
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
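+# Generate the file lists consumed by %files below: every file installed under
+# the standard prefixes goes into filelist.lst, and man pages into doclist.lst.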
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-CTH-sentence-split -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 10 2023 Python_Bot <Python_Bot@openeuler.org> - 0.0.24-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..d3e41ae
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+56846176e1d08ca77e92d1c188076fe1 CTH_sentence_split-0.0.24.tar.gz