author    CoprDistGit <infra@openeuler.org>  2023-05-30 17:09:45 +0000
committer CoprDistGit <infra@openeuler.org>  2023-05-30 17:09:45 +0000
commit    0e9340ac2853ff7141a85df22c3a6afb1ae8cb86 (patch)
tree      fdd4fccfa029ea847bd7c7ee853c8350d77b1fb3
parent    e9ee1e1a1dc40bded6b4139b760a50a8a1b40e74 (diff)
automatic import of python-datapipelines
-rw-r--r--  .gitignore                  1
-rw-r--r--  python-datapipelines.spec  93
-rw-r--r--  sources                     1
3 files changed, 95 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..2307f36 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/datapipelines-1.0.7.tar.gz
diff --git a/python-datapipelines.spec b/python-datapipelines.spec
new file mode 100644
index 0000000..9c3bae3
--- /dev/null
+++ b/python-datapipelines.spec
@@ -0,0 +1,93 @@
+%global _empty_manifest_terminate_build 0
+Name: python-datapipelines
+Version: 1.0.7
+Release: 1
+Summary: Caching abstraction layer for orchestrating multiple cache tiers
+License: MIT
+URL: https://github.com/meraki-analytics/datapipelines
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/b4/86/812f428f88d3b9e10be6178b54508580a2355ae67d50797e62ea47fef9c3/datapipelines-1.0.7.tar.gz
+BuildArch: noarch
+
+
+%description
+A data pipeline collects, stores, and processes data. This package provides a framework for facilitating this process.
+Data is collected from ``DataSources``, stored in ``DataSinks``, and processed using ``Transformers``.
+``DataSources`` are entities that *provide* data to the pipeline. Examples include databases, in-memory caches, and REST APIs.
+``DataSinks`` are entities that *store* data provided by ``DataSources``. Examples include databases and in-memory caches. Nearly all data sinks will also be data sources because storing data is usually unhelpful if you cannot get that data out. We refer to an entity that is both a data source and data sink as a *data store*.
+``Transformers`` are entities that *transform* or process data from one data type to another. For example, a transformer may transform a Word document to a PDF.
+The ``DataPipeline`` consists of a list of ``DataStores`` and ``DataSinks`` that communicate via ``Transformers``.
+The data sources and sinks are ordered in the data pipeline, and their order determines the order in which data is requested. Generally speaking, slower data stores/sinks should go towards the end of the pipeline.
+Not every data type needs to be supported by every data sink or data store. If a data sink/store does not support a requested type of data, that data sink/store is simply skipped in the pipeline.
+
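+For example, a lookup conceptually walks the ordered data sources until one of
+them can provide the requested type, then writes the result back into the data
+sinks ahead of it. The sketch below uses plain, illustrative class and function
+names, not the actual datapipelines API::
+
+    class InMemoryStore:
+        """A minimal data store: both a data source and a data sink."""
+        def __init__(self):
+            self._items = {}
+
+        def get(self, type_, key):
+            # Returning None signals that this source cannot provide the item.
+            return self._items.get((type_, key))
+
+        def put(self, type_, key, value):
+            self._items[(type_, key)] = value
+
+    def fetch(type_, key, sources, sinks):
+        for source in sources:                  # ordered fastest to slowest
+            value = source.get(type_, key)
+            if value is not None:
+                for sink in sinks:              # cache the result for next time
+                    sink.put(type_, key, value)
+                return value
+        raise KeyError(f"no source can provide {type_!r} for key {key!r}")
+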
+%package -n python3-datapipelines
+Summary: Caching abstraction layer for orchestrating multiple cache tiers
+Provides: python-datapipelines
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-datapipelines
+A data pipeline collects, stores, and processes data. This package provides a framework for facilitating this process.
+Data is collected from ``DataSources``, stored in ``DataSinks``, and processed using ``Transformers``.
+``DataSources`` are entities that *provide* data to the pipeline. Examples include databases, in-memory caches, and REST APIs.
+``DataSinks`` are entities that *store* data provided by ``DataSources``. Examples include databases and in-memory caches. Nearly all data sinks will also be data sources because storing data is usually unhelpful if you cannot get that data out. We refer to an entity that is both a data source and data sink as a *data store*.
+``Transformers`` are entities that *transform* or process data from one data type to another. For example, a transformer may transform a Word document to a PDF.
+The ``DataPipeline`` consists of a list of ``DataStores`` and ``DataSinks`` that communicate via ``Transformers``.
+The data sources and sinks are ordered in the data pipeline, and their order determines the order in which data is requested. Generally speaking, slower data stores/sinks should go towards the end of the pipeline.
+Not every data type needs to be supported by every data sink or data store. If a data sink/store does not support a requested type of data, that data sink/store is simply skipped in the pipeline.
+
+%package help
+Summary: Development documents and examples for datapipelines
+Provides: python3-datapipelines-doc
+%description help
+A data pipeline collects, stores, and processes data. This package provides a framework for facilitating this process.
+Data is collected from ``DataSources``, stored in ``DataSinks``, and processed using ``Transformers``.
+``DataSources`` are entities that *provide* data to the pipeline. Examples include databases, in-memory caches, and REST APIs.
+``DataSinks`` are entities that *store* data provided by ``DataSources``. Examples include databases and in-memory caches. Nearly all data sinks will also be data sources because storing data is usually unhelpful if you cannot get that data out. We refer to an entity that is both a data source and data sink as a *data store*.
+``Transformers`` are entities that *transform* or process data from one data type to another. For example, a transformer may transform a Word document to a PDF.
+The ``DataPipeline`` consists of a list of ``DataStores`` and ``DataSinks`` that communicate via ``Transformers``.
+The data sources and sinks are ordered in the data pipeline, and their order determines the order in which data is requested. Generally speaking, slower data stores/sinks should go towards the end of the pipeline.
+Not every data type needs to be supported by every data sink or data store. If a data sink/store does not support a requested type of data, that data sink/store is simply skipped in the pipeline.
+
+%prep
+%autosetup -n datapipelines-1.0.7
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
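+# Record every file installed under usr/{lib,lib64,bin,sbin} so that the
+# package file lists below can be generated automatically rather than
+# maintained by hand.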
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
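+# Man pages are listed with a .gz suffix because rpmbuild compresses them
+# after install (brp-compress).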
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-datapipelines -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Tue May 30 2023 Python_Bot <Python_Bot@openeuler.org> - 1.0.7-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..3e64407
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+537e875824fe2328a5fd4982ce946dfd datapipelines-1.0.7.tar.gz