summaryrefslogtreecommitdiff
path: root/python-nubium-schemas.spec
diff options
context:
space:
mode:
Diffstat (limited to 'python-nubium-schemas.spec')
-rw-r--r--python-nubium-schemas.spec142
1 file changed, 142 insertions, 0 deletions
diff --git a/python-nubium-schemas.spec b/python-nubium-schemas.spec
new file mode 100644
index 0000000..bfc73ff
--- /dev/null
+++ b/python-nubium-schemas.spec
@@ -0,0 +1,142 @@
+%global _empty_manifest_terminate_build 0
+Name: python-nubium-schemas
+Version: 2.0.0
+Release: 1
+Summary: Python dictionary representations of Avro Schema for the nubium project
+License: MIT
+URL: https://gitlab.corp.redhat.com/mkt-ops-de/nubium-schemas.git
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/fe/be/c5aa612aec94bbb45996789cf6a2923156c1ba9ac7c81df6c4501b6b4a3e/nubium-schemas-2.0.0.tar.gz
+BuildArch: noarch
+
+Requires: python3-dataclasses-avroschema
+Requires: python3-pydantic
+Requires: python3-black
+Requires: python3-pip-tools
+Requires: python3-pytest
+Requires: python3-pytest-cov
+Requires: python3-setuptools-scm
+
+%description
+A Python package containing dictionary representations of Avro schemas,
+for use with the nubium project.
+
+## Usage Examples
+The first step is to include the latest version of the schema library in
+the requirements for the app.
+For code examples, go to https://gitlab.corp.redhat.com/ebrennan/python-avro-classes.git
+
+When a git tag is created, the `.gitlab-ci.yml` pipeline will automatically
+trigger `upload_package.sh`, creating a new PyPI version.
+
+### Faust
+1) Import one of the schema dictionaries from the package
+1) Dump the dictionary to a string using `json.dumps`
+1) Define a serializer using the `FaustSerializer` class
+
+### Confluent Kafka
+1) Import the schema dictionary
+1) Dump the schema to a string using `json.dumps`
+1) Create a schema object using `confluent_kafka.avro.loads`
+1) Use the schema when instantiating Avro producer and consumer clients
+
+
+%package -n python3-nubium-schemas
+Summary: Python dictionary representations of Avro Schema for the nubium project
+Provides: python-nubium-schemas
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-nubium-schemas
+A Python package containing dictionary representations of Avro schemas,
+for use with the nubium project.
+
+## Usage Examples
+The first step is to include the latest version of the schema library in
+the requirements for the app.
+For code examples, go to https://gitlab.corp.redhat.com/ebrennan/python-avro-classes.git
+
+When a git tag is created, the `.gitlab-ci.yml` pipeline will automatically
+trigger `upload_package.sh`, creating a new PyPI version.
+
+### Faust
+1) Import one of the schema dictionaries from the package
+1) Dump the dictionary to a string using `json.dumps`
+1) Define a serializer using the `FaustSerializer` class
+
+### Confluent Kafka
+1) Import the schema dictionary
+1) Dump the schema to a string using `json.dumps`
+1) Create a schema object using `confluent_kafka.avro.loads`
+1) Use the schema when instantiating Avro producer and consumer clients
+
+
+%package help
+Summary: Development documents and examples for nubium-schemas
+Provides: python3-nubium-schemas-doc
+%description help
+A Python package containing dictionary representations of Avro schemas,
+for use with the nubium project.
+
+## Usage Examples
+The first step is to include the latest version of the schema library in
+the requirements for the app.
+For code examples, go to https://gitlab.corp.redhat.com/ebrennan/python-avro-classes.git
+
+When a git tag is created, the `.gitlab-ci.yml` pipeline will automatically
+trigger `upload_package.sh`, creating a new PyPI version.
+
+### Faust
+1) Import one of the schema dictionaries from the package
+1) Dump the dictionary to a string using `json.dumps`
+1) Define a serializer using the `FaustSerializer` class
+
+### Confluent Kafka
+1) Import the schema dictionary
+1) Dump the schema to a string using `json.dumps`
+1) Create a schema object using `confluent_kafka.avro.loads`
+1) Use the schema when instantiating Avro producer and consumer clients
+
+
+%prep
+%autosetup -n nubium-schemas-2.0.0
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-nubium-schemas -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Thu May 18 2023 Python_Bot <Python_Bot@openeuler.org> - 2.0.0-1
+- Package Spec generated