summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCoprDistGit <infra@openeuler.org>2023-05-10 04:47:35 +0000
committerCoprDistGit <infra@openeuler.org>2023-05-10 04:47:35 +0000
commit30a5675716693999b2e60a88c782b52a68473dd7 (patch)
treeec325b04ed96b7f8c6d1c1975fc3ae1319f60079
parent9c3ec056711165202a6ab94ca7313d843fb0ef63 (diff)
automatic import of python-aiozipstreamopeneuler20.03
-rw-r--r--.gitignore1
-rw-r--r--python-aiozipstream.spec513
-rw-r--r--sources1
3 files changed, 515 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..71dc580 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/aiozipstream-0.4.tar.gz
diff --git a/python-aiozipstream.spec b/python-aiozipstream.spec
new file mode 100644
index 0000000..e5cf1ea
--- /dev/null
+++ b/python-aiozipstream.spec
@@ -0,0 +1,513 @@
+%global _empty_manifest_terminate_build 0
+Name: python-aiozipstream
+Version: 0.4
+Release: 1
+Summary: Creating zip files on the fly
+License: BSD
+URL: https://github.com/kbbdy/zipstream
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/0f/1b/97b8d72faeb6cd6b44c23fdc45d054b00366ce153896e76e375e63f80a68/aiozipstream-0.4.tar.gz
+BuildArch: noarch
+
+
+%description
+# AioZipStream
+
+This is a fork of [ZipStream](https://github.com/kbbdy/zipstream). Simple python library for streaming ZIP files which are created dynamically, without using any temporary files.
+
+- No temporary files, data is streamed directly
+- Supported `deflate` compression method
+- Small memory usage, streaming is realised using yield statement
+- Archive structure is created on the fly, and all data can be created during stream
+- Files included into archive can be generated on the fly using Python generators
+- Asynchronous AioZipStream and classic ZipStream are available
+- Zip32 format compatible files
+- Independent from python's standard ZipFile implementation
+- Almost no dependencies: only `aiofiles` in some circumstances (see AioZipStream section for details)
+- Zip64 support is also planned in future (far future, because I have never hit the 4GB file size limit ;-) )
+
+### Required Python version:
+
+`ZipStream` is compatible with **Python 2.7**.
+
+`AioZipStream` requires **Python 3.6**. For earlier versions `AioZipStream` is not available for import.
+
+
+## Usage:
+
+List of files to archive is stored as list of dicts. Why dicts? Because there are possible additional parameters for each file, and more parameters are planned in future.
+
+Sample list of files to archive:
+
+```python
+files = [
+ # file /tmp/file.dat will be added to archive under `file.dat` name.
+ {'file':'/tmp/file.dat'},
+
+ # same file as previous under own name: `completly_different.foo`
+ # and will be compressed using `deflate` compression method
+ {'file':'/tmp/file.dat',
+ 'name':'completly_different.foo',
+ 'compression':'deflate'}
+ ]
+```
+
+It's time to stream / archive:
+
+```python
+zs = ZipStream(files)
+with open("example.zip", "wb") as fout:
+ for data in zs.stream():
+ fout.write(data)
+```
+
+Any iterable source of binary data can be used in place of regular files. Using generator as input for file must be represented by `stream` field instead of `file`, additional `name` parameter is also required.
+
+```python
+
+def source_of_bytes():
+ yield b"123456789"
+ yield b"abcdefgh"
+ yield b"I am a binary data"
+
+files = [....
+ # file will be generated dynamically under name my_data.bin
+ {'stream': source_of_bytes(), 'name': 'my_data.bin'},
+ ]
+```
+
+Keep in mind that data should be served in chunks of a reasonable size, because when a stream is used, the `ZipStream` class is not able to split the data by itself.
+
+List of files to stream can be also generated on the fly, during streaming:
+
+```python
+import os
+from zipstream import ZipStream
+
+def files_to_stream_with_foo_in_name(dirname):
+    # all files from selected directory
+ for f in os.listdir(dirname):
+ fp = os.path.join(dirname, f)
+ if os.path.isfile(fp):
+ yield {'file': fp,
+ 'name': "foo_" + os.path.basename(fp)}
+ # and our generator too
+ yield {'stream': source_of_bytes(),
+ 'name': 'my_data.bin',
+ 'compression': 'deflate'}
+
+zs = ZipStream(files_to_stream_with_foo_in_name('/tmp/some-files'))
+```
+
+## Asynchronous AioZipStream
+
+:warning: **To use asynchronous AioZipStream at least Python 3.6 version is required**. AioZipStream is using asynchronous generator syntax, which is available from the 3.6 version.
+
+To work with local files the additional `aiofiles` library is required. If you plan to stream only dynamically generated content, then `aiofiles` is not required.
+
+See [aiofiles github repo](https://github.com/Tinche/aiofiles) for details about `aiofiles`.
+
+
+### Sample of asynchronous zip streaming
+
+Any generator used to create data on the fly, must be defined as `async`:
+
+```python
+async def content_generator():
+ yield b'foo baz'
+ asyncio.sleep(0.1) # we simulate little slow source of data
+ data = await remote_data_source()
+ yield bytes(data, 'utf-8') # always remember to yield binary data
+ asyncio.sleep(0.5)
+ yield b"the end"
+```
+
+Also zip streaming must be inside `async` function. Note usage `aiofiles.open` instead of `open`, which is asynchronous and will not block event loop during disk access.
+
+```python
+from zipstream import AioZipStream
+
+async def zip_async(zipname, files):
+ aiozip = AioZipStream(files, chunksize=32768)
+ async with aiofiles.open(zipname, mode='wb') as z:
+ async for chunk in aiozip.stream():
+ await z.write(chunk)
+```
+
+Here is the list of files to send:
+
+```python
+files = [
+ {'file': '/tmp/car.jpeg'},
+ {'file': '/tmp/aaa.mp3', 'name': 'music.mp3'},
+ {'stream': content_generator(),
+ 'name': 'random_stuff.txt'}
+]
+```
+
+Start asyncio loop and stream result to file:
+
+```python
+loop = asyncio.get_event_loop()
+loop.run_until_complete(zip_async('example.zip', files))
+loop.stop()
+```
+
+## Examples
+
+See `examples` directory for complete code and working examples of ZipStream and AioZipStream.
+
+
+
+
+%package -n python3-aiozipstream
+Summary: Creating zip files on the fly
+Provides: python-aiozipstream
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-aiozipstream
+# AioZipStream
+
+This is a fork of [ZipStream](https://github.com/kbbdy/zipstream). Simple python library for streaming ZIP files which are created dynamically, without using any temporary files.
+
+- No temporary files, data is streamed directly
+- Supported `deflate` compression method
+- Small memory usage, streaming is realised using yield statement
+- Archive structure is created on the fly, and all data can be created during stream
+- Files included into archive can be generated on the fly using Python generators
+- Asynchronous AioZipStream and classic ZipStream are available
+- Zip32 format compatible files
+- Independent from python's standard ZipFile implementation
+- Almost no dependencies: only `aiofiles` in some circumstances (see AioZipStream section for details)
+- Zip64 support is also planned in future (far future, because I have never hit the 4GB file size limit ;-) )
+
+### Required Python version:
+
+`ZipStream` is compatible with **Python 2.7**.
+
+`AioZipStream` requires **Python 3.6**. For earlier versions `AioZipStream` is not available for import.
+
+
+## Usage:
+
+List of files to archive is stored as list of dicts. Why dicts? Because there are possible additional parameters for each file, and more parameters are planned in future.
+
+Sample list of files to archive:
+
+```python
+files = [
+ # file /tmp/file.dat will be added to archive under `file.dat` name.
+ {'file':'/tmp/file.dat'},
+
+ # same file as previous under own name: `completly_different.foo`
+ # and will be compressed using `deflate` compression method
+ {'file':'/tmp/file.dat',
+ 'name':'completly_different.foo',
+ 'compression':'deflate'}
+ ]
+```
+
+It's time to stream / archive:
+
+```python
+zs = ZipStream(files)
+with open("example.zip", "wb") as fout:
+ for data in zs.stream():
+ fout.write(data)
+```
+
+Any iterable source of binary data can be used in place of regular files. Using generator as input for file must be represented by `stream` field instead of `file`, additional `name` parameter is also required.
+
+```python
+
+def source_of_bytes():
+ yield b"123456789"
+ yield b"abcdefgh"
+ yield b"I am a binary data"
+
+files = [....
+ # file will be generated dynamically under name my_data.bin
+ {'stream': source_of_bytes(), 'name': 'my_data.bin'},
+ ]
+```
+
+Keep in mind that data should be served in chunks of a reasonable size, because when a stream is used, the `ZipStream` class is not able to split the data by itself.
+
+List of files to stream can be also generated on the fly, during streaming:
+
+```python
+import os
+from zipstream import ZipStream
+
+def files_to_stream_with_foo_in_name(dirname):
+    # all files from selected directory
+ for f in os.listdir(dirname):
+ fp = os.path.join(dirname, f)
+ if os.path.isfile(fp):
+ yield {'file': fp,
+ 'name': "foo_" + os.path.basename(fp)}
+ # and our generator too
+ yield {'stream': source_of_bytes(),
+ 'name': 'my_data.bin',
+ 'compression': 'deflate'}
+
+zs = ZipStream(files_to_stream_with_foo_in_name('/tmp/some-files'))
+```
+
+## Asynchronous AioZipStream
+
+:warning: **To use asynchronous AioZipStream at least Python 3.6 version is required**. AioZipStream is using asynchronous generator syntax, which is available from the 3.6 version.
+
+To work with local files the additional `aiofiles` library is required. If you plan to stream only dynamically generated content, then `aiofiles` is not required.
+
+See [aiofiles github repo](https://github.com/Tinche/aiofiles) for details about `aiofiles`.
+
+
+### Sample of asynchronous zip streaming
+
+Any generator used to create data on the fly, must be defined as `async`:
+
+```python
+async def content_generator():
+ yield b'foo baz'
+ asyncio.sleep(0.1) # we simulate little slow source of data
+ data = await remote_data_source()
+ yield bytes(data, 'utf-8') # always remember to yield binary data
+ asyncio.sleep(0.5)
+ yield b"the end"
+```
+
+Also zip streaming must be inside `async` function. Note usage `aiofiles.open` instead of `open`, which is asynchronous and will not block event loop during disk access.
+
+```python
+from zipstream import AioZipStream
+
+async def zip_async(zipname, files):
+ aiozip = AioZipStream(files, chunksize=32768)
+ async with aiofiles.open(zipname, mode='wb') as z:
+ async for chunk in aiozip.stream():
+ await z.write(chunk)
+```
+
+Here is the list of files to send:
+
+```python
+files = [
+ {'file': '/tmp/car.jpeg'},
+ {'file': '/tmp/aaa.mp3', 'name': 'music.mp3'},
+ {'stream': content_generator(),
+ 'name': 'random_stuff.txt'}
+]
+```
+
+Start asyncio loop and stream result to file:
+
+```python
+loop = asyncio.get_event_loop()
+loop.run_until_complete(zip_async('example.zip', files))
+loop.stop()
+```
+
+## Examples
+
+See `examples` directory for complete code and working examples of ZipStream and AioZipStream.
+
+
+
+
+%package help
+Summary: Development documents and examples for aiozipstream
+Provides: python3-aiozipstream-doc
+%description help
+# AioZipStream
+
+This is a fork of [ZipStream](https://github.com/kbbdy/zipstream). Simple python library for streaming ZIP files which are created dynamically, without using any temporary files.
+
+- No temporary files, data is streamed directly
+- Supported `deflate` compression method
+- Small memory usage, streaming is realised using yield statement
+- Archive structure is created on the fly, and all data can be created during stream
+- Files included into archive can be generated on the fly using Python generators
+- Asynchronous AioZipStream and classic ZipStream are available
+- Zip32 format compatible files
+- Independent from python's standard ZipFile implementation
+- Almost no dependencies: only `aiofiles` in some circumstances (see AioZipStream section for details)
+- Zip64 support is also planned in future (far future, because I have never hit the 4GB file size limit ;-) )
+
+### Required Python version:
+
+`ZipStream` is compatible with **Python 2.7**.
+
+`AioZipStream` requires **Python 3.6**. For earlier versions `AioZipStream` is not available for import.
+
+
+## Usage:
+
+List of files to archive is stored as list of dicts. Why dicts? Because there are possible additional parameters for each file, and more parameters are planned in future.
+
+Sample list of files to archive:
+
+```python
+files = [
+ # file /tmp/file.dat will be added to archive under `file.dat` name.
+ {'file':'/tmp/file.dat'},
+
+ # same file as previous under own name: `completly_different.foo`
+ # and will be compressed using `deflate` compression method
+ {'file':'/tmp/file.dat',
+ 'name':'completly_different.foo',
+ 'compression':'deflate'}
+ ]
+```
+
+It's time to stream / archive:
+
+```python
+zs = ZipStream(files)
+with open("example.zip", "wb") as fout:
+ for data in zs.stream():
+ fout.write(data)
+```
+
+Any iterable source of binary data can be used in place of regular files. Using generator as input for file must be represented by `stream` field instead of `file`, additional `name` parameter is also required.
+
+```python
+
+def source_of_bytes():
+ yield b"123456789"
+ yield b"abcdefgh"
+ yield b"I am a binary data"
+
+files = [....
+ # file will be generated dynamically under name my_data.bin
+ {'stream': source_of_bytes(), 'name': 'my_data.bin'},
+ ]
+```
+
+Keep in mind that data should be served in chunks of a reasonable size, because when a stream is used, the `ZipStream` class is not able to split the data by itself.
+
+List of files to stream can be also generated on the fly, during streaming:
+
+```python
+import os
+from zipstream import ZipStream
+
+def files_to_stream_with_foo_in_name(dirname):
+    # all files from selected directory
+ for f in os.listdir(dirname):
+ fp = os.path.join(dirname, f)
+ if os.path.isfile(fp):
+ yield {'file': fp,
+ 'name': "foo_" + os.path.basename(fp)}
+ # and our generator too
+ yield {'stream': source_of_bytes(),
+ 'name': 'my_data.bin',
+ 'compression': 'deflate'}
+
+zs = ZipStream(files_to_stream_with_foo_in_name('/tmp/some-files'))
+```
+
+## Asynchronous AioZipStream
+
+:warning: **To use asynchronous AioZipStream at least Python 3.6 version is required**. AioZipStream is using asynchronous generator syntax, which is available from the 3.6 version.
+
+To work with local files the additional `aiofiles` library is required. If you plan to stream only dynamically generated content, then `aiofiles` is not required.
+
+See [aiofiles github repo](https://github.com/Tinche/aiofiles) for details about `aiofiles`.
+
+
+### Sample of asynchronous zip streaming
+
+Any generator used to create data on the fly, must be defined as `async`:
+
+```python
+async def content_generator():
+ yield b'foo baz'
+ asyncio.sleep(0.1) # we simulate little slow source of data
+ data = await remote_data_source()
+ yield bytes(data, 'utf-8') # always remember to yield binary data
+ asyncio.sleep(0.5)
+ yield b"the end"
+```
+
+Also zip streaming must be inside `async` function. Note usage `aiofiles.open` instead of `open`, which is asynchronous and will not block event loop during disk access.
+
+```python
+from zipstream import AioZipStream
+
+async def zip_async(zipname, files):
+ aiozip = AioZipStream(files, chunksize=32768)
+ async with aiofiles.open(zipname, mode='wb') as z:
+ async for chunk in aiozip.stream():
+ await z.write(chunk)
+```
+
+Here is the list of files to send:
+
+```python
+files = [
+ {'file': '/tmp/car.jpeg'},
+ {'file': '/tmp/aaa.mp3', 'name': 'music.mp3'},
+ {'stream': content_generator(),
+ 'name': 'random_stuff.txt'}
+]
+```
+
+Start asyncio loop and stream result to file:
+
+```python
+loop = asyncio.get_event_loop()
+loop.run_until_complete(zip_async('example.zip', files))
+loop.stop()
+```
+
+## Examples
+
+See `examples` directory for complete code and working examples of ZipStream and AioZipStream.
+
+
+
+
+%prep
+%autosetup -n aiozipstream-0.4
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-aiozipstream -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 10 2023 Python_Bot <Python_Bot@openeuler.org> - 0.4-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..70a562d
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+59cbd77ddc821ee964e9965a350d5e05 aiozipstream-0.4.tar.gz