author    CoprDistGit <infra@openeuler.org>  2023-05-31 05:28:54 +0000
committer CoprDistGit <infra@openeuler.org>  2023-05-31 05:28:54 +0000
commit    6c51d69d81c63df960784641b44b28db3df3889c (patch)
tree      cbabb8047f875ae8dbe829b85b0f709265feed3e
parent    6b25c697e04a17528c4529299e94a08a87021e4f (diff)
automatic import of python-aiohttp-s3-client
-rw-r--r--  .gitignore                       1
-rw-r--r--  python-aiohttp-s3-client.spec  354
-rw-r--r--  sources                          1
3 files changed, 356 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..2893520 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/aiohttp_s3_client-0.8.2.tar.gz
diff --git a/python-aiohttp-s3-client.spec b/python-aiohttp-s3-client.spec
new file mode 100644
index 0000000..2702002
--- /dev/null
+++ b/python-aiohttp-s3-client.spec
@@ -0,0 +1,354 @@
+%global _empty_manifest_terminate_build 0
+Name: python-aiohttp-s3-client
+Version: 0.8.2
+Release: 1
+Summary: A simple module for putting and getting objects from Amazon S3-compatible endpoints
+License: Apache Software License
+URL: https://github.com/aiokitchen/aiohttp-s3-client
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/63/30/fe4fa3e7ba104cf304f8f5386a339214e5eb8a0b0402c050041ccfcc5686/aiohttp_s3_client-0.8.2.tar.gz
+BuildArch: noarch
+
+Requires: python3-aiohttp
+Requires: python3-aiomisc
+Requires: python3-aws-request-signer
+
+%description
+[![PyPI - License](https://img.shields.io/pypi/l/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Wheel](https://img.shields.io/pypi/wheel/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Mypy](http://www.mypy-lang.org/static/mypy_badge.svg)]() [![PyPI](https://img.shields.io/pypi/v/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![PyPI](https://img.shields.io/pypi/pyversions/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Coverage Status](https://coveralls.io/repos/github/mosquito/aiohttp-s3-client/badge.svg?branch=master)](https://coveralls.io/github/mosquito/aiohttp-s3-client?branch=master) ![tox](https://github.com/mosquito/aiohttp-s3-client/workflows/tox/badge.svg?branch=master)
+A simple module for putting and getting objects from Amazon S3-compatible endpoints.
+## Installation
+```bash
+pip install aiohttp-s3-client
+```
+## Usage
+```python
+from http import HTTPStatus
+from aiohttp import ClientSession
+from aiohttp_s3_client import S3Client
+async with ClientSession(raise_for_status=True) as session:
+ client = S3Client(
+ url="http://s3-url",
+ session=session,
+ access_key_id="key-id",
+ secret_access_key="hackme",
+ region="us-east-1"
+ )
+ # Upload str object to bucket "bucket" and key "str"
+ async with client.put("bucket/str", "hello, world") as resp:
+ assert resp.status == HTTPStatus.OK
+ # Upload bytes object to bucket "bucket" and key "bytes"
+ resp = await client.put("bucket/bytes", b"hello, world")
+ assert resp.status == HTTPStatus.OK
+ # Upload AsyncIterable to bucket "bucket" and key "iterable"
+ async def gen():
+ yield b'some bytes'
+ resp = await client.put("bucket/file", gen())
+ assert resp.status == HTTPStatus.OK
+ # Upload file to bucket "bucket" and key "file"
+ resp = await client.put_file("bucket/file", "/path_to_file")
+ assert resp.status == HTTPStatus.OK
+ # Check object exists using bucket+key
+ resp = await client.head("bucket/key")
+    assert resp.status == HTTPStatus.OK
+ # Get object by bucket+key
+ resp = await client.get("bucket/key")
+ data = await resp.read()
+ # Delete object using bucket+key
+ resp = await client.delete("bucket/key")
+    assert resp.status == HTTPStatus.NO_CONTENT
+ # List objects by prefix
+ async for result in client.list_objects_v2("bucket/", prefix="prefix"):
+        # Each result is a list of metadata objects for a batch of
+        # objects stored in the bucket.
+ do_work(result)
+```
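+The snippet above assumes an already running event loop (it lives inside a
+coroutine). A minimal way to run it as a standalone script, using an
+`asyncio.run()` entry point of our own (the `main()` wrapper is not part of
+the library), might look like this:
+```python
+import asyncio
+from http import HTTPStatus
+
+from aiohttp import ClientSession
+from aiohttp_s3_client import S3Client
+
+async def main():
+    async with ClientSession(raise_for_status=True) as session:
+        client = S3Client(
+            url="http://s3-url",  # placeholder endpoint
+            session=session,
+            access_key_id="key-id",
+            secret_access_key="hackme",
+            region="us-east-1",
+        )
+        # Upload a small payload and check the response status
+        resp = await client.put("bucket/bytes", b"hello, world")
+        assert resp.status == HTTPStatus.OK
+
+asyncio.run(main())
+```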
+The bucket may be specified as a subdomain or as part of the object name:
+```python
+client = S3Client(url="http://bucket.your-s3-host", ...)
+resp = await client.put("key", gen())
+client = S3Client(url="http://your-s3-host", ...)
+resp = await client.put("bucket/key", gen())
+client = S3Client(url="http://your-s3-host/bucket", ...)
+resp = await client.put("key", gen())
+```
+Credentials may be passed as keyword arguments or embedded in the URL:
+```python
+client = S3Client(url="http://your-s3-host", access_key_id="key_id",
+ secret_access_key="access_key", ...)
+client = S3Client(url="http://key_id:access_key@your-s3-host", ...)
+```
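+Hard-coding secrets is best avoided. Here is a small sketch (the environment
+variable names are our convention, not something the library requires) that
+builds a client from the environment instead:
+```python
+import os
+
+from aiohttp import ClientSession
+from aiohttp_s3_client import S3Client
+
+def make_client(session: ClientSession) -> S3Client:
+    # AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are conventional AWS
+    # names; S3_URL is a made-up fallback for illustration.
+    return S3Client(
+        url=os.environ.get("S3_URL", "http://your-s3-host"),
+        session=session,
+        access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
+        secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
+    )
+```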
+## Multipart upload
+For large files, [multipart upload](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html)
+can be used. It lets you upload multiple parts of a file to S3
+asynchronously.
+S3Client retries failed part uploads and calculates a hash for each part for integrity checks.
+```python
+client = S3Client()
+await client.put_file_multipart(
+ "test/bigfile.csv",
+ headers={
+ "Content-Type": "text/csv",
+ },
+ workers_count=8,
+)
+```
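+`workers_count` controls how many parts are uploaded concurrently. As a
+rough sketch (our heuristic, not a library recommendation), it can be scaled
+with the available CPUs while keeping an upper bound:
+```python
+import multiprocessing
+
+# Cap concurrency at 8 workers even on large machines
+workers = min(8, multiprocessing.cpu_count())
+await client.put_file_multipart(
+    "test/bigfile.csv",
+    headers={"Content-Type": "text/csv"},
+    workers_count=workers,
+)
+```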
+## Parallel download to file
+S3 supports `GET` requests with the `Range` header, so objects can be
+downloaded in parallel over multiple connections for a speedup.
+S3Client retries failed range requests and uses the `ETag` header to make
+sure the object does not change during the download.
+If your system supports the `pwrite` syscall (Linux, macOS, etc.), workers
+write simultaneously to a single file at their own offsets. Otherwise, each
+worker writes to its own file, and the parts are concatenated after downloading.
+```python
+client = S3Client()
+await client.get_file_parallel(
+ "dump/bigfile.csv",
+ "/home/user/bigfile.csv",
+ workers_count=8,
+)
+```
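+A minimal illustration of the `pwrite`-at-offset idea described above (this
+is not the library's internals; the file name and offsets are made up):
+```python
+import os
+
+def write_part(fd, offset, data):
+    # os.pwrite writes at an absolute offset without moving the file
+    # position, so concurrent workers cannot clobber each other.
+    os.pwrite(fd, data, offset)
+
+fd = os.open("/tmp/bigfile.part", os.O_CREAT | os.O_WRONLY, 0o644)
+try:
+    write_part(fd, 0, b"first range ")
+    write_part(fd, 12, b"second range")
+finally:
+    os.close(fd)
+```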
+
+%package -n python3-aiohttp-s3-client
+Summary: A simple module for putting and getting objects from Amazon S3-compatible endpoints
+Provides: python-aiohttp-s3-client
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-aiohttp-s3-client
+[![PyPI - License](https://img.shields.io/pypi/l/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Wheel](https://img.shields.io/pypi/wheel/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Mypy](http://www.mypy-lang.org/static/mypy_badge.svg)]() [![PyPI](https://img.shields.io/pypi/v/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![PyPI](https://img.shields.io/pypi/pyversions/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Coverage Status](https://coveralls.io/repos/github/mosquito/aiohttp-s3-client/badge.svg?branch=master)](https://coveralls.io/github/mosquito/aiohttp-s3-client?branch=master) ![tox](https://github.com/mosquito/aiohttp-s3-client/workflows/tox/badge.svg?branch=master)
+A simple module for putting and getting objects from Amazon S3-compatible endpoints.
+## Installation
+```bash
+pip install aiohttp-s3-client
+```
+## Usage
+```python
+from http import HTTPStatus
+from aiohttp import ClientSession
+from aiohttp_s3_client import S3Client
+async with ClientSession(raise_for_status=True) as session:
+ client = S3Client(
+ url="http://s3-url",
+ session=session,
+ access_key_id="key-id",
+ secret_access_key="hackme",
+ region="us-east-1"
+ )
+ # Upload str object to bucket "bucket" and key "str"
+ async with client.put("bucket/str", "hello, world") as resp:
+ assert resp.status == HTTPStatus.OK
+ # Upload bytes object to bucket "bucket" and key "bytes"
+ resp = await client.put("bucket/bytes", b"hello, world")
+ assert resp.status == HTTPStatus.OK
+ # Upload AsyncIterable to bucket "bucket" and key "iterable"
+ async def gen():
+ yield b'some bytes'
+ resp = await client.put("bucket/file", gen())
+ assert resp.status == HTTPStatus.OK
+ # Upload file to bucket "bucket" and key "file"
+ resp = await client.put_file("bucket/file", "/path_to_file")
+ assert resp.status == HTTPStatus.OK
+ # Check object exists using bucket+key
+ resp = await client.head("bucket/key")
+    assert resp.status == HTTPStatus.OK
+ # Get object by bucket+key
+ resp = await client.get("bucket/key")
+ data = await resp.read()
+ # Delete object using bucket+key
+ resp = await client.delete("bucket/key")
+    assert resp.status == HTTPStatus.NO_CONTENT
+ # List objects by prefix
+ async for result in client.list_objects_v2("bucket/", prefix="prefix"):
+        # Each result is a list of metadata objects for a batch of
+        # objects stored in the bucket.
+ do_work(result)
+```
+The bucket may be specified as a subdomain or as part of the object name:
+```python
+client = S3Client(url="http://bucket.your-s3-host", ...)
+resp = await client.put("key", gen())
+client = S3Client(url="http://your-s3-host", ...)
+resp = await client.put("bucket/key", gen())
+client = S3Client(url="http://your-s3-host/bucket", ...)
+resp = await client.put("key", gen())
+```
+Credentials may be passed as keyword arguments or embedded in the URL:
+```python
+client = S3Client(url="http://your-s3-host", access_key_id="key_id",
+ secret_access_key="access_key", ...)
+client = S3Client(url="http://key_id:access_key@your-s3-host", ...)
+```
+## Multipart upload
+For large files, [multipart upload](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html)
+can be used. It lets you upload multiple parts of a file to S3
+asynchronously.
+S3Client retries failed part uploads and calculates a hash for each part for integrity checks.
+```python
+client = S3Client()
+await client.put_file_multipart(
+ "test/bigfile.csv",
+ headers={
+ "Content-Type": "text/csv",
+ },
+ workers_count=8,
+)
+```
+## Parallel download to file
+S3 supports `GET` requests with the `Range` header, so objects can be
+downloaded in parallel over multiple connections for a speedup.
+S3Client retries failed range requests and uses the `ETag` header to make
+sure the object does not change during the download.
+If your system supports the `pwrite` syscall (Linux, macOS, etc.), workers
+write simultaneously to a single file at their own offsets. Otherwise, each
+worker writes to its own file, and the parts are concatenated after downloading.
+```python
+client = S3Client()
+await client.get_file_parallel(
+ "dump/bigfile.csv",
+ "/home/user/bigfile.csv",
+ workers_count=8,
+)
+```
+
+%package help
+Summary: Development documentation and examples for aiohttp-s3-client
+Provides: python3-aiohttp-s3-client-doc
+%description help
+[![PyPI - License](https://img.shields.io/pypi/l/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Wheel](https://img.shields.io/pypi/wheel/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Mypy](http://www.mypy-lang.org/static/mypy_badge.svg)]() [![PyPI](https://img.shields.io/pypi/v/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![PyPI](https://img.shields.io/pypi/pyversions/aiohttp-s3-client)](https://pypi.org/project/aiohttp-s3-client) [![Coverage Status](https://coveralls.io/repos/github/mosquito/aiohttp-s3-client/badge.svg?branch=master)](https://coveralls.io/github/mosquito/aiohttp-s3-client?branch=master) ![tox](https://github.com/mosquito/aiohttp-s3-client/workflows/tox/badge.svg?branch=master)
+A simple module for putting and getting objects from Amazon S3-compatible endpoints.
+## Installation
+```bash
+pip install aiohttp-s3-client
+```
+## Usage
+```python
+from http import HTTPStatus
+from aiohttp import ClientSession
+from aiohttp_s3_client import S3Client
+async with ClientSession(raise_for_status=True) as session:
+ client = S3Client(
+ url="http://s3-url",
+ session=session,
+ access_key_id="key-id",
+ secret_access_key="hackme",
+ region="us-east-1"
+ )
+ # Upload str object to bucket "bucket" and key "str"
+ async with client.put("bucket/str", "hello, world") as resp:
+ assert resp.status == HTTPStatus.OK
+ # Upload bytes object to bucket "bucket" and key "bytes"
+ resp = await client.put("bucket/bytes", b"hello, world")
+ assert resp.status == HTTPStatus.OK
+ # Upload AsyncIterable to bucket "bucket" and key "iterable"
+ async def gen():
+ yield b'some bytes'
+ resp = await client.put("bucket/file", gen())
+ assert resp.status == HTTPStatus.OK
+ # Upload file to bucket "bucket" and key "file"
+ resp = await client.put_file("bucket/file", "/path_to_file")
+ assert resp.status == HTTPStatus.OK
+ # Check object exists using bucket+key
+ resp = await client.head("bucket/key")
+    assert resp.status == HTTPStatus.OK
+ # Get object by bucket+key
+ resp = await client.get("bucket/key")
+ data = await resp.read()
+ # Delete object using bucket+key
+ resp = await client.delete("bucket/key")
+    assert resp.status == HTTPStatus.NO_CONTENT
+ # List objects by prefix
+ async for result in client.list_objects_v2("bucket/", prefix="prefix"):
+        # Each result is a list of metadata objects for a batch of
+        # objects stored in the bucket.
+ do_work(result)
+```
+The bucket may be specified as a subdomain or as part of the object name:
+```python
+client = S3Client(url="http://bucket.your-s3-host", ...)
+resp = await client.put("key", gen())
+client = S3Client(url="http://your-s3-host", ...)
+resp = await client.put("bucket/key", gen())
+client = S3Client(url="http://your-s3-host/bucket", ...)
+resp = await client.put("key", gen())
+```
+Credentials may be passed as keyword arguments or embedded in the URL:
+```python
+client = S3Client(url="http://your-s3-host", access_key_id="key_id",
+ secret_access_key="access_key", ...)
+client = S3Client(url="http://key_id:access_key@your-s3-host", ...)
+```
+## Multipart upload
+For large files, [multipart upload](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html)
+can be used. It lets you upload multiple parts of a file to S3
+asynchronously.
+S3Client retries failed part uploads and calculates a hash for each part for integrity checks.
+```python
+client = S3Client()
+await client.put_file_multipart(
+ "test/bigfile.csv",
+ headers={
+ "Content-Type": "text/csv",
+ },
+ workers_count=8,
+)
+```
+## Parallel download to file
+S3 supports `GET` requests with the `Range` header, so objects can be
+downloaded in parallel over multiple connections for a speedup.
+S3Client retries failed range requests and uses the `ETag` header to make
+sure the object does not change during the download.
+If your system supports the `pwrite` syscall (Linux, macOS, etc.), workers
+write simultaneously to a single file at their own offsets. Otherwise, each
+worker writes to its own file, and the parts are concatenated after downloading.
+```python
+client = S3Client()
+await client.get_file_parallel(
+ "dump/bigfile.csv",
+ "/home/user/bigfile.csv",
+ workers_count=8,
+)
+```
+
+%prep
+%autosetup -n aiohttp-s3-client-0.8.2
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-aiohttp-s3-client -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 31 2023 Python_Bot <Python_Bot@openeuler.org> - 0.8.2-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..2055061
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+5792000f5236501bfb9ad32677308c94 aiohttp_s3_client-0.8.2.tar.gz