authorCoprDistGit <infra@openeuler.org>2023-05-05 11:12:13 +0000
committerCoprDistGit <infra@openeuler.org>2023-05-05 11:12:13 +0000
commit7b06a72a55bb42c2180d9092ea2829238c78e8f1 (patch)
tree771c73e32f5dd44ab9d4e73fd764c6a94821f8e6
parentf9cb1a41541355bc0d26c9a1aeebc0bdb3dbc1f4 (diff)
automatic import of python-sqs-extended-clientopeneuler20.03
-rw-r--r--.gitignore1
-rw-r--r--python-sqs-extended-client.spec454
-rw-r--r--sources1
3 files changed, 456 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..2dbed8b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/sqs-extended-client-0.0.10.tar.gz
diff --git a/python-sqs-extended-client.spec b/python-sqs-extended-client.spec
new file mode 100644
index 0000000..75a4f7b
--- /dev/null
+++ b/python-sqs-extended-client.spec
@@ -0,0 +1,454 @@
+%global _empty_manifest_terminate_build 0
+Name: python-sqs-extended-client
+Version: 0.0.10
+Release: 1
+Summary: AWS SQS extended client functionality from amazon-sqs-java-extended-client-lib
+License: Apache 2.0
+URL: https://github.com/QuiNovas/sqs-extended-client
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/28/c0/60f0ccd54db82edfee28077dd1439d1438cd8b1a139b2b87fb862b8a8869/sqs-extended-client-0.0.10.tar.gz
+BuildArch: noarch
+
+Requires: python3-botoinator
+
+%description
+# sqs-extended-client
+
+### Implements the functionality of [amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) in Python
+
+## Installation
+```
+pip install sqs-extended-client
+```
+
+
+## Overview
+sqs-extended-client allows sending large messages through SQS by storing the message payload in S3. This is the same mechanism that the Amazon library
+[amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) provides, and the two libraries are interoperable.
+
+To do this, this library automatically extends the normal boto3 SQS client and Queue resource classes upon import using the [botoinator](https://github.com/QuiNovas/botoinator) library. This allows for further extension or decoration if desired.
+
+## Additional attributes available on `boto3` SQS `client` and `Queue` objects
+* `large_payload_support` -- the name of the S3 bucket that will store large messages.
+* `message_size_threshold` -- the message size, in bytes, above which the body is stored in the large-message bucket. Cannot be less than `0` or greater than `262144`. Defaults to `262144`.
+* `always_through_s3` -- if `True`, all messages are stored in S3 regardless of size. Defaults to `False`.
+* `s3` -- the boto3 S3 `resource` object used to store message bodies in S3. Set this if you want to control the S3 resource (for example, custom S3 config or credentials). Defaults to `boto3.resource("s3")` on first use if not previously set.
+
+## Usage
+
+#### Note:
+> The S3 bucket must already exist before use and be accessible with the credentials you are using.
+
+### Enabling support for large payloads (>256 KB)
+
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+```
+
+### Enabling support for large payloads (>64 KB)
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.message_size_threshold = 65536
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.message_size_threshold = 65536
+```
+### Enabling support for large payloads for all messages
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.always_through_s3 = True
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.always_through_s3 = True
+```
+### Setting a custom S3 resource
+```python
+import boto3
+from botocore.config import Config
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+```
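+
+### Sending and receiving a large message
+Once `large_payload_support` is configured, messages are sent and received with the standard boto3 SQS calls; bodies above `message_size_threshold` should be offloaded to the bucket transparently and resolved back on receive. A minimal sketch, assuming the placeholder queue URL and bucket from the examples above already exist:
+```python
+import boto3
+import sqs_extended_client  # patches the boto3 SQS client on import
+
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# A body larger than message_size_threshold (256 KB by default) should be
+# stored in the bucket, with only a reference sent through SQS.
+large_body = 'x' * (300 * 1024)
+sqs.send_message(QueueUrl='queue-url', MessageBody=large_body)
+
+# Receiving uses the standard API; the extended client should resolve the
+# S3 reference back into the original body.
+response = sqs.receive_message(QueueUrl='queue-url')
+for message in response.get('Messages', []):
+    assert message['Body'] == large_body
+    sqs.delete_message(QueueUrl='queue-url', ReceiptHandle=message['ReceiptHandle'])
+```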
+
+
+
+
+%package -n python3-sqs-extended-client
+Summary: AWS SQS extended client functionality from amazon-sqs-java-extended-client-lib
+Provides: python-sqs-extended-client
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-sqs-extended-client
+# sqs-extended-client
+
+### Implements the functionality of [amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) in Python
+
+## Installation
+```
+pip install sqs-extended-client
+```
+
+
+## Overview
+sqs-extended-client allows sending large messages through SQS by storing the message payload in S3. This is the same mechanism that the Amazon library
+[amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) provides, and the two libraries are interoperable.
+
+To do this, this library automatically extends the normal boto3 SQS client and Queue resource classes upon import using the [botoinator](https://github.com/QuiNovas/botoinator) library. This allows for further extension or decoration if desired.
+
+## Additional attributes available on `boto3` SQS `client` and `Queue` objects
+* `large_payload_support` -- the name of the S3 bucket that will store large messages.
+* `message_size_threshold` -- the message size, in bytes, above which the body is stored in the large-message bucket. Cannot be less than `0` or greater than `262144`. Defaults to `262144`.
+* `always_through_s3` -- if `True`, all messages are stored in S3 regardless of size. Defaults to `False`.
+* `s3` -- the boto3 S3 `resource` object used to store message bodies in S3. Set this if you want to control the S3 resource (for example, custom S3 config or credentials). Defaults to `boto3.resource("s3")` on first use if not previously set.
+
+## Usage
+
+#### Note:
+> The S3 bucket must already exist before use and be accessible with the credentials you are using.
+
+### Enabling support for large payloads (>256 KB)
+
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+```
+
+### Enabling support for large payloads (>64 KB)
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.message_size_threshold = 65536
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.message_size_threshold = 65536
+```
+### Enabling support for large payloads for all messages
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.always_through_s3 = True
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.always_through_s3 = True
+```
+### Setting a custom S3 resource
+```python
+import boto3
+from botocore.config import Config
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+```
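+
+### Sending and receiving a large message
+Once `large_payload_support` is configured, messages are sent and received with the standard boto3 SQS calls; bodies above `message_size_threshold` should be offloaded to the bucket transparently and resolved back on receive. A minimal sketch, assuming the placeholder queue URL and bucket from the examples above already exist:
+```python
+import boto3
+import sqs_extended_client  # patches the boto3 SQS client on import
+
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# A body larger than message_size_threshold (256 KB by default) should be
+# stored in the bucket, with only a reference sent through SQS.
+large_body = 'x' * (300 * 1024)
+sqs.send_message(QueueUrl='queue-url', MessageBody=large_body)
+
+# Receiving uses the standard API; the extended client should resolve the
+# S3 reference back into the original body.
+response = sqs.receive_message(QueueUrl='queue-url')
+for message in response.get('Messages', []):
+    assert message['Body'] == large_body
+    sqs.delete_message(QueueUrl='queue-url', ReceiptHandle=message['ReceiptHandle'])
+```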
+
+
+
+
+%package help
+Summary: Development documents and examples for sqs-extended-client
+Provides: python3-sqs-extended-client-doc
+%description help
+# sqs-extended-client
+
+### Implements the functionality of [amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) in Python
+
+## Installation
+```
+pip install sqs-extended-client
+```
+
+
+## Overview
+sqs-extended-client allows sending large messages through SQS by storing the message payload in S3. This is the same mechanism that the Amazon library
+[amazon-sqs-java-extended-client-lib](https://github.com/awslabs/amazon-sqs-java-extended-client-lib) provides, and the two libraries are interoperable.
+
+To do this, this library automatically extends the normal boto3 SQS client and Queue resource classes upon import using the [botoinator](https://github.com/QuiNovas/botoinator) library. This allows for further extension or decoration if desired.
+
+## Additional attributes available on `boto3` SQS `client` and `Queue` objects
+* `large_payload_support` -- the name of the S3 bucket that will store large messages.
+* `message_size_threshold` -- the message size, in bytes, above which the body is stored in the large-message bucket. Cannot be less than `0` or greater than `262144`. Defaults to `262144`.
+* `always_through_s3` -- if `True`, all messages are stored in S3 regardless of size. Defaults to `False`.
+* `s3` -- the boto3 S3 `resource` object used to store message bodies in S3. Set this if you want to control the S3 resource (for example, custom S3 config or credentials). Defaults to `boto3.resource("s3")` on first use if not previously set.
+
+## Usage
+
+#### Note:
+> The S3 bucket must already exist before use and be accessible with the credentials you are using.
+
+### Enabling support for large payloads (>256 KB)
+
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+```
+
+### Enabling support for large payloads (>64 KB)
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.message_size_threshold = 65536
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.message_size_threshold = 65536
+```
+### Enabling support for large payloads for all messages
+```python
+import boto3
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.always_through_s3 = True
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.always_through_s3 = True
+```
+### Setting a custom S3 resource
+```python
+import boto3
+from botocore.config import Config
+import sqs_extended_client
+
+# Low level client
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+sqs.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+
+# boto resource
+resource = boto3.resource('sqs')
+queue = resource.Queue('queue-url')
+
+# Or
+queue = resource.create_queue(QueueName='queue-name')
+
+queue.large_payload_support = 'my-bucket-name'
+queue.s3 = boto3.resource(
+ 's3',
+ config=Config(
+ signature_version='s3v4',
+ s3={
+ "use_accelerate_endpoint": True
+ }
+ )
+)
+```
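+
+### Sending and receiving a large message
+Once `large_payload_support` is configured, messages are sent and received with the standard boto3 SQS calls; bodies above `message_size_threshold` should be offloaded to the bucket transparently and resolved back on receive. A minimal sketch, assuming the placeholder queue URL and bucket from the examples above already exist:
+```python
+import boto3
+import sqs_extended_client  # patches the boto3 SQS client on import
+
+sqs = boto3.client('sqs')
+sqs.large_payload_support = 'my-bucket-name'
+
+# A body larger than message_size_threshold (256 KB by default) should be
+# stored in the bucket, with only a reference sent through SQS.
+large_body = 'x' * (300 * 1024)
+sqs.send_message(QueueUrl='queue-url', MessageBody=large_body)
+
+# Receiving uses the standard API; the extended client should resolve the
+# S3 reference back into the original body.
+response = sqs.receive_message(QueueUrl='queue-url')
+for message in response.get('Messages', []):
+    assert message['Body'] == large_body
+    sqs.delete_message(QueueUrl='queue-url', ReceiptHandle=message['ReceiptHandle'])
+```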
+
+
+
+
+%prep
+%autosetup -n sqs-extended-client-0.0.10
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-sqs-extended-client -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Fri May 05 2023 Python_Bot <Python_Bot@openeuler.org> - 0.0.10-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..a68fae5
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+088cb1ab791e3bc12339ade7325b66a5 sqs-extended-client-0.0.10.tar.gz