summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCoprDistGit <infra@openeuler.org>2023-05-15 08:56:12 +0000
committerCoprDistGit <infra@openeuler.org>2023-05-15 08:56:12 +0000
commit0cb1c6a0ae799351f89b33f48c4268fe1a314642 (patch)
tree70eabdfbd74da739533817020fe7e8791fa3bae6
parentcd4a93af4455b3efe9841103f8066da3f35a06aa (diff)
automatic import of python-sqs-queue
-rw-r--r--.gitignore1
-rw-r--r--python-sqs-queue.spec306
-rw-r--r--sources1
3 files changed, 308 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..0254296 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/sqs_queue-0.6.5.tar.gz
diff --git a/python-sqs-queue.spec b/python-sqs-queue.spec
new file mode 100644
index 0000000..0ea53d2
--- /dev/null
+++ b/python-sqs-queue.spec
@@ -0,0 +1,306 @@
+%global _empty_manifest_terminate_build 0
+Name: python-sqs-queue
+Version: 0.6.5
+Release: 1
+Summary: AWS SQS queue consumer/publisher
+License: MIT
+URL: https://github.com/Media-Platforms/py-sqs-queue
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/88/07/411edd7e964acf147c6f59c219f28d105d4d00af891c22ab515abbb6f4d0/sqs_queue-0.6.5.tar.gz
+BuildArch: noarch
+
+
+%description
+# py-sqs-queue
+
+Simple Python AWS SQS queue consumer and publisher
+
+## Installation
+
+`python setup.py install`
+
+## Examples
+
+ from sqs_queue import Queue
+
+ my_queue = Queue('YOUR_QUEUE_NAME')
+ for message in my_queue:
+ your_process_fn(message)
+
+Or, if you'd like to leave unprocessable messages in the queue to be retried again later:
+
+ for message in my_queue:
+ try:
+ your_process_fn(message)
+ except YourRetryableError:
+ message.defer()
+ except Exception as e:
+ logger.warn(e)
+
+And, you can publish to the queue as well:
+
+```py
+queue.publish({'MessageId': 123, 'Message': '{"foo": "bar"}'})
+```
+
+If you already have a boto3 queue resource, pass this instead of a name:
+
+```py
+import boto3
+from sqs_queue import Queue
+
+queue_resource = boto3.resource('sqs').Queue('YOUR_QUEUE_NAME')
+
+my_queue = Queue(queue=queue_resource)
+```
+
+## Configuration
+
+You can put your AWS credentials in environment variables or [any of the other places boto3 looks](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html).
+
+## Parameters
+
+
+### `poll_wait` and `poll_sleep`
+
+Behind the scenes, the generator is polling SQS for new messages. When the queue is empty, that
+call will wait up to 20 seconds for new messages, and if it times out before any arrive it will
+sleep for 40 seconds before trying again. Those time intervals are configurable:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', poll_wait=20, poll_sleep=40)
+```
+
+### `drain`
+
+Normally, once the queue is empty, the generator waits for more messages. If you just want to process all existing messages and quit, you can pass this boolean parameter:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', drain=True)
+```
+
+For example, if your queue is long and your consumers are falling behind, you can start a bunch of consumers with `drain=True` and they'll quit when you've caught up.
+
+### `sns`
+
+If your SQS queue is being fed from an SNS topic, you can pass your Queue this boolean parameter, and then your messages will just contain the SNS notification data, so you don't have to fish it out of the SQS message and decode it:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', sns=True)
+```
+When you use this option, the `sns_message_id` is added to the notification data, which can be used to make sure you only process each message once.
+
+
+%package -n python3-sqs-queue
+Summary: AWS SQS queue consumer/publisher
+Provides: python-sqs-queue
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-sqs-queue
+# py-sqs-queue
+
+Simple Python AWS SQS queue consumer and publisher
+
+## Installation
+
+`python setup.py install`
+
+## Examples
+
+ from sqs_queue import Queue
+
+ my_queue = Queue('YOUR_QUEUE_NAME')
+ for message in my_queue:
+ your_process_fn(message)
+
+Or, if you'd like to leave unprocessable messages in the queue to be retried again later:
+
+ for message in my_queue:
+ try:
+ your_process_fn(message)
+ except YourRetryableError:
+ message.defer()
+ except Exception as e:
+ logger.warn(e)
+
+And, you can publish to the queue as well:
+
+```py
+queue.publish({'MessageId': 123, 'Message': '{"foo": "bar"}'})
+```
+
+If you already have a boto3 queue resource, pass this instead of a name:
+
+```py
+import boto3
+from sqs_queue import Queue
+
+queue_resource = boto3.resource('sqs').Queue('YOUR_QUEUE_NAME')
+
+my_queue = Queue(queue=queue_resource)
+```
+
+## Configuration
+
+You can put your AWS credentials in environment variables or [any of the other places boto3 looks](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html).
+
+## Parameters
+
+
+### `poll_wait` and `poll_sleep`
+
+Behind the scenes, the generator is polling SQS for new messages. When the queue is empty, that
+call will wait up to 20 seconds for new messages, and if it times out before any arrive it will
+sleep for 40 seconds before trying again. Those time intervals are configurable:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', poll_wait=20, poll_sleep=40)
+```
+
+### `drain`
+
+Normally, once the queue is empty, the generator waits for more messages. If you just want to process all existing messages and quit, you can pass this boolean parameter:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', drain=True)
+```
+
+For example, if your queue is long and your consumers are falling behind, you can start a bunch of consumers with `drain=True` and they'll quit when you've caught up.
+
+### `sns`
+
+If your SQS queue is being fed from an SNS topic, you can pass your Queue this boolean parameter, and then your messages will just contain the SNS notification data, so you don't have to fish it out of the SQS message and decode it:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', sns=True)
+```
+When you use this option, the `sns_message_id` is added to the notification data, which can be used to make sure you only process each message once.
+
+
+%package help
+Summary: Development documents and examples for sqs-queue
+Provides: python3-sqs-queue-doc
+%description help
+# py-sqs-queue
+
+Simple Python AWS SQS queue consumer and publisher
+
+## Installation
+
+`python setup.py install`
+
+## Examples
+
+ from sqs_queue import Queue
+
+ my_queue = Queue('YOUR_QUEUE_NAME')
+ for message in my_queue:
+ your_process_fn(message)
+
+Or, if you'd like to leave unprocessable messages in the queue to be retried again later:
+
+ for message in my_queue:
+ try:
+ your_process_fn(message)
+ except YourRetryableError:
+ message.defer()
+ except Exception as e:
+ logger.warn(e)
+
+And, you can publish to the queue as well:
+
+```py
+queue.publish({'MessageId': 123, 'Message': '{"foo": "bar"}'})
+```
+
+If you already have a boto3 queue resource, pass this instead of a name:
+
+```py
+import boto3
+from sqs_queue import Queue
+
+queue_resource = boto3.resource('sqs').Queue('YOUR_QUEUE_NAME')
+
+my_queue = Queue(queue=queue_resource)
+```
+
+## Configuration
+
+You can put your AWS credentials in environment variables or [any of the other places boto3 looks](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html).
+
+## Parameters
+
+
+### `poll_wait` and `poll_sleep`
+
+Behind the scenes, the generator is polling SQS for new messages. When the queue is empty, that
+call will wait up to 20 seconds for new messages, and if it times out before any arrive it will
+sleep for 40 seconds before trying again. Those time intervals are configurable:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', poll_wait=20, poll_sleep=40)
+```
+
+### `drain`
+
+Normally, once the queue is empty, the generator waits for more messages. If you just want to process all existing messages and quit, you can pass this boolean parameter:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', drain=True)
+```
+
+For example, if your queue is long and your consumers are falling behind, you can start a bunch of consumers with `drain=True` and they'll quit when you've caught up.
+
+### `sns`
+
+If your SQS queue is being fed from an SNS topic, you can pass your Queue this boolean parameter, and then your messages will just contain the SNS notification data, so you don't have to fish it out of the SQS message and decode it:
+
+```py
+queue = Queue('YOUR_QUEUE_NAME', sns=True)
+```
+When you use this option, the `sns_message_id` is added to the notification data, which can be used to make sure you only process each message once.
+
+
+%prep
+%autosetup -n sqs_queue-0.6.5
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-sqs-queue -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Mon May 15 2023 Python_Bot <Python_Bot@openeuler.org> - 0.6.5-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..372679d
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+0d39524241c651b551b8a3843910d14c sqs_queue-0.6.5.tar.gz