# RPM spec for python-domovoi (generated from the PyPI sdist by pyporter, then
# hand-cleaned: line structure restored and the triplicated %%description text
# factored into a single %%global macro).
%global _empty_manifest_terminate_build 0

Name:           python-domovoi
Version:        2.0.2
Release:        1
Summary:        AWS Lambda event handler manager
License:        Apache Software License
URL:            https://github.com/kislyuk/domovoi
Source0:        https://mirrors.nju.edu.cn/pypi/web/packages/f3/1d/a20fd0f598eaa6fb40ab156d192aa4c57e0fc17b7378350b1cc62cf69d05/domovoi-2.0.2.tar.gz
BuildArch:      noarch

Requires:       python3-boto3
Requires:       python3-chalice
# NOTE(review): enum34 is a Python 2 backport of the stdlib enum module and
# should not normally be required by a python3 package (it can shadow stdlib
# enum). Kept to preserve the generated dependency set -- confirm and drop.
Requires:       python3-enum34

# The same upstream README text is used by all three %%description sections;
# define it once and reuse it below. The reST link targets that were stripped
# during spec generation have been restored from the upstream README --
# verify them against https://pypi.org/project/domovoi/ before shipping.
%global _description %{expand:
*Domovoi* is an extension to `AWS Chalice <https://github.com/aws/chalice>`_
to handle `AWS Lambda <https://aws.amazon.com/lambda/>`_
`event sources <https://docs.aws.amazon.com/lambda/latest/dg/invoking-lambda-function.html>`_
other than HTTP requests through API Gateway. Domovoi lets you easily
configure and deploy a Lambda function to serve HTTP requests through
`ALB <https://docs.aws.amazon.com/elasticloadbalancing/latest/application/introduction.html>`_,
on a schedule, or in response to a variety of events like an
`SNS <https://aws.amazon.com/sns/>`_ or `SQS <https://aws.amazon.com/sqs/>`_
message, S3 event, or custom
`state machine <https://aws.amazon.com/step-functions/>`_ transition:

    import json, boto3, domovoi

    app = domovoi.Domovoi()

    # Compared to API Gateway, ALB increases the response timeout from 30s to 900s, but reduces the payload
    # limit from 10MB to 1MB. It also does not try to negotiate on the Accept/Content-Type headers.
    @app.alb_target()
    def serve(event, context):
        return dict(statusCode=200, statusDescription="200 OK", isBase64Encoded=False,
                    headers={"Content-Type": "application/json"}, body=json.dumps({"hello": "world"}))

    @app.scheduled_function("cron(0 18 ? * MON-FRI *)")
    def foo(event, context):
        context.log("foo invoked at 06:00pm (UTC) every Mon-Fri")
        return dict(result=True)

    @app.scheduled_function("rate(1 minute)")
    def bar(event, context):
        context.log("bar invoked once a minute")
        boto3.resource("sns").create_topic(Name="bartender").publish(Message=json.dumps({"beer": 1}))
        return dict(result="Work work work")

    @app.sns_topic_subscriber("bartender")
    def tend(event, context):
        message = json.loads(event["Records"][0]["Sns"]["Message"])
        context.log(dict(beer="Quadrupel", quantity=message["beer"]))

    # SQS messages are deleted upon successful exit, requeued otherwise.
    # See https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html
    @app.sqs_queue_subscriber("my_queue", batch_size=64)
    def process_queue_messages(event, context):
        message = json.loads(event["Records"][0]["body"])
        message_attributes = event["Records"][0]["messageAttributes"]
        # You can colocate a state machine definition with an SQS handler to launch a SFN driven lambda from SQS.
        return app.state_machine.start_execution(**message)["executionArn"]

    @app.cloudwatch_event_handler(source=["aws.ecs"])
    def monitor_ecs_events(event, context):
        message = json.loads(event["Records"][0]["Sns"]["Message"])
        context.log("Got an event from ECS: {}".format(message))

    @app.s3_event_handler(bucket="myS3bucket", events=["s3:ObjectCreated:*"], prefix="foo", suffix=".bar")
    def monitor_s3(event, context):
        context.log("Got an event from S3: {}".format(event))

    # Set use_sns=False, use_sqs=False to subscribe your Lambda directly to S3 events without forwarding them
    # through an SNS-SQS bridge. That approach has fewer moving parts, but you can only subscribe one Lambda
    # function to events in a given S3 bucket.
    @app.s3_event_handler(bucket="myS3bucket", events=["s3:ObjectCreated:*"], prefix="foo", suffix=".bar",
                          use_sns=False, use_sqs=False)
    def monitor_s3(event, context):
        context.log("Got an event from S3: {}".format(event))

    # DynamoDB event format: https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html
    @app.dynamodb_stream_handler(table_name="MyDynamoTable", batch_size=200)
    def handle_dynamodb_stream(event, context):
        context.log("Got {} events from DynamoDB".format(len(event["Records"])))
        context.log("First event: {}".format(event["Records"][0]["dynamodb"]))

    # Use the following command to log a CloudWatch Logs message that will trigger this handler:
    # python -c'import watchtower as w, logging as l; L=l.getLogger(); L.addHandler(w.CloudWatchLogHandler()); L.error(dict(x=8))'
    # See http://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html for the filter pattern syntax
    @app.cloudwatch_logs_sub_filter_handler(log_group_name="watchtower", filter_pattern="{$.x = 8}")
    def monitor_cloudwatch_logs(event, context):
        print("Got a CWL subscription filter event:", event)

    # See http://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html
    # See the "AWS Step Functions state machines" section below for a complete example of setting up a state machine.
    @app.step_function_task(state_name="Worker", state_machine_definition=state_machine)
    def worker(event, context):
        return {"result": event["input"] + 1, "my_state": context.stepfunctions_task_name}
}

%description %_description

%package -n python3-domovoi
Summary:        AWS Lambda event handler manager
Provides:       python-domovoi
BuildRequires:  python3-devel
BuildRequires:  python3-setuptools
BuildRequires:  python3-pip

%description -n python3-domovoi %_description

%package help
Summary:        Development documents and examples for domovoi
Provides:       python3-domovoi-doc

%description help %_description

%prep
%autosetup -n domovoi-2.0.2

%build
%py3_build

%install
%py3_install
# Ship any upstream doc/example directories as package documentation.
install -d -m755 %{buildroot}/%{_pkgdocdir}
if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
# Build the %%files lists by scanning what was actually installed into the
# buildroot ("%h/%f" here are find -printf directives, not rpm macros).
pushd %{buildroot}
if [ -d usr/lib ]; then
    find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/lib64 ]; then
    find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/bin ]; then
    find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/sbin ]; then
    find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
fi
touch doclist.lst
if [ -d usr/share/man ]; then
    find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
fi
popd
mv %{buildroot}/filelist.lst .
mv %{buildroot}/doclist.lst .

%files -n python3-domovoi -f filelist.lst
%dir %{python3_sitelib}/*

%files help -f doclist.lst
%{_docdir}/*

%changelog
* Thu May 18 2023 Python_Bot <Python_Bot@openeuler.org> - 2.0.2-1
- Package Spec generated