summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCoprDistGit <infra@openeuler.org>2023-05-31 03:54:15 +0000
committerCoprDistGit <infra@openeuler.org>2023-05-31 03:54:15 +0000
commit2abaae9241666b59b81d355465392b6a203f270e (patch)
tree085f67e913c8388b9d18b44c7d3c0a0214d78709
parent0801cf5ec421f43faab71cfa875d9eecd5ba2868 (diff)
automatic import of python-aws-cdk-aws-kinesisanalytics-flink-alpha
-rw-r--r--.gitignore1
-rw-r--r--python-aws-cdk-aws-kinesisanalytics-flink-alpha.spec353
-rw-r--r--sources1
3 files changed, 355 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..1e51fa7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/aws-cdk.aws-kinesisanalytics-flink-alpha-2.81.0a0.tar.gz
diff --git a/python-aws-cdk-aws-kinesisanalytics-flink-alpha.spec b/python-aws-cdk-aws-kinesisanalytics-flink-alpha.spec
new file mode 100644
index 0000000..003e2a8
--- /dev/null
+++ b/python-aws-cdk-aws-kinesisanalytics-flink-alpha.spec
@@ -0,0 +1,353 @@
+%global _empty_manifest_terminate_build 0
+Name: python-aws-cdk.aws-kinesisanalytics-flink-alpha
+Version: 2.81.0a0
+Release: 1
+Summary: A CDK Construct Library for Kinesis Analytics Flink applications
+License: Apache-2.0
+URL: https://github.com/aws/aws-cdk
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/a5/70/439e2f03af20ef574ed8c833ae82214bcac6bda9d1c5801360664333fc3d/aws-cdk.aws-kinesisanalytics-flink-alpha-2.81.0a0.tar.gz
+BuildArch: noarch
+
+Requires: python3-aws-cdk-lib
+Requires: python3-constructs
+Requires: python3-jsii
+Requires: python3-publication
+Requires: python3-typeguard
+
+%description
+<!--END STABILITY BANNER-->
+This package provides constructs for creating Kinesis Analytics Flink
+applications. To learn more about using managed Flink applications, see
+the [AWS developer
+guide](https://docs.aws.amazon.com/kinesisanalytics/latest/java/).
+## Creating Flink Applications
+To create a new Flink application, use the `Application` construct:
+```python
+import path as path
+import aws_cdk.aws_cloudwatch as cloudwatch
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppTest")
+flink_app = flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_asset(path.join(__dirname, "code-asset")),
+ runtime=flink.Runtime.FLINK_1_11
+)
+cloudwatch.Alarm(stack, "Alarm",
+ metric=flink_app.metric_full_restarts(),
+ evaluation_periods=1,
+ threshold=3
+)
+app.synth()
+```
+The `code` property can use `fromAsset` as shown above to reference a local jar
+file or `fromBucket` to reference a file in s3.
+```python
+import path as path
+import aws_cdk.aws_s3_assets as assets
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppCodeFromBucketTest")
+asset = assets.Asset(stack, "CodeAsset",
+ path=path.join(__dirname, "code-asset")
+)
+bucket = asset.bucket
+file_key = asset.s3_object_key
+flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_bucket(bucket, file_key),
+ runtime=flink.Runtime.FLINK_1_11
+)
+app.synth()
+```
+The `propertyGroups` property provides a way of passing arbitrary runtime
+properties to your Flink application. You can use the
+aws-kinesisanalytics-runtime library to [retrieve these
+properties](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-properties.html#how-properties-access).
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ property_groups={
+ "FlinkApplicationProperties": {
+ "input_stream_name": "my-input-kinesis-stream",
+ "output_stream_name": "my-output-kinesis-stream"
+ }
+ },
+ # ...
+ runtime=flink.Runtime.FLINK_1_15,
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar")
+)
+```
+Flink applications also have specific configuration for passing parameters
+when the Flink job starts. These include parameters for checkpointing,
+snapshotting, monitoring, and parallelism.
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ checkpointing_enabled=True, # default is true
+ checkpoint_interval=Duration.seconds(30), # default is 1 minute
+ min_pause_between_checkpoints=Duration.seconds(10), # default is 5 seconds
+ log_level=flink.LogLevel.ERROR, # default is INFO
+ metrics_level=flink.MetricsLevel.PARALLELISM, # default is APPLICATION
+ auto_scaling_enabled=False, # default is true
+ parallelism=32, # default is 1
+ parallelism_per_kpu=2, # default is 1
+ snapshots_enabled=False, # default is true
+ log_group=logs.LogGroup(self, "LogGroup")
+)
+```
+Flink applications can optionally be deployed in a VPC:
+```python
+# bucket: s3.Bucket
+# vpc: ec2.Vpc
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ vpc=vpc
+)
+```
+
+%package -n python3-aws-cdk.aws-kinesisanalytics-flink-alpha
+Summary: A CDK Construct Library for Kinesis Analytics Flink applications
+Provides: python-aws-cdk.aws-kinesisanalytics-flink-alpha
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-aws-cdk.aws-kinesisanalytics-flink-alpha
+<!--END STABILITY BANNER-->
+This package provides constructs for creating Kinesis Analytics Flink
+applications. To learn more about using managed Flink applications, see
+the [AWS developer
+guide](https://docs.aws.amazon.com/kinesisanalytics/latest/java/).
+## Creating Flink Applications
+To create a new Flink application, use the `Application` construct:
+```python
+import path as path
+import aws_cdk.aws_cloudwatch as cloudwatch
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppTest")
+flink_app = flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_asset(path.join(__dirname, "code-asset")),
+ runtime=flink.Runtime.FLINK_1_11
+)
+cloudwatch.Alarm(stack, "Alarm",
+ metric=flink_app.metric_full_restarts(),
+ evaluation_periods=1,
+ threshold=3
+)
+app.synth()
+```
+The `code` property can use `fromAsset` as shown above to reference a local jar
+file or `fromBucket` to reference a file in s3.
+```python
+import path as path
+import aws_cdk.aws_s3_assets as assets
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppCodeFromBucketTest")
+asset = assets.Asset(stack, "CodeAsset",
+ path=path.join(__dirname, "code-asset")
+)
+bucket = asset.bucket
+file_key = asset.s3_object_key
+flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_bucket(bucket, file_key),
+ runtime=flink.Runtime.FLINK_1_11
+)
+app.synth()
+```
+The `propertyGroups` property provides a way of passing arbitrary runtime
+properties to your Flink application. You can use the
+aws-kinesisanalytics-runtime library to [retrieve these
+properties](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-properties.html#how-properties-access).
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ property_groups={
+ "FlinkApplicationProperties": {
+ "input_stream_name": "my-input-kinesis-stream",
+ "output_stream_name": "my-output-kinesis-stream"
+ }
+ },
+ # ...
+ runtime=flink.Runtime.FLINK_1_15,
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar")
+)
+```
+Flink applications also have specific configuration for passing parameters
+when the Flink job starts. These include parameters for checkpointing,
+snapshotting, monitoring, and parallelism.
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ checkpointing_enabled=True, # default is true
+ checkpoint_interval=Duration.seconds(30), # default is 1 minute
+ min_pause_between_checkpoints=Duration.seconds(10), # default is 5 seconds
+ log_level=flink.LogLevel.ERROR, # default is INFO
+ metrics_level=flink.MetricsLevel.PARALLELISM, # default is APPLICATION
+ auto_scaling_enabled=False, # default is true
+ parallelism=32, # default is 1
+ parallelism_per_kpu=2, # default is 1
+ snapshots_enabled=False, # default is true
+ log_group=logs.LogGroup(self, "LogGroup")
+)
+```
+Flink applications can optionally be deployed in a VPC:
+```python
+# bucket: s3.Bucket
+# vpc: ec2.Vpc
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ vpc=vpc
+)
+```
+
+%package help
+Summary: Development documents and examples for aws-cdk.aws-kinesisanalytics-flink-alpha
+Provides: python3-aws-cdk.aws-kinesisanalytics-flink-alpha-doc
+%description help
+<!--END STABILITY BANNER-->
+This package provides constructs for creating Kinesis Analytics Flink
+applications. To learn more about using managed Flink applications, see
+the [AWS developer
+guide](https://docs.aws.amazon.com/kinesisanalytics/latest/java/).
+## Creating Flink Applications
+To create a new Flink application, use the `Application` construct:
+```python
+import path as path
+import aws_cdk.aws_cloudwatch as cloudwatch
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppTest")
+flink_app = flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_asset(path.join(__dirname, "code-asset")),
+ runtime=flink.Runtime.FLINK_1_11
+)
+cloudwatch.Alarm(stack, "Alarm",
+ metric=flink_app.metric_full_restarts(),
+ evaluation_periods=1,
+ threshold=3
+)
+app.synth()
+```
+The `code` property can use `fromAsset` as shown above to reference a local jar
+file or `fromBucket` to reference a file in s3.
+```python
+import path as path
+import aws_cdk.aws_s3_assets as assets
+import aws_cdk as core
+import aws_cdk.aws_kinesisanalytics_flink_alpha as flink
+app = core.App()
+stack = core.Stack(app, "FlinkAppCodeFromBucketTest")
+asset = assets.Asset(stack, "CodeAsset",
+ path=path.join(__dirname, "code-asset")
+)
+bucket = asset.bucket
+file_key = asset.s3_object_key
+flink.Application(stack, "App",
+ code=flink.ApplicationCode.from_bucket(bucket, file_key),
+ runtime=flink.Runtime.FLINK_1_11
+)
+app.synth()
+```
+The `propertyGroups` property provides a way of passing arbitrary runtime
+properties to your Flink application. You can use the
+aws-kinesisanalytics-runtime library to [retrieve these
+properties](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-properties.html#how-properties-access).
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ property_groups={
+ "FlinkApplicationProperties": {
+ "input_stream_name": "my-input-kinesis-stream",
+ "output_stream_name": "my-output-kinesis-stream"
+ }
+ },
+ # ...
+ runtime=flink.Runtime.FLINK_1_15,
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar")
+)
+```
+Flink applications also have specific configuration for passing parameters
+when the Flink job starts. These include parameters for checkpointing,
+snapshotting, monitoring, and parallelism.
+```python
+# bucket: s3.Bucket
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ checkpointing_enabled=True, # default is true
+ checkpoint_interval=Duration.seconds(30), # default is 1 minute
+ min_pause_between_checkpoints=Duration.seconds(10), # default is 5 seconds
+ log_level=flink.LogLevel.ERROR, # default is INFO
+ metrics_level=flink.MetricsLevel.PARALLELISM, # default is APPLICATION
+ auto_scaling_enabled=False, # default is true
+ parallelism=32, # default is 1
+ parallelism_per_kpu=2, # default is 1
+ snapshots_enabled=False, # default is true
+ log_group=logs.LogGroup(self, "LogGroup")
+)
+```
+Flink applications can optionally be deployed in a VPC:
+```python
+# bucket: s3.Bucket
+# vpc: ec2.Vpc
+flink_app = flink.Application(self, "Application",
+ code=flink.ApplicationCode.from_bucket(bucket, "my-app.jar"),
+ runtime=flink.Runtime.FLINK_1_15,
+ vpc=vpc
+)
+```
+
+%prep
+%autosetup -n aws-cdk.aws-kinesisanalytics-flink-alpha-2.81.0a0
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-aws-cdk.aws-kinesisanalytics-flink-alpha -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 31 2023 Python_Bot <Python_Bot@openeuler.org> - 2.81.0a0-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..63fd9f1
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+5ae4d2c2dc74b8aa5cef06487621bff3 aws-cdk.aws-kinesisanalytics-flink-alpha-2.81.0a0.tar.gz