author  CoprDistGit <infra@openeuler.org>  2023-04-10 09:12:50 +0000
committer  CoprDistGit <infra@openeuler.org>  2023-04-10 09:12:50 +0000
commit  f4617d4d70d3f7892eecf7bfa75fef293fe9f3e8 (patch)
tree  f2d8ea3ce948d3b64b0ef13faf180e51a5d764c0
parent  14bab512b1c1fde3b100c18202bc524b69730f73 (diff)
automatic import of python-ddlparse
-rw-r--r--  .gitignore            1
-rw-r--r--  python-ddlparse.spec  412
-rw-r--r--  sources               1
3 files changed, 414 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..0261016 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/ddlparse-1.10.0.tar.gz
diff --git a/python-ddlparse.spec b/python-ddlparse.spec
new file mode 100644
index 0000000..c42f8df
--- /dev/null
+++ b/python-ddlparse.spec
@@ -0,0 +1,412 @@
+%global _empty_manifest_terminate_build 0
+Name: python-ddlparse
+Version: 1.10.0
+Release: 1
+Summary: DDL parse and convert to BigQuery JSON schema
+License: BSD-3-Clause
+URL: http://github.com/shinichi-takii/ddlparse
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/1b/3c/a9ece2adfd4ee8f5030669804df58532d5b8934e955e1399e11379af535e/ddlparse-1.10.0.tar.gz
+BuildArch: noarch
+
+Requires: python3-pyparsing
+
+%description
+## Features
+- Parses DDL and extracts table schema information.
+- Currently, only the `CREATE TABLE` statement is supported.
+- Converts to [BigQuery JSON schema](https://cloud.google.com/bigquery/docs/schemas#creating_a_json_schema_file) and [BigQuery DDL statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) (see the sketch after this list).
+- Supported source databases: MySQL/MariaDB, PostgreSQL, Oracle, and Redshift.
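+A minimal sketch of that conversion, using only calls demonstrated in the Usage section below:
+```python
+from ddlparse import DdlParse
+
+# Parse a CREATE TABLE statement and print the equivalent BigQuery JSON schema.
+ddl = "CREATE TABLE Users (Id integer PRIMARY KEY, Name varchar(100) NOT NULL)"
+table = DdlParse().parse(ddl)
+print(table.to_bigquery_fields())
+```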
+## Requirements
+1. Python >= 3.5
+1. [pyparsing](https://github.com/pyparsing/pyparsing)
+## Installation
+### Install
+pip install:
+```bash
+$ pip install ddlparse
+```
+install from source:
+```bash
+$ python setup.py install
+```
+### Update
+pip update:
+```bash
+$ pip install ddlparse --upgrade
+```
+## Usage
+### Example
+```python
+import json
+from ddlparse import DdlParse
+sample_ddl = """
+CREATE TABLE My_Schema.Sample_Table (
+ Id integer PRIMARY KEY COMMENT 'User ID',
+ Name varchar(100) NOT NULL COMMENT 'User name',
+ Total bigint NOT NULL,
+ Avg decimal(5,1) NOT NULL,
+ Point int(10) unsigned,
+ Zerofill_Id integer unsigned zerofill NOT NULL,
+ Created_At date, -- Oracle 'DATE' -> BigQuery 'DATETIME'
+ UNIQUE (NAME)
+);
+"""
+# parse pattern (1-1)
+table = DdlParse().parse(sample_ddl)
+# parse pattern (1-2) : Specify source database
+table = DdlParse().parse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+# parse pattern (2-1)
+parser = DdlParse(sample_ddl)
+table = parser.parse()
+print("* BigQuery Fields * : normal")
+print(table.to_bigquery_fields())
+# parse pattern (2-2) : Specify source database
+parser = DdlParse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+table = parser.parse()
+# parse pattern (3-1)
+parser = DdlParse()
+parser.ddl = sample_ddl
+table = parser.parse()
+# parse pattern (3-2) : Specify source database
+parser = DdlParse()
+parser.source_database = DdlParse.DATABASE.oracle
+parser.ddl = sample_ddl
+table = parser.parse()
+print("* BigQuery Fields * : Oracle")
+print(table.to_bigquery_fields())
+print("* TABLE *")
+print("schema = {} : name = {} : is_temp = {}".format(table.schema, table.name, table.is_temp))
+print("* BigQuery Fields *")
+print(table.to_bigquery_fields())
+print("* BigQuery Fields - column name to lower case / upper case *")
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.upper))
+print("* COLUMN *")
+for col in table.columns.values():
+ col_info = {}
+ col_info["name"] = col.name
+ col_info["data_type"] = col.data_type
+ col_info["length"] = col.length
+ col_info["precision(=length)"] = col.precision
+ col_info["scale"] = col.scale
+ col_info["is_unsigned"] = col.is_unsigned
+ col_info["is_zerofill"] = col.is_zerofill
+ col_info["constraint"] = col.constraint
+ col_info["not_null"] = col.not_null
+ col_info["PK"] = col.primary_key
+ col_info["unique"] = col.unique
+ col_info["auto_increment"] = col.auto_increment
+ col_info["distkey"] = col.distkey
+ col_info["sortkey"] = col.sortkey
+ col_info["encode"] = col.encode
+ col_info["default"] = col.default
+ col_info["character_set"] = col.character_set
+ col_info["bq_legacy_data_type"] = col.bigquery_legacy_data_type
+ col_info["bq_standard_data_type"] = col.bigquery_standard_data_type
+ col_info["comment"] = col.comment
+ col_info["description(=comment)"] = col.description
+ col_info["bigquery_field"] = json.loads(col.to_bigquery_field())
+ print(json.dumps(col_info, indent=2, ensure_ascii=False))
+print("* DDL (CREATE TABLE) statements *")
+print(table.to_bigquery_ddl())
+print("* DDL (CREATE TABLE) statements - dataset name, table name and column name to lower case / upper case *")
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.upper))
+print("* Get Column object (case insensitive) *")
+print(table.columns["total"])
+print(table.columns["total"].data_type)
+```
+## License
+[BSD 3-Clause License](https://github.com/shinichi-takii/ddlparse/blob/master/LICENSE.md)
+## Author
+Shinichi Takii <shinichi.takii@shaketh.com>
+## Links
+- Repository : https://github.com/shinichi-takii/ddlparse
+- PyPI Package : https://pypi.org/project/ddlparse/
+## Special Thanks
+- pyparsing : https://github.com/pyparsing/pyparsing
+
+%package -n python3-ddlparse
+Summary: DDL parse and convert to BigQuery JSON schema
+Provides: python-ddlparse
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-ddlparse
+## Features
+- Parses DDL and extracts table schema information.
+- Currently, only the `CREATE TABLE` statement is supported.
+- Converts to [BigQuery JSON schema](https://cloud.google.com/bigquery/docs/schemas#creating_a_json_schema_file) and [BigQuery DDL statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language).
+- Supported source databases: MySQL/MariaDB, PostgreSQL, Oracle, and Redshift (see the sketch after this list).
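+A minimal sketch of specifying the source database, using the API shown in the Usage section below:
+```python
+from ddlparse import DdlParse
+
+# Oracle 'DATE' maps to BigQuery 'DATETIME', so the source database matters.
+ddl = "CREATE TABLE Logs (Id integer PRIMARY KEY, Created_At date)"
+table = DdlParse().parse(ddl=ddl, source_database=DdlParse.DATABASE.oracle)
+print(table.to_bigquery_ddl())
+```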
+## Requirements
+1. Python >= 3.5
+1. [pyparsing](https://github.com/pyparsing/pyparsing)
+## Installation
+### Install
+pip install:
+```bash
+$ pip install ddlparse
+```
+install from source:
+```bash
+$ python setup.py install
+```
+### Update
+pip update:
+```bash
+$ pip install ddlparse --upgrade
+```
+## Usage
+### Example
+```python
+import json
+from ddlparse import DdlParse
+sample_ddl = """
+CREATE TABLE My_Schema.Sample_Table (
+ Id integer PRIMARY KEY COMMENT 'User ID',
+ Name varchar(100) NOT NULL COMMENT 'User name',
+ Total bigint NOT NULL,
+ Avg decimal(5,1) NOT NULL,
+ Point int(10) unsigned,
+ Zerofill_Id integer unsigned zerofill NOT NULL,
+ Created_At date, -- Oracle 'DATE' -> BigQuery 'DATETIME'
+ UNIQUE (NAME)
+);
+"""
+# parse pattern (1-1)
+table = DdlParse().parse(sample_ddl)
+# parse pattern (1-2) : Specify source database
+table = DdlParse().parse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+# parse pattern (2-1)
+parser = DdlParse(sample_ddl)
+table = parser.parse()
+print("* BigQuery Fields * : normal")
+print(table.to_bigquery_fields())
+# parse pattern (2-2) : Specify source database
+parser = DdlParse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+table = parser.parse()
+# parse pattern (3-1)
+parser = DdlParse()
+parser.ddl = sample_ddl
+table = parser.parse()
+# parse pattern (3-2) : Specify source database
+parser = DdlParse()
+parser.source_database = DdlParse.DATABASE.oracle
+parser.ddl = sample_ddl
+table = parser.parse()
+print("* BigQuery Fields * : Oracle")
+print(table.to_bigquery_fields())
+print("* TABLE *")
+print("schema = {} : name = {} : is_temp = {}".format(table.schema, table.name, table.is_temp))
+print("* BigQuery Fields *")
+print(table.to_bigquery_fields())
+print("* BigQuery Fields - column name to lower case / upper case *")
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.upper))
+print("* COLUMN *")
+for col in table.columns.values():
+ col_info = {}
+ col_info["name"] = col.name
+ col_info["data_type"] = col.data_type
+ col_info["length"] = col.length
+ col_info["precision(=length)"] = col.precision
+ col_info["scale"] = col.scale
+ col_info["is_unsigned"] = col.is_unsigned
+ col_info["is_zerofill"] = col.is_zerofill
+ col_info["constraint"] = col.constraint
+ col_info["not_null"] = col.not_null
+ col_info["PK"] = col.primary_key
+ col_info["unique"] = col.unique
+ col_info["auto_increment"] = col.auto_increment
+ col_info["distkey"] = col.distkey
+ col_info["sortkey"] = col.sortkey
+ col_info["encode"] = col.encode
+ col_info["default"] = col.default
+ col_info["character_set"] = col.character_set
+ col_info["bq_legacy_data_type"] = col.bigquery_legacy_data_type
+ col_info["bq_standard_data_type"] = col.bigquery_standard_data_type
+ col_info["comment"] = col.comment
+ col_info["description(=comment)"] = col.description
+ col_info["bigquery_field"] = json.loads(col.to_bigquery_field())
+ print(json.dumps(col_info, indent=2, ensure_ascii=False))
+print("* DDL (CREATE TABLE) statements *")
+print(table.to_bigquery_ddl())
+print("* DDL (CREATE TABLE) statements - dataset name, table name and column name to lower case / upper case *")
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.upper))
+print("* Get Column object (case insensitive) *")
+print(table.columns["total"])
+print(table.columns["total"].data_type)
+```
+## License
+[BSD 3-Clause License](https://github.com/shinichi-takii/ddlparse/blob/master/LICENSE.md)
+## Author
+Shinichi Takii <shinichi.takii@shaketh.com>
+## Links
+- Repository : https://github.com/shinichi-takii/ddlparse
+- PyPI Package : https://pypi.org/project/ddlparse/
+## Special Thanks
+- pyparsing : https://github.com/pyparsing/pyparsing
+
+%package help
+Summary: Development documents and examples for ddlparse
+Provides: python3-ddlparse-doc
+%description help
+## Features
+- Parses DDL and extracts table schema information (see the sketch after this list).
+- Currently, only the `CREATE TABLE` statement is supported.
+- Converts to [BigQuery JSON schema](https://cloud.google.com/bigquery/docs/schemas#creating_a_json_schema_file) and [BigQuery DDL statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language).
+- Supported source databases: MySQL/MariaDB, PostgreSQL, Oracle, and Redshift.
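+A minimal sketch of reading parsed column metadata, using the accessors shown in the Usage section below:
+```python
+from ddlparse import DdlParse
+
+# Column lookup is case insensitive; data_type reflects the parsed DDL type.
+ddl = "CREATE TABLE Users (Id integer PRIMARY KEY, Name varchar(100) NOT NULL)"
+table = DdlParse().parse(ddl)
+print(table.columns["name"].data_type)
+```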
+## Requirements
+1. Python >= 3.5
+1. [pyparsing](https://github.com/pyparsing/pyparsing)
+## Installation
+### Install
+pip install:
+```bash
+$ pip install ddlparse
+```
+install from source:
+```bash
+$ python setup.py install
+```
+### Update
+pip update:
+```bash
+$ pip install ddlparse --upgrade
+```
+## Usage
+### Example
+```python
+import json
+from ddlparse import DdlParse
+sample_ddl = """
+CREATE TABLE My_Schema.Sample_Table (
+ Id integer PRIMARY KEY COMMENT 'User ID',
+ Name varchar(100) NOT NULL COMMENT 'User name',
+ Total bigint NOT NULL,
+ Avg decimal(5,1) NOT NULL,
+ Point int(10) unsigned,
+ Zerofill_Id integer unsigned zerofill NOT NULL,
+ Created_At date, -- Oracle 'DATE' -> BigQuery 'DATETIME'
+ UNIQUE (NAME)
+);
+"""
+# parse pattern (1-1)
+table = DdlParse().parse(sample_ddl)
+# parse pattern (1-2) : Specify source database
+table = DdlParse().parse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+# parse pattern (2-1)
+parser = DdlParse(sample_ddl)
+table = parser.parse()
+print("* BigQuery Fields * : normal")
+print(table.to_bigquery_fields())
+# parse pattern (2-2) : Specify source database
+parser = DdlParse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
+table = parser.parse()
+# parse pattern (3-1)
+parser = DdlParse()
+parser.ddl = sample_ddl
+table = parser.parse()
+# parse pattern (3-2) : Specify source database
+parser = DdlParse()
+parser.source_database = DdlParse.DATABASE.oracle
+parser.ddl = sample_ddl
+table = parser.parse()
+print("* BigQuery Fields * : Oracle")
+print(table.to_bigquery_fields())
+print("* TABLE *")
+print("schema = {} : name = {} : is_temp = {}".format(table.schema, table.name, table.is_temp))
+print("* BigQuery Fields *")
+print(table.to_bigquery_fields())
+print("* BigQuery Fields - column name to lower case / upper case *")
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_fields(DdlParse.NAME_CASE.upper))
+print("* COLUMN *")
+for col in table.columns.values():
+ col_info = {}
+ col_info["name"] = col.name
+ col_info["data_type"] = col.data_type
+ col_info["length"] = col.length
+ col_info["precision(=length)"] = col.precision
+ col_info["scale"] = col.scale
+ col_info["is_unsigned"] = col.is_unsigned
+ col_info["is_zerofill"] = col.is_zerofill
+ col_info["constraint"] = col.constraint
+ col_info["not_null"] = col.not_null
+ col_info["PK"] = col.primary_key
+ col_info["unique"] = col.unique
+ col_info["auto_increment"] = col.auto_increment
+ col_info["distkey"] = col.distkey
+ col_info["sortkey"] = col.sortkey
+ col_info["encode"] = col.encode
+ col_info["default"] = col.default
+ col_info["character_set"] = col.character_set
+ col_info["bq_legacy_data_type"] = col.bigquery_legacy_data_type
+ col_info["bq_standard_data_type"] = col.bigquery_standard_data_type
+ col_info["comment"] = col.comment
+ col_info["description(=comment)"] = col.description
+ col_info["bigquery_field"] = json.loads(col.to_bigquery_field())
+ print(json.dumps(col_info, indent=2, ensure_ascii=False))
+print("* DDL (CREATE TABLE) statements *")
+print(table.to_bigquery_ddl())
+print("* DDL (CREATE TABLE) statements - dataset name, table name and column name to lower case / upper case *")
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.lower))
+print(table.to_bigquery_ddl(DdlParse.NAME_CASE.upper))
+print("* Get Column object (case insensitive) *")
+print(table.columns["total"])
+print(table.columns["total"].data_type)
+```
+## License
+[BSD 3-Clause License](https://github.com/shinichi-takii/ddlparse/blob/master/LICENSE.md)
+## Author
+Shinichi Takii <shinichi.takii@shaketh.com>
+## Links
+- Repository : https://github.com/shinichi-takii/ddlparse
+- PyPI Package : https://pypi.org/project/ddlparse/
+## Special Thanks
+- pyparsing : https://github.com/pyparsing/pyparsing
+
+%prep
+%autosetup -n ddlparse-1.10.0
+
+%build
+%py3_build
+
+%install
+%py3_install
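+# Copy any upstream doc/example directories into the package docdir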
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
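+# Walk the buildroot and record every installed file; the %files sections
+# below consume filelist.lst and doclist.lst via the -f flag.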
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
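+# rpm compresses man pages at build time, hence the appended .gz suffix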
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-ddlparse -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Mon Apr 10 2023 Python_Bot <Python_Bot@openeuler.org> - 1.10.0-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..877c393
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+fc58e30162d5540e0a696094227487bb ddlparse-1.10.0.tar.gz