From 833f6602983e357ce91353444a7edbfa183a3819 Mon Sep 17 00:00:00 2001 From: CoprDistGit Date: Wed, 31 May 2023 05:36:08 +0000 Subject: automatic import of python-googlewrapper --- .gitignore | 1 + python-googlewrapper.spec | 420 ++++++++++++++++++++++++++++++++++++++++++++++ sources | 1 + 3 files changed, 422 insertions(+) create mode 100644 python-googlewrapper.spec create mode 100644 sources diff --git a/.gitignore b/.gitignore index e69de29..3966059 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1 @@ +/googlewrapper-0.2.11.tar.gz diff --git a/python-googlewrapper.spec b/python-googlewrapper.spec new file mode 100644 index 0000000..a4364bd --- /dev/null +++ b/python-googlewrapper.spec @@ -0,0 +1,420 @@ +%global _empty_manifest_terminate_build 0 +Name: python-googlewrapper +Version: 0.2.11 +Release: 1 +Summary: Simple API wrapper for Google Products +License: MIT +URL: https://github.com/jaceiverson/googlewrapper +Source0: https://mirrors.nju.edu.cn/pypi/web/packages/a5/a1/e8674ccac89dbc930b7ef837277325a080e3e79c12c3418d75a76eea1ee9/googlewrapper-0.2.11.tar.gz +BuildArch: noarch + +Requires: python3-google-api-python-client +Requires: python3-oauth2client +Requires: python3-pandas +Requires: python3-pygsheets +Requires: python3-google-cloud-bigquery +Requires: python3-pandas-gbq +Requires: python3-pdcompare +Requires: python3-pytest +Requires: python3-pytest-cov +Requires: python3-mypy +Requires: python3-flake8 +Requires: python3-tox + +%description +# googlewrapper + +[![PyPI Latest Release](https://img.shields.io/pypi/v/googlewrapper.svg)](https://pypi.org/project/googlewrapper/) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + + +General Connector Classes for Google Products + +__Current Wrappers Available__ + + - Google Analytics + - Google Search Console + - Google Calendar + - Google Big Query + - Google PageSpeed API + - Google Sheets + - Google Docs + +_Wrappers In the Pipeline_ +- 
Gmail +- Google Maps + +# STEPS + 1) Acquire Google Credentials from API Console + 2) Install this package + 3) Create Connection in Python + 4) Use product wrapper to make API calls (see links to individual docs above) + +## Acquire Google Credentials from API Console +First we will need to get our own Google Project set up so we can get our credentials. If you don't have experience, you can do so here Google API Console + +After you have your project set up, oAuth configured, and the optional service account (only for Google Big Query connections), you are good to install this package. + +Make sure to download your oAuth credentials and save them to your working directory as 'client_secret.json'. + +## Installation +``` +pip install googlewrapper +``` +OR +``` +python -m pip install googlewrapper +``` + +### Virtual Environment +For each project it is reccomended to create a virtualenv. Here is a simple guide on virtual environments. + +## Combining Products Examples +### Example 1 +> Take a list of URLs from Sheets, grab Search Console Data, and import it into Big Query. + +```py +from googlewrapper import GoogleSearchConsole, GoogleSheets, GoogleBigQuery +import datetime as dt + +# init our objects +sheets = GoogleSheets(YOUR_URL_HERE) +gsc = GoogleSearchConsole() +gbq = GoogleBigQuery() + +# get our urls we want to pull +# remember that sheet1 is default +sites = sheets.get_column(1) + +''' +this one is a bit more technical +we can pull our column Branded Words right +from sheets then assign it to a dictionary to use +in our GSC object. + +Make sure that your url column is the index for +your df. 
This will happen by default if the urls +are in the first column in google sheets +''' +branded_list = sheets.df()['Branded Words'].to_dict() + +# assign those sheets to GSC +gsc.set_sites(sites) +# assign other GSC variables +gsc.set_date(dt.date(2021,1,1)) +gsc.set_dims(['page','date','query']) + +# get our data +gsc_data = gsc.get_data() + +# print the total clicks/impressions and avg position +# for all the sites we just pulled data for +# send them to Big Query +for site in gsc_data: + print(f"{site}'s Data\n"\ + f"Clicks: {gsc_data[site]['Clicks'].sum()}\n"\ + f"Impressions: {gsc_data[site]['Impressions'].sum()}\n"\ + f"Avg Position: {gsc_data[site]['Position'].mean()}\n\n") + # now we will send our data into our GBQ tables for storage + # we will assign the dataset name to be our url + # we will assign table to be gsc + gbq.set_dataset(site) + gbq.set_table('gsc') + # send the data to GBQ + gbq.send(gsc_data[site]) +``` + +## Pull Requests/Suggestions +I'd love to hear your feedback and suggestions. If you see something and you want to give it a shot and fix it, feel free to clone and make a pull request. OR you can submit and issue/feature request on GitHub. + +## Thanks for using my code +
+
+If you found this library useful, I'd appreciate a coffee. Thanks.
+
+Buy Me A Coffee
+
+ + +%package -n python3-googlewrapper +Summary: Simple API wrapper for Google Products +Provides: python-googlewrapper +BuildRequires: python3-devel +BuildRequires: python3-setuptools +BuildRequires: python3-pip +%description -n python3-googlewrapper +# googlewrapper + +[![PyPI Latest Release](https://img.shields.io/pypi/v/googlewrapper.svg)](https://pypi.org/project/googlewrapper/) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + + +General Connector Classes for Google Products + +__Current Wrappers Available__ + + - Google Analytics + - Google Search Console + - Google Calendar + - Google Big Query + - Google PageSpeed API + - Google Sheets + - Google Docs + +_Wrappers In the Pipeline_ +- Gmail +- Google Maps + +# STEPS + 1) Acquire Google Credentials from API Console + 2) Install this package + 3) Create Connection in Python + 4) Use product wrapper to make API calls (see links to individual docs above) + +## Acquire Google Credentials from API Console +First we will need to get our own Google Project set up so we can get our credentials. If you don't have experience, you can do so here Google API Console + +After you have your project set up, oAuth configured, and the optional service account (only for Google Big Query connections), you are good to install this package. + +Make sure to download your oAuth credentials and save them to your working directory as 'client_secret.json'. + +## Installation +``` +pip install googlewrapper +``` +OR +``` +python -m pip install googlewrapper +``` + +### Virtual Environment +For each project it is reccomended to create a virtualenv. Here is a simple guide on virtual environments. + +## Combining Products Examples +### Example 1 +> Take a list of URLs from Sheets, grab Search Console Data, and import it into Big Query. 
+ +```py +from googlewrapper import GoogleSearchConsole, GoogleSheets, GoogleBigQuery +import datetime as dt + +# init our objects +sheets = GoogleSheets(YOUR_URL_HERE) +gsc = GoogleSearchConsole() +gbq = GoogleBigQuery() + +# get our urls we want to pull +# remember that sheet1 is default +sites = sheets.get_column(1) + +''' +this one is a bit more technical +we can pull our column Branded Words right +from sheets then assign it to a dictionary to use +in our GSC object. + +Make sure that your url column is the index for +your df. This will happen by default if the urls +are in the first column in google sheets +''' +branded_list = sheets.df()['Branded Words'].to_dict() + +# assign those sheets to GSC +gsc.set_sites(sites) +# assign other GSC variables +gsc.set_date(dt.date(2021,1,1)) +gsc.set_dims(['page','date','query']) + +# get our data +gsc_data = gsc.get_data() + +# print the total clicks/impressions and avg position +# for all the sites we just pulled data for +# send them to Big Query +for site in gsc_data: + print(f"{site}'s Data\n"\ + f"Clicks: {gsc_data[site]['Clicks'].sum()}\n"\ + f"Impressions: {gsc_data[site]['Impressions'].sum()}\n"\ + f"Avg Position: {gsc_data[site]['Position'].mean()}\n\n") + # now we will send our data into our GBQ tables for storage + # we will assign the dataset name to be our url + # we will assign table to be gsc + gbq.set_dataset(site) + gbq.set_table('gsc') + # send the data to GBQ + gbq.send(gsc_data[site]) +``` + +## Pull Requests/Suggestions +I'd love to hear your feedback and suggestions. If you see something and you want to give it a shot and fix it, feel free to clone and make a pull request. OR you can submit and issue/feature request on GitHub. + +## Thanks for using my code +
+
+If you found this library useful, I'd appreciate a coffee. Thanks.
+
+Buy Me A Coffee
+
+ + +%package help +Summary: Development documents and examples for googlewrapper +Provides: python3-googlewrapper-doc +%description help +# googlewrapper + +[![PyPI Latest Release](https://img.shields.io/pypi/v/googlewrapper.svg)](https://pypi.org/project/googlewrapper/) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + + +General Connector Classes for Google Products + +__Current Wrappers Available__ + + - Google Analytics + - Google Search Console + - Google Calendar + - Google Big Query + - Google PageSpeed API + - Google Sheets + - Google Docs + +_Wrappers In the Pipeline_ +- Gmail +- Google Maps + +# STEPS + 1) Acquire Google Credentials from API Console + 2) Install this package + 3) Create Connection in Python + 4) Use product wrapper to make API calls (see links to individual docs above) + +## Acquire Google Credentials from API Console +First we will need to get our own Google Project set up so we can get our credentials. If you don't have experience, you can do so here Google API Console + +After you have your project set up, oAuth configured, and the optional service account (only for Google Big Query connections), you are good to install this package. + +Make sure to download your oAuth credentials and save them to your working directory as 'client_secret.json'. + +## Installation +``` +pip install googlewrapper +``` +OR +``` +python -m pip install googlewrapper +``` + +### Virtual Environment +For each project it is reccomended to create a virtualenv. Here is a simple guide on virtual environments. + +## Combining Products Examples +### Example 1 +> Take a list of URLs from Sheets, grab Search Console Data, and import it into Big Query. 
+ +```py +from googlewrapper import GoogleSearchConsole, GoogleSheets, GoogleBigQuery +import datetime as dt + +# init our objects +sheets = GoogleSheets(YOUR_URL_HERE) +gsc = GoogleSearchConsole() +gbq = GoogleBigQuery() + +# get our urls we want to pull +# remember that sheet1 is default +sites = sheets.get_column(1) + +''' +this one is a bit more technical +we can pull our column Branded Words right +from sheets then assign it to a dictionary to use +in our GSC object. + +Make sure that your url column is the index for +your df. This will happen by default if the urls +are in the first column in google sheets +''' +branded_list = sheets.df()['Branded Words'].to_dict() + +# assign those sheets to GSC +gsc.set_sites(sites) +# assign other GSC variables +gsc.set_date(dt.date(2021,1,1)) +gsc.set_dims(['page','date','query']) + +# get our data +gsc_data = gsc.get_data() + +# print the total clicks/impressions and avg position +# for all the sites we just pulled data for +# send them to Big Query +for site in gsc_data: + print(f"{site}'s Data\n"\ + f"Clicks: {gsc_data[site]['Clicks'].sum()}\n"\ + f"Impressions: {gsc_data[site]['Impressions'].sum()}\n"\ + f"Avg Position: {gsc_data[site]['Position'].mean()}\n\n") + # now we will send our data into our GBQ tables for storage + # we will assign the dataset name to be our url + # we will assign table to be gsc + gbq.set_dataset(site) + gbq.set_table('gsc') + # send the data to GBQ + gbq.send(gsc_data[site]) +``` + +## Pull Requests/Suggestions +I'd love to hear your feedback and suggestions. If you see something and you want to give it a shot and fix it, feel free to clone and make a pull request. OR you can submit and issue/feature request on GitHub. + +## Thanks for using my code +
+
+If you found this library useful, I'd appreciate a coffee. Thanks.
+
+Buy Me A Coffee
+
+ + +%prep +%autosetup -n googlewrapper-0.2.11 + +%build +%py3_build + +%install +%py3_install +install -d -m755 %{buildroot}/%{_pkgdocdir} +if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi +if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi +if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi +if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi +pushd %{buildroot} +if [ -d usr/lib ]; then + find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/lib64 ]; then + find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/bin ]; then + find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/sbin ]; then + find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst +fi +touch doclist.lst +if [ -d usr/share/man ]; then + find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst +fi +popd +mv %{buildroot}/filelist.lst . +mv %{buildroot}/doclist.lst . + +%files -n python3-googlewrapper -f filelist.lst +%dir %{python3_sitelib}/* + +%files help -f doclist.lst +%{_docdir}/* + +%changelog +* Wed May 31 2023 Python_Bot - 0.2.11-1 +- Package Spec generated diff --git a/sources b/sources new file mode 100644 index 0000000..9bd12c8 --- /dev/null +++ b/sources @@ -0,0 +1 @@ +8f5bde76342d2462bb22f90d966fcc1d googlewrapper-0.2.11.tar.gz -- cgit v1.2.3