From 5d713a8a0b11e72a15210e05b581b392f1fcac2c Mon Sep 17 00:00:00 2001 From: CoprDistGit Date: Wed, 12 Apr 2023 01:49:50 +0000 Subject: automatic import of python-proxy-requests --- .gitignore | 1 + python-proxy-requests.spec | 663 +++++++++++++++++++++++++++++++++++++++++++++ sources | 1 + 3 files changed, 665 insertions(+) create mode 100644 python-proxy-requests.spec create mode 100644 sources diff --git a/.gitignore b/.gitignore index e69de29..81b01e9 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1 @@ +/proxy_requests-0.5.2.tar.gz diff --git a/python-proxy-requests.spec b/python-proxy-requests.spec new file mode 100644 index 0000000..6d26c1d --- /dev/null +++ b/python-proxy-requests.spec @@ -0,0 +1,663 @@ +%global _empty_manifest_terminate_build 0 +Name: python-proxy-requests +Version: 0.5.2 +Release: 1 +Summary: Make HTTP requests with scraped proxies +License: MIT License +URL: https://github.com/rootVIII/proxy_requests +Source0: https://mirrors.nju.edu.cn/pypi/web/packages/bd/e9/aa76c6139a8f1c90382a3667b427991846279b95bf71804e8cfdbf99ed6c/proxy_requests-0.5.2.tar.gz +BuildArch: noarch + + +%description +## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/ + +

+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2 +

+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful. +

+Either copy the code and put where you want it, or download via pip: +

+pip install proxy-requests (or pip3) +
+from proxy_requests import ProxyRequests +

+or if you need the Basic Auth subclass as well: +
+from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth +

+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance. +

+System Requirements: Python 3 and the requests module. +

+Runs on Linux and Windows (and Mac probably) - It may take a moment to run depending on the current proxy. +
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue). +

+Proxies are randomly popped from the queue. +

+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the parent methods. +
+ +GET: +
+    
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+    
+
+ +GET with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST: +
+    
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers: +
+    
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE: +
+    
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+ +GET with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+    
+
+ +GET with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers & Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+

+Response Methods +

+ Returns a string: +
+print(r) +
+Or if you want the raw content as bytes: +
+r.get_raw() +
+Get the response as JSON (if valid JSON): +
+r.get_json() +
+Get the response headers: +
+print(r.get_headers()) +
+Get the status code: +
+print(r.get_status_code()) +
+Get the URL that was requested: +
+print(r.get_url()) +
+Get the proxy that was used to make the request: +
+print(r.get_proxy_used()) +
+
+To write raw data to a file (including an image): +
+
+    
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+    f.write(r.get_raw())
+
+    
+
+
+Dump the response to a file as JSON: +
+
+    
+import json
+with open('test.txt', 'w') as f:
+    json.dump(r.get_json(), f)
+    
+
+

+This was developed on Ubuntu 16.04.4/18.04 LTS. +
+Author: James Loye Colley 2018-2020

+ +%package -n python3-proxy-requests +Summary: Make HTTP requests with scraped proxies +Provides: python-proxy-requests +BuildRequires: python3-devel +BuildRequires: python3-setuptools +BuildRequires: python3-pip +%description -n python3-proxy-requests +## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/ + +

+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2 +

+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful. +

+Either copy the code and put where you want it, or download via pip: +

+pip install proxy-requests (or pip3) +
+from proxy_requests import ProxyRequests +

+or if you need the Basic Auth subclass as well: +
+from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth +

+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance. +

+System Requirements: Python 3 and the requests module. +

+Runs on Linux and Windows (and Mac probably) - It may take a moment to run depending on the current proxy. +
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue). +

+Proxies are randomly popped from the queue. +

+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the parent methods. +
+ +GET: +
+    
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+    
+
+ +GET with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST: +
+    
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers: +
+    
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE: +
+    
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+ +GET with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+    
+
+ +GET with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers & Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+

+Response Methods +

+ Returns a string: +
+print(r) +
+Or if you want the raw content as bytes: +
+r.get_raw() +
+Get the response as JSON (if valid JSON): +
+r.get_json() +
+Get the response headers: +
+print(r.get_headers()) +
+Get the status code: +
+print(r.get_status_code()) +
+Get the URL that was requested: +
+print(r.get_url()) +
+Get the proxy that was used to make the request: +
+print(r.get_proxy_used()) +
+
+To write raw data to a file (including an image): +
+
+    
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+    f.write(r.get_raw())
+
+    
+
+
+Dump the response to a file as JSON: +
+
+    
+import json
+with open('test.txt', 'w') as f:
+    json.dump(r.get_json(), f)
+    
+
+

+This was developed on Ubuntu 16.04.4/18.04 LTS. +
+Author: James Loye Colley 2018-2020

+ +%package help +Summary: Development documents and examples for proxy-requests +Provides: python3-proxy-requests-doc +%description help +## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/ + +

+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2 +

+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful. +

+Either copy the code and put where you want it, or download via pip: +

+pip install proxy-requests (or pip3) +
+from proxy_requests import ProxyRequests +

+or if you need the Basic Auth subclass as well: +
+from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth +

+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance. +

+System Requirements: Python 3 and the requests module. +

+Runs on Linux and Windows (and Mac probably) - It may take a moment to run depending on the current proxy. +
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue). +

+Proxies are randomly popped from the queue. +

+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the parent methods. +
+ +GET: +
+    
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+    
+
+ +GET with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST: +
+    
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers: +
+    
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE: +
+    
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+ +GET with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+    
+
+ +GET with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+    
+
+ +POST with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST with headers & Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+    
+
+ +POST FILE with Basic Authentication: +
+    
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+    
+
+ +POST FILE with headers & Basic Authentication: +
+    
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+    
+
+

+Response Methods +

+ Returns a string: +
+print(r) +
+Or if you want the raw content as bytes: +
+r.get_raw() +
+Get the response as JSON (if valid JSON): +
+r.get_json() +
+Get the response headers: +
+print(r.get_headers()) +
+Get the status code: +
+print(r.get_status_code()) +
+Get the URL that was requested: +
+print(r.get_url()) +
+Get the proxy that was used to make the request: +
+print(r.get_proxy_used()) +
+
+To write raw data to a file (including an image): +
+
+    
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+    f.write(r.get_raw())
+
+    
+
+
+Dump the response to a file as JSON: +
+
+    
+import json
+with open('test.txt', 'w') as f:
+    json.dump(r.get_json(), f)
+    
+
+

+This was developed on Ubuntu 16.04.4/18.04 LTS. +
+Author: James Loye Colley 2018-2020

+ +%prep +%autosetup -n proxy-requests-0.5.2 + +%build +%py3_build + +%install +%py3_install +install -d -m755 %{buildroot}/%{_pkgdocdir} +if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi +if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi +if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi +if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi +pushd %{buildroot} +if [ -d usr/lib ]; then + find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/lib64 ]; then + find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/bin ]; then + find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst +fi +if [ -d usr/sbin ]; then + find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst +fi +touch doclist.lst +if [ -d usr/share/man ]; then + find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst +fi +popd +mv %{buildroot}/filelist.lst . +mv %{buildroot}/doclist.lst . + +%files -n python3-proxy-requests -f filelist.lst +%dir %{python3_sitelib}/* + +%files help -f doclist.lst +%{_docdir}/* + +%changelog +* Wed Apr 12 2023 Python_Bot - 0.5.2-1 +- Package Spec generated diff --git a/sources b/sources new file mode 100644 index 0000000..a75d23a --- /dev/null +++ b/sources @@ -0,0 +1 @@ +eb467b8ede2f625b87f64119e2859562 proxy_requests-0.5.2.tar.gz -- cgit v1.2.3