summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--python-proxy-requests.spec663
-rw-r--r--sources1
3 files changed, 665 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..81b01e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/proxy_requests-0.5.2.tar.gz
diff --git a/python-proxy-requests.spec b/python-proxy-requests.spec
new file mode 100644
index 0000000..6d26c1d
--- /dev/null
+++ b/python-proxy-requests.spec
@@ -0,0 +1,663 @@
+%global _empty_manifest_terminate_build 0
+Name: python-proxy-requests
+Version: 0.5.2
+Release: 1
+Summary: Make HTTP requests with scraped proxies
+License: MIT License
+URL: https://github.com/rootVIII/proxy_requests
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/bd/e9/aa76c6139a8f1c90382a3667b427991846279b95bf71804e8cfdbf99ed6c/proxy_requests-0.5.2.tar.gz
+BuildArch: noarch
+
+
+%description
+## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/
+
+<br><br>
+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2
+<br><br>
+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful.
+<br><br>
+Either copy the code and put where you want it, or download via pip:
+<br><br>
+<code>pip install proxy-requests</code> (or pip3)
+<br>
+<code>from proxy_requests import ProxyRequests</code>
+<br><br>
+or if you need the Basic Auth subclass as well:
+<br>
+<code>from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth</code>
+<br><br>
+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance.
+<br><br>
+System Requirements: <b>Python 3</b> and the requests module.
+<br><br>
+Runs on Linux and Windows (and Mac probably) - <b>It may take a moment to run depending on the current proxy.</b>
+<br>
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue).
+<br><br>
+Proxies are randomly popped from the queue.
+<br><br>
+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the Parent methods.
+<br>
+
+<b>GET:</b>
+<pre>
+ <code>
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+
+<b>GET with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers & Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+<br><br>
+<b>Response Methods</b>
+<br><br>
+ <b>Returns a string:</b>
+<br>
+<code>print(r)</code>
+<br>
+<b>Or if you want the raw content as bytes: </b>
+<br>
+<code>r.get_raw()</code>
+<br>
+<b>Get the response as JSON (if valid JSON):</b>
+<br>
+<code>r.get_json()</code>
+<br>
+<b>Get the response headers:</b>
+<br>
+<code>print(r.get_headers())</code>
+<br>
+<b>Get the status code:</b>
+<br>
+<code>print(r.get_status_code())</code>
+<br>
+<b>Get the URL that was requested:</b>
+<br>
+<code>print(r.get_url())</code>
+<br>
+<b>Get the proxy that was used to make the request:</b>
+<br>
+<code>print(r.get_proxy_used())</code>
+<br>
+<br>
+<b>To write raw data to a file (including an image):</b>
+<br>
+<pre>
+ <code>
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+ f.write(r.get_raw())
+
+ </code>
+</pre>
+<br>
+<b>Dump the response to a file as JSON:</b>
+<br>
+<pre>
+ <code>
+import json
+with open('test.txt', 'w') as f:
+ json.dump(r.get_json(), f)
+ </code>
+</pre>
+<br><br>
+This was developed on Ubuntu 16.04.4/18.04 LTS.
+<hr>
+<b>Author: James Loye Colley 2018-2020</b><br><br>
+
+%package -n python3-proxy-requests
+Summary: Make HTTP requests with scraped proxies
+Provides: python-proxy-requests
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-proxy-requests
+## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/
+
+<br><br>
+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2
+<br><br>
+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful.
+<br><br>
+Either copy the code and put where you want it, or download via pip:
+<br><br>
+<code>pip install proxy-requests</code> (or pip3)
+<br>
+<code>from proxy_requests import ProxyRequests</code>
+<br><br>
+or if you need the Basic Auth subclass as well:
+<br>
+<code>from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth</code>
+<br><br>
+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance.
+<br><br>
+System Requirements: <b>Python 3</b> and the requests module.
+<br><br>
+Runs on Linux and Windows (and Mac probably) - <b>It may take a moment to run depending on the current proxy.</b>
+<br>
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue).
+<br><br>
+Proxies are randomly popped from the queue.
+<br><br>
+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the Parent methods.
+<br>
+
+<b>GET:</b>
+<pre>
+ <code>
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+
+<b>GET with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers & Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+<br><br>
+<b>Response Methods</b>
+<br><br>
+ <b>Returns a string:</b>
+<br>
+<code>print(r)</code>
+<br>
+<b>Or if you want the raw content as bytes: </b>
+<br>
+<code>r.get_raw()</code>
+<br>
+<b>Get the response as JSON (if valid JSON):</b>
+<br>
+<code>r.get_json()</code>
+<br>
+<b>Get the response headers:</b>
+<br>
+<code>print(r.get_headers())</code>
+<br>
+<b>Get the status code:</b>
+<br>
+<code>print(r.get_status_code())</code>
+<br>
+<b>Get the URL that was requested:</b>
+<br>
+<code>print(r.get_url())</code>
+<br>
+<b>Get the proxy that was used to make the request:</b>
+<br>
+<code>print(r.get_proxy_used())</code>
+<br>
+<br>
+<b>To write raw data to a file (including an image):</b>
+<br>
+<pre>
+ <code>
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+ f.write(r.get_raw())
+
+ </code>
+</pre>
+<br>
+<b>Dump the response to a file as JSON:</b>
+<br>
+<pre>
+ <code>
+import json
+with open('test.txt', 'w') as f:
+ json.dump(r.get_json(), f)
+ </code>
+</pre>
+<br><br>
+This was developed on Ubuntu 16.04.4/18.04 LTS.
+<hr>
+<b>Author: James Loye Colley 2018-2020</b><br><br>
+
+%package help
+Summary: Development documents and examples for proxy-requests
+Provides: python3-proxy-requests-doc
+%description help
+## Python Proxy Requests | make an http GET/POST with a proxy scraped from https://www.sslproxies.org/
+
+<br><br>
+NOTE: This repository has been archived and is no longer maintained after urllib3==1.26.2
+<br><br>
+The ProxyRequests class first scrapes proxies from the web. Then it recursively attempts to make a request if the initial request with a proxy is unsuccessful.
+<br><br>
+Either copy the code and put where you want it, or download via pip:
+<br><br>
+<code>pip install proxy-requests</code> (or pip3)
+<br>
+<code>from proxy_requests import ProxyRequests</code>
+<br><br>
+or if you need the Basic Auth subclass as well:
+<br>
+<code>from proxy_requests import ProxyRequests, ProxyRequestsBasicAuth</code>
+<br><br>
+If the above import statement is used, method calls will be identical to the ones shown below. Pass a fully qualified URL when initializing an instance.
+<br><br>
+System Requirements: <b>Python 3</b> and the requests module.
+<br><br>
+Runs on Linux and Windows (and Mac probably) - <b>It may take a moment to run depending on the current proxy.</b>
+<br>
+Each request with a proxy is set with a 3 second timeout in the event that the request takes too long (before trying the next proxy socket in the queue).
+<br><br>
+Proxies are randomly popped from the queue.
+<br><br>
+The ProxyRequestsBasicAuth subclass has the methods get(), get_with_headers(), post(), post_with_headers(), post_file(), and post_file_with_headers() that will override the Parent methods.
+<br>
+
+<b>GET:</b>
+<pre>
+ <code>
+r = ProxyRequests('https://api.ipify.org')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_headers({'name': 'rootVIII', 'secret_message': '7Yufs9KIfj33d'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE:</b>
+<pre>
+ <code>
+r = ProxyRequests('url here')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequests('url here')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+
+<b>GET with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.get()
+ </code>
+</pre>
+
+<b>GET with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.get_with_headers()
+ </code>
+</pre>
+
+<b>POST with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.post({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST with headers & Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers({'header_key': 'header_value'})
+r.post_with_headers({'key1': 'value1', 'key2': 'value2'})
+ </code>
+</pre>
+
+<b>POST FILE with Basic Authentication:</b>
+<pre>
+ <code>
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_file('test.txt')
+r.post_file()
+ </code>
+</pre>
+
+<b>POST FILE with headers & Basic Authentication:</b>
+<pre>
+ <code>
+h = {'User-Agent': 'NCSA Mosaic/3.0 (Windows 95)'}
+r = ProxyRequestsBasicAuth('url here', 'username', 'password')
+r.set_headers(h)
+r.set_file('test.txt')
+r.post_file_with_headers()
+ </code>
+</pre>
+<br><br>
+<b>Response Methods</b>
+<br><br>
+ <b>Returns a string:</b>
+<br>
+<code>print(r)</code>
+<br>
+<b>Or if you want the raw content as bytes: </b>
+<br>
+<code>r.get_raw()</code>
+<br>
+<b>Get the response as JSON (if valid JSON):</b>
+<br>
+<code>r.get_json()</code>
+<br>
+<b>Get the response headers:</b>
+<br>
+<code>print(r.get_headers())</code>
+<br>
+<b>Get the status code:</b>
+<br>
+<code>print(r.get_status_code())</code>
+<br>
+<b>Get the URL that was requested:</b>
+<br>
+<code>print(r.get_url())</code>
+<br>
+<b>Get the proxy that was used to make the request:</b>
+<br>
+<code>print(r.get_proxy_used())</code>
+<br>
+<br>
+<b>To write raw data to a file (including an image):</b>
+<br>
+<pre>
+ <code>
+
+url = 'https://www.restwords.com/static/ICON.png'
+r = ProxyRequests(url)
+r.get()
+with open('out.png', 'wb') as f:
+ f.write(r.get_raw())
+
+ </code>
+</pre>
+<br>
+<b>Dump the response to a file as JSON:</b>
+<br>
+<pre>
+ <code>
+import json
+with open('test.txt', 'w') as f:
+ json.dump(r.get_json(), f)
+ </code>
+</pre>
+<br><br>
+This was developed on Ubuntu 16.04.4/18.04 LTS.
+<hr>
+<b>Author: James Loye Colley 2018-2020</b><br><br>
+
+%prep
+%autosetup -n proxy-requests-0.5.2
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-proxy-requests -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed Apr 12 2023 Python_Bot <Python_Bot@openeuler.org> - 0.5.2-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..a75d23a
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+eb467b8ede2f625b87f64119e2859562 proxy_requests-0.5.2.tar.gz