summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r-- .gitignore 1
-rw-r--r-- python-scrapy-fake-useragent.spec 95
-rw-r--r-- sources 1
3 files changed, 97 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..109283a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/scrapy-fake-useragent-1.4.4.macosx-10.9-x86_64.tar.gz
diff --git a/python-scrapy-fake-useragent.spec b/python-scrapy-fake-useragent.spec
new file mode 100644
index 0000000..5033aa1
--- /dev/null
+++ b/python-scrapy-fake-useragent.spec
@@ -0,0 +1,95 @@
+%global _empty_manifest_terminate_build 0
+Name: python-scrapy-fake-useragent
+Version: 1.4.4
+Release: 1
+Summary: Use a random User-Agent provided by fake-useragent for every request
+License: BSD-3-Clause
+URL: https://github.com/alecxe/scrapy-fake-useragent
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/50/67/936f24eb4486c6340bc6ffd65b68ebb23a1121fbe6db9280acd88fb95716/scrapy-fake-useragent-1.4.4.macosx-10.9-x86_64.tar.gz
+BuildArch: noarch
+
+Requires: python3-fake-useragent
+Requires: python3-faker
+
+%description
+Random User-Agent middleware for Scrapy scraping framework based on
+`fake-useragent <https://pypi.python.org/pypi/fake-useragent>`__, which picks up ``User-Agent`` strings
+based on `usage statistics <http://www.w3schools.com/browsers/browsers_stats.asp>`__
+from a `real world database <http://useragentstring.com/>`__, but also has the option to configure a generator
+of fake UA strings, as a backup, powered by
+`Faker <https://faker.readthedocs.io/en/stable/providers/faker.providers.user_agent.html>`__.
+It also has the possibility of extending the
+capabilities of the middleware, by adding your own providers.
+
+%package -n python3-scrapy-fake-useragent
+Summary: Use a random User-Agent provided by fake-useragent for every request
+Provides: python-scrapy-fake-useragent
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-scrapy-fake-useragent
+Random User-Agent middleware for Scrapy scraping framework based on
+`fake-useragent <https://pypi.python.org/pypi/fake-useragent>`__, which picks up ``User-Agent`` strings
+based on `usage statistics <http://www.w3schools.com/browsers/browsers_stats.asp>`__
+from a `real world database <http://useragentstring.com/>`__, but also has the option to configure a generator
+of fake UA strings, as a backup, powered by
+`Faker <https://faker.readthedocs.io/en/stable/providers/faker.providers.user_agent.html>`__.
+It also has the possibility of extending the
+capabilities of the middleware, by adding your own providers.
+
+%package help
+Summary: Development documents and examples for scrapy-fake-useragent
+Provides: python3-scrapy-fake-useragent-doc
+%description help
+Random User-Agent middleware for Scrapy scraping framework based on
+`fake-useragent <https://pypi.python.org/pypi/fake-useragent>`__, which picks up ``User-Agent`` strings
+based on `usage statistics <http://www.w3schools.com/browsers/browsers_stats.asp>`__
+from a `real world database <http://useragentstring.com/>`__, but also has the option to configure a generator
+of fake UA strings, as a backup, powered by
+`Faker <https://faker.readthedocs.io/en/stable/providers/faker.providers.user_agent.html>`__.
+It also has the possibility of extending the
+capabilities of the middleware, by adding your own providers.
+
+%prep
+%autosetup -n scrapy-fake-useragent-1.4.4
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-scrapy-fake-useragent -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Mon Apr 10 2023 Python_Bot <Python_Bot@openeuler.org> - 1.4.4-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..f9503bf
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+d6b65d7a44d60e81f59a126a5d5bda60 scrapy-fake-useragent-1.4.4.macosx-10.9-x86_64.tar.gz