summary refs log tree commit diff
diff options
context:
space:
mode:
authorCoprDistGit <infra@openeuler.org>2023-04-11 07:22:56 +0000
committerCoprDistGit <infra@openeuler.org>2023-04-11 07:22:56 +0000
commit2077a7fdab2ddc95b321a2a3c17ec0526bdde4e7 (patch)
tree43034eb89205da53d587341e84222b0ed1d08997
parent7a8ca1bd294d5cc296473fc1e1ba919c5a849312 (diff)
automatic import of python-scrapy-random-useragent
-rw-r--r-- .gitignore 1
-rw-r--r-- python-scrapy-random-useragent.spec 84
-rw-r--r-- sources 1
3 files changed, 86 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
index e69de29..b5fb0d4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/scrapy-random-useragent-0.2.tar.gz
diff --git a/python-scrapy-random-useragent.spec b/python-scrapy-random-useragent.spec
new file mode 100644
index 0000000..01b54c9
--- /dev/null
+++ b/python-scrapy-random-useragent.spec
@@ -0,0 +1,84 @@
+%global _empty_manifest_terminate_build 0
+Name: python-scrapy-random-useragent
+Version: 0.2
+Release: 1
+Summary: Scrapy Middleware to set a random User-Agent for every Request.
+License: MIT
+URL: https://github.com/cnu/scrapy-random-useragent
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/23/2e/3a3ae91faf1d5d31526379186285817bda8ff66a221ec7085a9e549c1465/scrapy-random-useragent-0.2.tar.gz
+BuildArch: noarch
+
+
+%description
+Does your scrapy spider get identified and blocked by servers because
+you use the default user-agent or a generic one?
+Use this ``random_useragent`` module and set a random user-agent for
+every request. You are limited only by the number of different
+user-agents you set in a text file.
+
+%package -n python3-scrapy-random-useragent
+Summary: Scrapy Middleware to set a random User-Agent for every Request.
+Provides: python-scrapy-random-useragent
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-scrapy-random-useragent
+Does your scrapy spider get identified and blocked by servers because
+you use the default user-agent or a generic one?
+Use this ``random_useragent`` module and set a random user-agent for
+every request. You are limited only by the number of different
+user-agents you set in a text file.
+
+%package help
+Summary: Development documents and examples for scrapy-random-useragent
+Provides: python3-scrapy-random-useragent-doc
+%description help
+Does your scrapy spider get identified and blocked by servers because
+you use the default user-agent or a generic one?
+Use this ``random_useragent`` module and set a random user-agent for
+every request. You are limited only by the number of different
+user-agents you set in a text file.
+
+%prep
+%autosetup -n scrapy-random-useragent-0.2
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-scrapy-random-useragent -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Tue Apr 11 2023 Python_Bot <Python_Bot@openeuler.org> - 0.2-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..89f8bb2
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+72f21e64f6edf1973441f19e036f62df scrapy-random-useragent-0.2.tar.gz