summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCoprDistGit <infra@openeuler.org>2023-04-12 03:39:07 +0000
committerCoprDistGit <infra@openeuler.org>2023-04-12 03:39:07 +0000
commit96f4c0d22d363c75c54a8e1d7e63194a78b9c7ad (patch)
tree428835ce39edcc29e106d7276e12cc21a608d380
parent93fe2ca8a91815e4a65d7fbaaf75f7beddb31e35 (diff)
automatic import of python-dm-pybloom
-rw-r--r--.gitignore1
-rw-r--r--python-dm-pybloom.spec175
-rw-r--r--sources1
3 files changed, 177 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..ecbff54 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/dm_pybloom-3.0.4.tar.gz
diff --git a/python-dm-pybloom.spec b/python-dm-pybloom.spec
new file mode 100644
index 0000000..724fb9b
--- /dev/null
+++ b/python-dm-pybloom.spec
@@ -0,0 +1,175 @@
+%global _empty_manifest_terminate_build 0
+Name: python-dm-pybloom
+Version: 3.0.4
+Release: 1
+Summary: Datamaran's fork of Pybloom adapted to Python3
+License: MIT
+URL: https://github.com/datamaranai/python-bloomfilter/
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/6e/27/2e1d03cc38f0cb89149a47702e97895d19ac4d29919c25526e7f7b26db12/dm_pybloom-3.0.4.tar.gz
+BuildArch: noarch
+
+Requires: python3-bitarray
+
+%description
+``dm_pybloom`` is a module that includes a Bloom Filter data structure along with
+an implementation of Scalable Bloom Filters as discussed in:
+P. Almeida, C.Baquero, N. Preguiça, D. Hutchison, Scalable Bloom Filters,
+(GLOBECOM 2007), IEEE, 2007.
+Bloom filters are great if you understand what amount of bits you need to set
+aside early to store your entire set. Scalable Bloom Filters allow your bloom
+filter bits to grow as a function of false positive probability and size.
+A filter is "full" when at capacity: M * ((ln 2 ^ 2) / abs(ln p)), where M
+is the number of bits and p is the false positive probability. When capacity
+is reached a new filter is then created exponentially larger than the last
+with a tighter probability of false positives and a larger number of hash
+functions.
+ >>> from dm_pybloom import BloomFilter
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> [f.add(x) for x in range(10)]
+ [False, False, False, False, False, False, False, False, False, False]
+ >>> all([(x in f) for x in range(10)])
+ True
+ >>> 10 in f
+ False
+ >>> 5 in f
+ True
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> for i in xrange(0, f.capacity):
+ >>> (1.0 - (len(f) / float(f.capacity))) <= f.error_rate + 2e-18
+ True
+ >>> from dm_pybloom import ScalableBloomFilter
+ >>> sbf = ScalableBloomFilter(mode=ScalableBloomFilter.SMALL_SET_GROWTH)
+ >>> count = 10000
+ >>> for i in xrange(0, count):
+ >>> (1.0 - (len(sbf) / float(count))) <= sbf.error_rate + 2e-18
+ True
+ # len(sbf) may not equal the entire input length. 0.01% error is well
+ # below the default 0.1% error threshold. As the capacity goes up, the
+ # error will approach 0.1%.
+
+%package -n python3-dm-pybloom
+Summary: Datamaran's fork of Pybloom adapted to Python3
+Provides: python-dm-pybloom
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-dm-pybloom
+``dm_pybloom`` is a module that includes a Bloom Filter data structure along with
+an implementation of Scalable Bloom Filters as discussed in:
+P. Almeida, C.Baquero, N. Preguiça, D. Hutchison, Scalable Bloom Filters,
+(GLOBECOM 2007), IEEE, 2007.
+Bloom filters are great if you understand what amount of bits you need to set
+aside early to store your entire set. Scalable Bloom Filters allow your bloom
+filter bits to grow as a function of false positive probability and size.
+A filter is "full" when at capacity: M * ((ln 2 ^ 2) / abs(ln p)), where M
+is the number of bits and p is the false positive probability. When capacity
+is reached a new filter is then created exponentially larger than the last
+with a tighter probability of false positives and a larger number of hash
+functions.
+ >>> from dm_pybloom import BloomFilter
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> [f.add(x) for x in range(10)]
+ [False, False, False, False, False, False, False, False, False, False]
+ >>> all([(x in f) for x in range(10)])
+ True
+ >>> 10 in f
+ False
+ >>> 5 in f
+ True
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> for i in xrange(0, f.capacity):
+ >>> (1.0 - (len(f) / float(f.capacity))) <= f.error_rate + 2e-18
+ True
+ >>> from dm_pybloom import ScalableBloomFilter
+ >>> sbf = ScalableBloomFilter(mode=ScalableBloomFilter.SMALL_SET_GROWTH)
+ >>> count = 10000
+ >>> for i in xrange(0, count):
+ >>> (1.0 - (len(sbf) / float(count))) <= sbf.error_rate + 2e-18
+ True
+ # len(sbf) may not equal the entire input length. 0.01% error is well
+ # below the default 0.1% error threshold. As the capacity goes up, the
+ # error will approach 0.1%.
+
+%package help
+Summary: Development documents and examples for dm-pybloom
+Provides: python3-dm-pybloom-doc
+%description help
+``dm_pybloom`` is a module that includes a Bloom Filter data structure along with
+an implementation of Scalable Bloom Filters as discussed in:
+P. Almeida, C.Baquero, N. Preguiça, D. Hutchison, Scalable Bloom Filters,
+(GLOBECOM 2007), IEEE, 2007.
+Bloom filters are great if you understand what amount of bits you need to set
+aside early to store your entire set. Scalable Bloom Filters allow your bloom
+filter bits to grow as a function of false positive probability and size.
+A filter is "full" when at capacity: M * ((ln 2 ^ 2) / abs(ln p)), where M
+is the number of bits and p is the false positive probability. When capacity
+is reached a new filter is then created exponentially larger than the last
+with a tighter probability of false positives and a larger number of hash
+functions.
+ >>> from dm_pybloom import BloomFilter
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> [f.add(x) for x in range(10)]
+ [False, False, False, False, False, False, False, False, False, False]
+ >>> all([(x in f) for x in range(10)])
+ True
+ >>> 10 in f
+ False
+ >>> 5 in f
+ True
+ >>> f = BloomFilter(capacity=1000, error_rate=0.001)
+ >>> for i in xrange(0, f.capacity):
+ >>> (1.0 - (len(f) / float(f.capacity))) <= f.error_rate + 2e-18
+ True
+ >>> from dm_pybloom import ScalableBloomFilter
+ >>> sbf = ScalableBloomFilter(mode=ScalableBloomFilter.SMALL_SET_GROWTH)
+ >>> count = 10000
+ >>> for i in xrange(0, count):
+ >>> (1.0 - (len(sbf) / float(count))) <= sbf.error_rate + 2e-18
+ True
+ # len(sbf) may not equal the entire input length. 0.01% error is well
+ # below the default 0.1% error threshold. As the capacity goes up, the
+ # error will approach 0.1%.
+
+%prep
+%autosetup -n dm-pybloom-3.0.4
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-dm-pybloom -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed Apr 12 2023 Python_Bot <Python_Bot@openeuler.org> - 3.0.4-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..559c879
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+7ca03bc999a97ce168b2be08d4781c19 dm_pybloom-3.0.4.tar.gz