From bf66920df689d22dfc5a7ed0bcf2d35ad1131e84 Mon Sep 17 00:00:00 2001
From: CoprDistGit
Date: Thu, 9 Mar 2023 07:32:19 +0000
Subject: automatic import of python-fastcache

---
 .gitignore            |   1 +
 python-fastcache.spec | 279 ++++++++++++++++++++++++++++++++++++++++++++++++++
 sources               |   1 +
 3 files changed, 281 insertions(+)
 create mode 100644 python-fastcache.spec
 create mode 100644 sources

diff --git a/.gitignore b/.gitignore
index e69de29..ae07728 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/fastcache-1.1.0.tar.gz
diff --git a/python-fastcache.spec b/python-fastcache.spec
new file mode 100644
index 0000000..9182ddc
--- /dev/null
+++ b/python-fastcache.spec
@@ -0,0 +1,279 @@
+%global _empty_manifest_terminate_build 0
+Name: python-fastcache
+Version: 1.1.0
+Release: 1
+Summary: C implementation of Python 3 functools.lru_cache
+License: MIT
+URL: https://github.com/pbrady/fastcache
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/5f/a3/b280cba4b4abfe5f5bdc643e6c9d81bf3b9dc2148a11e5df06b6ba85a560/fastcache-1.1.0.tar.gz
+BuildArch: noarch
+
+
+%description
+C implementation of Python 3 functools.lru_cache. Provides speedup of 10-30x
+over standard library. Passes test suite from standard library for lru_cache.
+
+Provides 2 Least Recently Used caching function decorators:
+
+    clru_cache - built-in (faster)
+        >>> from fastcache import clru_cache, __version__
+        >>> __version__
+        '1.1.0'
+        >>> @clru_cache(maxsize=325, typed=False)
+        ... def fib(n):
+        ...     """Terrible Fibonacci number generator."""
+        ...     return n if n < 2 else fib(n-1) + fib(n-2)
+        ...
+        >>> fib(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> fib.cache_info()
+        CacheInfo(hits=298, misses=301, maxsize=325, currsize=301)
+        >>> print(fib.__doc__)
+        Terrible Fibonacci number generator.
+        >>> fib.cache_clear()
+        >>> fib.cache_info()
+        CacheInfo(hits=0, misses=0, maxsize=325, currsize=0)
+        >>> fib.__wrapped__(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> type(fib)
+        >>>
+
+    lru_cache - python wrapper around clru_cache
+        >>> from fastcache import lru_cache
+        >>> @lru_cache(maxsize=128, typed=False)
+        ... def f(a, b):
+        ...     pass
+        ...
+        >>> type(f)
+        >>>
+
+
+(c)lru_cache(maxsize=128, typed=False, state=None, unhashable='error')
+
+    Least-recently-used cache decorator.
+
+    If *maxsize* is set to None, the LRU features are disabled and the cache
+    can grow without bound.
+
+    If *typed* is True, arguments of different types will be cached separately.
+    For example, f(3.0) and f(3) will be treated as distinct calls with
+    distinct results.
+
+    If *state* is a list or dict, the items will be incorporated into the
+    argument hash.
+
+    The result of calling the cached function with unhashable (mutable)
+    arguments depends on the value of *unhashable*:
+
+        If *unhashable* is 'error', a TypeError will be raised.
+
+        If *unhashable* is 'warning', a UserWarning will be raised, and
+        the wrapped function will be called with the supplied arguments.
+        A miss will be recorded in the cache statistics.
+
+        If *unhashable* is 'ignore', the wrapped function will be called
+        with the supplied arguments. A miss will be recorded in
+        the cache statistics.
+
+    View the cache statistics named tuple (hits, misses, maxsize, currsize)
+    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
+    Access the underlying function with f.__wrapped__.
+
+    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+%package -n python3-fastcache
+Summary: C implementation of Python 3 functools.lru_cache
+Provides: python-fastcache
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-fastcache
+C implementation of Python 3 functools.lru_cache. Provides speedup of 10-30x
+over standard library. Passes test suite from standard library for lru_cache.
+
+Provides 2 Least Recently Used caching function decorators:
+
+    clru_cache - built-in (faster)
+        >>> from fastcache import clru_cache, __version__
+        >>> __version__
+        '1.1.0'
+        >>> @clru_cache(maxsize=325, typed=False)
+        ... def fib(n):
+        ...     """Terrible Fibonacci number generator."""
+        ...     return n if n < 2 else fib(n-1) + fib(n-2)
+        ...
+        >>> fib(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> fib.cache_info()
+        CacheInfo(hits=298, misses=301, maxsize=325, currsize=301)
+        >>> print(fib.__doc__)
+        Terrible Fibonacci number generator.
+        >>> fib.cache_clear()
+        >>> fib.cache_info()
+        CacheInfo(hits=0, misses=0, maxsize=325, currsize=0)
+        >>> fib.__wrapped__(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> type(fib)
+        >>>
+
+    lru_cache - python wrapper around clru_cache
+        >>> from fastcache import lru_cache
+        >>> @lru_cache(maxsize=128, typed=False)
+        ... def f(a, b):
+        ...     pass
+        ...
+        >>> type(f)
+        >>>
+
+
+(c)lru_cache(maxsize=128, typed=False, state=None, unhashable='error')
+
+    Least-recently-used cache decorator.
+
+    If *maxsize* is set to None, the LRU features are disabled and the cache
+    can grow without bound.
+
+    If *typed* is True, arguments of different types will be cached separately.
+    For example, f(3.0) and f(3) will be treated as distinct calls with
+    distinct results.
+
+    If *state* is a list or dict, the items will be incorporated into the
+    argument hash.
+
+    The result of calling the cached function with unhashable (mutable)
+    arguments depends on the value of *unhashable*:
+
+        If *unhashable* is 'error', a TypeError will be raised.
+
+        If *unhashable* is 'warning', a UserWarning will be raised, and
+        the wrapped function will be called with the supplied arguments.
+        A miss will be recorded in the cache statistics.
+
+        If *unhashable* is 'ignore', the wrapped function will be called
+        with the supplied arguments. A miss will be recorded in
+        the cache statistics.
+
+    View the cache statistics named tuple (hits, misses, maxsize, currsize)
+    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
+    Access the underlying function with f.__wrapped__.
+
+    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+%package help
+Summary: Development documents and examples for fastcache
+Provides: python3-fastcache-doc
+%description help
+C implementation of Python 3 functools.lru_cache. Provides speedup of 10-30x
+over standard library. Passes test suite from standard library for lru_cache.
+
+Provides 2 Least Recently Used caching function decorators:
+
+    clru_cache - built-in (faster)
+        >>> from fastcache import clru_cache, __version__
+        >>> __version__
+        '1.1.0'
+        >>> @clru_cache(maxsize=325, typed=False)
+        ... def fib(n):
+        ...     """Terrible Fibonacci number generator."""
+        ...     return n if n < 2 else fib(n-1) + fib(n-2)
+        ...
+        >>> fib(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> fib.cache_info()
+        CacheInfo(hits=298, misses=301, maxsize=325, currsize=301)
+        >>> print(fib.__doc__)
+        Terrible Fibonacci number generator.
+        >>> fib.cache_clear()
+        >>> fib.cache_info()
+        CacheInfo(hits=0, misses=0, maxsize=325, currsize=0)
+        >>> fib.__wrapped__(300)
+        222232244629420445529739893461909967206666939096499764990979600
+        >>> type(fib)
+        >>>
+
+    lru_cache - python wrapper around clru_cache
+        >>> from fastcache import lru_cache
+        >>> @lru_cache(maxsize=128, typed=False)
+        ... def f(a, b):
+        ...     pass
+        ...
+        >>> type(f)
+        >>>
+
+
+(c)lru_cache(maxsize=128, typed=False, state=None, unhashable='error')
+
+    Least-recently-used cache decorator.
+
+    If *maxsize* is set to None, the LRU features are disabled and the cache
+    can grow without bound.
+
+    If *typed* is True, arguments of different types will be cached separately.
+    For example, f(3.0) and f(3) will be treated as distinct calls with
+    distinct results.
+
+    If *state* is a list or dict, the items will be incorporated into the
+    argument hash.
+
+    The result of calling the cached function with unhashable (mutable)
+    arguments depends on the value of *unhashable*:
+
+        If *unhashable* is 'error', a TypeError will be raised.
+
+        If *unhashable* is 'warning', a UserWarning will be raised, and
+        the wrapped function will be called with the supplied arguments.
+        A miss will be recorded in the cache statistics.
+
+        If *unhashable* is 'ignore', the wrapped function will be called
+        with the supplied arguments. A miss will be recorded in
+        the cache statistics.
+
+    View the cache statistics named tuple (hits, misses, maxsize, currsize)
+    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
+    Access the underlying function with f.__wrapped__.
+
+    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+%prep
+%autosetup -n fastcache-1.1.0
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+    find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+    find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+    find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+    find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+    find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-fastcache -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Thu Mar 09 2023 Python_Bot - 1.1.0-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..c236f92
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+fff901f2f906d7a32098949fa26204e6 fastcache-1.1.0.tar.gz
-- 
cgit v1.2.3
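As a rough illustration of the decorator options documented in the spec's
%description above (a sketch for orientation, not part of the packaged spec
file), the snippet below exercises the documented typed and unhashable keyword
arguments of clru_cache. It assumes the built python3-fastcache package is
installed so that fastcache is importable; the helper functions square() and
total() are made-up names for this example only.

    # Sketch: exercising the clru_cache options described in the spec above.
    from fastcache import clru_cache

    # typed=True: square(3) and square(3.0) are cached as distinct calls.
    @clru_cache(maxsize=128, typed=True)
    def square(x):
        return x * x

    square(3)     # miss
    square(3.0)   # miss, stored separately because typed=True
    square(3)     # hit
    print(square.cache_info())   # expect hits=1, misses=2, currsize=2

    # unhashable='ignore': a mutable argument such as a list is not cached;
    # the wrapped function is simply called and a miss is recorded.
    @clru_cache(maxsize=32, unhashable='ignore')
    def total(values):
        return sum(values)

    print(total([1, 2, 3]))      # 6, counted as a cache miss
    print(total.cache_info())

    # Clear cached results and statistics when needed.
    square.cache_clear()

With the default unhashable='error', the same total([1, 2, 3]) call would
raise TypeError instead of falling through to the wrapped function.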