summaryrefslogtreecommitdiff
path: root/python-keras-pos-embd.spec
diff options
context:
space:
mode:
Diffstat (limited to 'python-keras-pos-embd.spec')
-rw-r--r--python-keras-pos-embd.spec351
1 files changed, 351 insertions, 0 deletions
diff --git a/python-keras-pos-embd.spec b/python-keras-pos-embd.spec
new file mode 100644
index 0000000..eb5af27
--- /dev/null
+++ b/python-keras-pos-embd.spec
@@ -0,0 +1,351 @@
+%global _empty_manifest_terminate_build 0
+Name: python-keras-pos-embd
+Version: 0.13.0
+Release: 1
+Summary: Position embedding layers in Keras
+License: MIT
+URL: https://github.com/CyberZHG/keras-pos-embd
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/a3/f0/8803f9ac4cddd9d2640347bde2f451d7d33bbc99d761f2bc00fb15911bf6/keras-pos-embd-0.13.0.tar.gz
+BuildArch: noarch
+
+
+%description
+# Keras Position Embedding
+
+[![Version](https://img.shields.io/pypi/v/keras-pos-embd.svg)](https://pypi.org/project/keras-pos-embd/)
+
+\[[中文](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.zh-CN.md)|[English](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.md)\]
+
+Position embedding layers in Keras.
+
+## Install
+
+```bash
+pip install keras-pos-embd
+```
+
+## Usage
+
+### Trainable Embedding
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(PositionEmbedding(
+ input_shape=(None,),
+ input_dim=10, # The maximum absolute value of positions.
+ output_dim=2, # The dimension of embeddings.
+    mask_zero=10000,     # The index that represents padding (because `0` will be used in relative positioning).
+ mode=PositionEmbedding.MODE_EXPAND,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+Note that you don't need to enable `mask_zero` if you want to add/concatenate other layers like word embeddings with masks:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(PositionEmbedding(
+ input_dim=100,
+ output_dim=5,
+ mode=PositionEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+### Sin & Cos Embedding
+
+The [sine and cosine embedding](https://arxiv.org/pdf/1706.03762) has no trainable weights. The layer has three modes; it works just like `PositionEmbedding` in `expand` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(TrigPosEmbedding(
+ input_shape=(None,),
+ output_dim=30, # The dimension of embeddings.
+ mode=TrigPosEmbedding.MODE_EXPAND, # Use `expand` mode
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+If you want to add this embedding to an existing embedding, there is no need to add a position input in `add` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(TrigPosEmbedding(
+ output_dim=5,
+ mode=TrigPosEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+%package -n python3-keras-pos-embd
+Summary: Position embedding layers in Keras
+Provides: python-keras-pos-embd
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-keras-pos-embd
+# Keras Position Embedding
+
+[![Version](https://img.shields.io/pypi/v/keras-pos-embd.svg)](https://pypi.org/project/keras-pos-embd/)
+
+\[[中文](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.zh-CN.md)|[English](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.md)\]
+
+Position embedding layers in Keras.
+
+## Install
+
+```bash
+pip install keras-pos-embd
+```
+
+## Usage
+
+### Trainable Embedding
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(PositionEmbedding(
+ input_shape=(None,),
+ input_dim=10, # The maximum absolute value of positions.
+ output_dim=2, # The dimension of embeddings.
+    mask_zero=10000,     # The index that represents padding (because `0` will be used in relative positioning).
+ mode=PositionEmbedding.MODE_EXPAND,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+Note that you don't need to enable `mask_zero` if you want to add/concatenate other layers like word embeddings with masks:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(PositionEmbedding(
+ input_dim=100,
+ output_dim=5,
+ mode=PositionEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+### Sin & Cos Embedding
+
+The [sine and cosine embedding](https://arxiv.org/pdf/1706.03762) has no trainable weights. The layer has three modes; it works just like `PositionEmbedding` in `expand` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(TrigPosEmbedding(
+ input_shape=(None,),
+ output_dim=30, # The dimension of embeddings.
+ mode=TrigPosEmbedding.MODE_EXPAND, # Use `expand` mode
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+If you want to add this embedding to an existing embedding, there is no need to add a position input in `add` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(TrigPosEmbedding(
+ output_dim=5,
+ mode=TrigPosEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+%package help
+Summary: Development documents and examples for keras-pos-embd
+Provides: python3-keras-pos-embd-doc
+%description help
+# Keras Position Embedding
+
+[![Version](https://img.shields.io/pypi/v/keras-pos-embd.svg)](https://pypi.org/project/keras-pos-embd/)
+
+\[[中文](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.zh-CN.md)|[English](https://github.com/CyberZHG/keras-pos-embd/blob/master/README.md)\]
+
+Position embedding layers in Keras.
+
+## Install
+
+```bash
+pip install keras-pos-embd
+```
+
+## Usage
+
+### Trainable Embedding
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(PositionEmbedding(
+ input_shape=(None,),
+ input_dim=10, # The maximum absolute value of positions.
+ output_dim=2, # The dimension of embeddings.
+    mask_zero=10000,     # The index that represents padding (because `0` will be used in relative positioning).
+ mode=PositionEmbedding.MODE_EXPAND,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+Note that you don't need to enable `mask_zero` if you want to add/concatenate other layers like word embeddings with masks:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import PositionEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(PositionEmbedding(
+ input_dim=100,
+ output_dim=5,
+ mode=PositionEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+### Sin & Cos Embedding
+
+The [sine and cosine embedding](https://arxiv.org/pdf/1706.03762) has no trainable weights. The layer has three modes; it works just like `PositionEmbedding` in `expand` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(TrigPosEmbedding(
+ input_shape=(None,),
+ output_dim=30, # The dimension of embeddings.
+ mode=TrigPosEmbedding.MODE_EXPAND, # Use `expand` mode
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+If you want to add this embedding to an existing embedding, there is no need to add a position input in `add` mode:
+
+```python
+from tensorflow import keras
+from keras_pos_embd import TrigPosEmbedding
+
+model = keras.models.Sequential()
+model.add(keras.layers.Embedding(
+ input_shape=(None,),
+ input_dim=10,
+ output_dim=5,
+ mask_zero=True,
+))
+model.add(TrigPosEmbedding(
+ output_dim=5,
+ mode=TrigPosEmbedding.MODE_ADD,
+))
+model.compile('adam', 'mse')
+model.summary()
+```
+
+%prep
+%autosetup -n keras-pos-embd-0.13.0
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-keras-pos-embd -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Tue Apr 11 2023 Python_Bot <Python_Bot@openeuler.org> - 0.13.0-1
+- Package Spec generated