%global _empty_manifest_terminate_build 0
Name: python-adapter-transformers
Version: 3.2.1
Release: 1
Summary: A friendly fork of HuggingFace's Transformers, adding Adapters to PyTorch language models
License: Apache-2.0
URL: https://github.com/adapter-hub/adapter-transformers
Source0: https://mirrors.nju.edu.cn/pypi/web/packages/6a/92/ef97a3c8f7433d272e7df3a312d32935aaaab425a68158007276dda04042/adapter-transformers-3.2.1.tar.gz
BuildArch: noarch
Requires: python3-filelock
Requires: python3-huggingface-hub
Requires: python3-numpy
Requires: python3-packaging
Requires: python3-pyyaml
Requires: python3-regex
Requires: python3-requests
Requires: python3-tokenizers
Requires: python3-tqdm
Requires: python3-importlib-metadata
Requires: python3-accelerate
Requires: python3-tensorflow
Requires: python3-onnxconverter-common
Requires: python3-tf2onnx
Requires: python3-tensorflow-text
Requires: python3-keras-nlp
Requires: python3-torch
Requires: python3-jax
Requires: python3-jaxlib
Requires: python3-flax
Requires: python3-optax
Requires: python3-sentencepiece
Requires: python3-protobuf
Requires: python3-torchaudio
Requires: python3-librosa
Requires: python3-pyctcdecode
Requires: python3-phonemizer
Requires: python3-kenlm
Requires: python3-Pillow
Requires: python3-optuna
Requires: python3-ray[tune]
Requires: python3-sigopt
Requires: python3-timm
Requires: python3-codecarbon
Requires: python3-decord
Requires: python3-deepspeed
Requires: python3-pytest
Requires: python3-pytest-xdist
Requires: python3-timeout-decorator
Requires: python3-parameterized
Requires: python3-psutil
Requires: python3-datasets
Requires: python3-dill
Requires: python3-evaluate
Requires: python3-pytest-timeout
Requires: python3-black
Requires: python3-sacrebleu
Requires: python3-rouge-score
Requires: python3-nltk
Requires: python3-GitPython
Requires: python3-hf-doc-builder
Requires: python3-sacremoses
Requires: python3-rjieba
Requires: python3-safetensors
Requires: python3-beautifulsoup4
Requires: python3-faiss-cpu
Requires: python3-cookiecutter
Requires: python3-isort
Requires: python3-flake8
Requires: python3-fugashi
Requires: python3-ipadic
Requires: python3-unidic-lite
Requires: python3-unidic
Requires: python3-sudachipy
Requires: python3-sudachidict-core
Requires: python3-rhoknp
Requires: python3-docutils
Requires: python3-myst-parser
Requires: python3-sphinx
Requires: python3-sphinx-markdown-tables
Requires: python3-sphinx-rtd-theme
Requires: python3-sphinx-copybutton
Requires: python3-sphinxext-opengraph
Requires: python3-sphinx-intl
Requires: python3-sphinx-multiversion
Requires: python3-scikit-learn
Requires: python3-onnxruntime
Requires: python3-onnxruntime-tools
Requires: python3-fairscale
Requires: python3-ftfy
Requires: python3-natten
Requires: python3-sagemaker
Requires: python3-pydantic
Requires: python3-uvicorn
Requires: python3-fastapi
Requires: python3-starlette
Requires: python3-tensorflow-cpu
%description
adapter-transformers
A friendly fork of HuggingFace's Transformers, adding Adapters to PyTorch language models

`adapter-transformers` is an extension of [HuggingFace's Transformers](https://github.com/huggingface/transformers) library, integrating adapters into state-of-the-art language models by incorporating **[AdapterHub](https://adapterhub.ml)**, a central repository for pre-trained adapter modules.
_💡 Important: This library can be used as a drop-in replacement for HuggingFace Transformers and regularly synchronizes new upstream changes.
Thus, most files in this repository are direct copies from the HuggingFace Transformers source, modified only with changes required for the adapter implementations._
## Installation
`adapter-transformers` currently supports **Python 3.8+** and **PyTorch 1.12.1+**.
After [installing PyTorch](https://pytorch.org/get-started/locally/), you can install `adapter-transformers` from PyPI ...
```
pip install -U adapter-transformers
```
... or from source by cloning the repository:
```
git clone https://github.com/adapter-hub/adapter-transformers.git
cd adapter-transformers
pip install .
```
## Getting Started
HuggingFace's great documentation on getting started with _Transformers_ can be found [here](https://huggingface.co/transformers/index.html). `adapter-transformers` is fully compatible with _Transformers_.
To get started with adapters, refer to these locations:
- **[Colab notebook tutorials](https://github.com/Adapter-Hub/adapter-transformers/tree/master/notebooks)**, a series of notebooks providing an introduction to all the main concepts of (adapter-)transformers and AdapterHub
- **https://docs.adapterhub.ml**, our documentation on training and using adapters with _adapter-transformers_
- **https://adapterhub.ml** to explore available pre-trained adapter modules and share your own adapters
- **[Examples folder](https://github.com/Adapter-Hub/adapter-transformers/tree/master/examples/pytorch)** of this repository containing HuggingFace's example training scripts, many adapted for training adapters
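As a quick illustration of the workflow covered by the resources above, the sketch below loads a pre-trained model, attaches a new adapter, and activates it. This is a minimal, hedged example assuming the 3.x `AutoAdapterModel` API; `my_task` is a placeholder adapter name, and the documentation linked above remains the authoritative reference.
```
# Minimal sketch of the adapter workflow (assumes the adapter-transformers
# 3.x API; "my_task" is a placeholder name, not a real Hub adapter).
from transformers import AutoAdapterModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("roberta-base")
model = AutoAdapterModel.from_pretrained("roberta-base")

# Add a new bottleneck adapter plus a matching classification head.
model.add_adapter("my_task")
model.add_classification_head("my_task", num_labels=2)

# Freeze base-model weights and mark only the adapter as trainable.
model.train_adapter("my_task")

# Activate the adapter so it is used in the forward pass.
model.set_active_adapters("my_task")
inputs = tokenizer("Adapters are lightweight.", return_tensors="pt")
outputs = model(**inputs)
```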
## Implemented Methods
Currently, adapter-transformers integrates all architectures and methods listed below:
| Method | Paper(s) | Quick Links |
| --- | --- | --- |
| Bottleneck adapters | [Houlsby et al. (2019)](https://arxiv.org/pdf/1902.00751.pdf)<br> [Bapna and Firat (2019)](https://arxiv.org/pdf/1909.08478.pdf) | [Quickstart](https://docs.adapterhub.ml/quickstart.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/01_Adapter_Training.ipynb) |
| AdapterFusion | [Pfeiffer et al. (2021)](https://aclanthology.org/2021.eacl-main.39.pdf) | [Docs: Training](https://docs.adapterhub.ml/training.html#train-adapterfusion), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/03_Adapter_Fusion.ipynb) |
| MAD-X,<br> Invertible adapters | [Pfeiffer et al. (2020)](https://aclanthology.org/2020.emnlp-main.617/) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/04_Cross_Lingual_Transfer.ipynb) |
| AdapterDrop | [Rücklé et al. (2021)](https://arxiv.org/pdf/2010.11918.pdf) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/05_Adapter_Drop_Training.ipynb) |
| MAD-X 2.0,<br> Embedding training | [Pfeiffer et al. (2021)](https://arxiv.org/pdf/2012.15562.pdf) | [Docs: Embeddings](https://docs.adapterhub.ml/embeddings.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/08_NER_Wikiann.ipynb) |
| Prefix Tuning | [Li and Liang (2021)](https://arxiv.org/pdf/2101.00190.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#prefix-tuning) |
| Parallel adapters,<br> Mix-and-Match adapters | [He et al. (2021)](https://arxiv.org/pdf/2110.04366.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#mix-and-match-adapters) |
| Compacter | [Mahabadi et al. (2021)](https://arxiv.org/pdf/2106.04647.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#compacter) |
| LoRA | [Hu et al. (2021)](https://arxiv.org/pdf/2106.09685.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#lora) |
| (IA)^3 | [Liu et al. (2022)](https://arxiv.org/pdf/2205.05638.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#ia-3) |
| UniPELT | [Mao et al. (2022)](https://arxiv.org/pdf/2110.07577.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#unipelt) |
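Each method in the table is selected via a configuration class passed to `add_adapter`. As a hedged sketch of that pattern (assuming the 3.x `transformers.adapters` config classes; `lora_example` is a placeholder name), adding a LoRA adapter might look like this:
```
from transformers import AutoAdapterModel
from transformers.adapters import LoRAConfig

model = AutoAdapterModel.from_pretrained("roberta-base")

# LoRA with rank 8; other methods follow the same pattern with their
# own config classes (e.g. PrefixTuningConfig, CompacterConfig).
config = LoRAConfig(r=8, alpha=16)
model.add_adapter("lora_example", config=config)
model.set_active_adapters("lora_example")
```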
## Supported Models
We currently support the PyTorch versions of all models listed on the **[Model Overview](https://docs.adapterhub.ml/model_overview.html) page** in our documentation.
## Citation
If you use this library for your work, please consider citing our paper [AdapterHub: A Framework for Adapting Transformers](https://arxiv.org/abs/2007.07779):
```
@inproceedings{pfeiffer2020AdapterHub,
    title={AdapterHub: A Framework for Adapting Transformers},
    author={Pfeiffer, Jonas and
            R{\"u}ckl{\'e}, Andreas and
            Poth, Clifton and
            Kamath, Aishwarya and
            Vuli{\'c}, Ivan and
            Ruder, Sebastian and
            Cho, Kyunghyun and
            Gurevych, Iryna},
    booktitle={Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},
    pages={46--54},
    year={2020}
}
```
%package -n python3-adapter-transformers
Summary: A friendly fork of HuggingFace's Transformers, adding Adapters to PyTorch language models
Provides: python-adapter-transformers
BuildRequires: python3-devel
BuildRequires: python3-setuptools
BuildRequires: python3-pip
%description -n python3-adapter-transformers
adapter-transformers
A friendly fork of HuggingFace's Transformers, adding Adapters to PyTorch language models

`adapter-transformers` is an extension of [HuggingFace's Transformers](https://github.com/huggingface/transformers) library, integrating adapters into state-of-the-art language models by incorporating **[AdapterHub](https://adapterhub.ml)**, a central repository for pre-trained adapter modules.
_💡 Important: This library can be used as a drop-in replacement for HuggingFace Transformers and regularly synchronizes new upstream changes.
Thus, most files in this repository are direct copies from the HuggingFace Transformers source, modified only with changes required for the adapter implementations._
## Installation
`adapter-transformers` currently supports **Python 3.8+** and **PyTorch 1.12.1+**.
After [installing PyTorch](https://pytorch.org/get-started/locally/), you can install `adapter-transformers` from PyPI ...
```
pip install -U adapter-transformers
```
... or from source by cloning the repository:
```
git clone https://github.com/adapter-hub/adapter-transformers.git
cd adapter-transformers
pip install .
```
## Getting Started
HuggingFace's great documentation on getting started with _Transformers_ can be found [here](https://huggingface.co/transformers/index.html). `adapter-transformers` is fully compatible with _Transformers_.
To get started with adapters, refer to these locations:
- **[Colab notebook tutorials](https://github.com/Adapter-Hub/adapter-transformers/tree/master/notebooks)**, a series of notebooks providing an introduction to all the main concepts of (adapter-)transformers and AdapterHub
- **https://docs.adapterhub.ml**, our documentation on training and using adapters with _adapter-transformers_
- **https://adapterhub.ml** to explore available pre-trained adapter modules and share your own adapters
- **[Examples folder](https://github.com/Adapter-Hub/adapter-transformers/tree/master/examples/pytorch)** of this repository containing HuggingFace's example training scripts, many adapted for training adapters
## Implemented Methods
Currently, adapter-transformers integrates all architectures and methods listed below:
| Method | Paper(s) | Quick Links |
| --- | --- | --- |
| Bottleneck adapters | [Houlsby et al. (2019)](https://arxiv.org/pdf/1902.00751.pdf)<br> [Bapna and Firat (2019)](https://arxiv.org/pdf/1909.08478.pdf) | [Quickstart](https://docs.adapterhub.ml/quickstart.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/01_Adapter_Training.ipynb) |
| AdapterFusion | [Pfeiffer et al. (2021)](https://aclanthology.org/2021.eacl-main.39.pdf) | [Docs: Training](https://docs.adapterhub.ml/training.html#train-adapterfusion), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/03_Adapter_Fusion.ipynb) |
| MAD-X,<br> Invertible adapters | [Pfeiffer et al. (2020)](https://aclanthology.org/2020.emnlp-main.617/) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/04_Cross_Lingual_Transfer.ipynb) |
| AdapterDrop | [Rücklé et al. (2021)](https://arxiv.org/pdf/2010.11918.pdf) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/05_Adapter_Drop_Training.ipynb) |
| MAD-X 2.0,<br> Embedding training | [Pfeiffer et al. (2021)](https://arxiv.org/pdf/2012.15562.pdf) | [Docs: Embeddings](https://docs.adapterhub.ml/embeddings.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/08_NER_Wikiann.ipynb) |
| Prefix Tuning | [Li and Liang (2021)](https://arxiv.org/pdf/2101.00190.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#prefix-tuning) |
| Parallel adapters,<br> Mix-and-Match adapters | [He et al. (2021)](https://arxiv.org/pdf/2110.04366.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#mix-and-match-adapters) |
| Compacter | [Mahabadi et al. (2021)](https://arxiv.org/pdf/2106.04647.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#compacter) |
| LoRA | [Hu et al. (2021)](https://arxiv.org/pdf/2106.09685.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#lora) |
| (IA)^3 | [Liu et al. (2022)](https://arxiv.org/pdf/2205.05638.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#ia-3) |
| UniPELT | [Mao et al. (2022)](https://arxiv.org/pdf/2110.07577.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#unipelt) |
## Supported Models
We currently support the PyTorch versions of all models listed on the **[Model Overview](https://docs.adapterhub.ml/model_overview.html) page** in our documentation.
## Citation
If you use this library for your work, please consider citing our paper [AdapterHub: A Framework for Adapting Transformers](https://arxiv.org/abs/2007.07779):
```
@inproceedings{pfeiffer2020AdapterHub,
    title={AdapterHub: A Framework for Adapting Transformers},
    author={Pfeiffer, Jonas and
            R{\"u}ckl{\'e}, Andreas and
            Poth, Clifton and
            Kamath, Aishwarya and
            Vuli{\'c}, Ivan and
            Ruder, Sebastian and
            Cho, Kyunghyun and
            Gurevych, Iryna},
    booktitle={Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},
    pages={46--54},
    year={2020}
}
```
%package help
Summary: Development documents and examples for adapter-transformers
Provides: python3-adapter-transformers-doc
%description help
adapter-transformers
A friendly fork of HuggingFace's Transformers, adding Adapters to PyTorch language models

`adapter-transformers` is an extension of [HuggingFace's Transformers](https://github.com/huggingface/transformers) library, integrating adapters into state-of-the-art language models by incorporating **[AdapterHub](https://adapterhub.ml)**, a central repository for pre-trained adapter modules.
_💡 Important: This library can be used as a drop-in replacement for HuggingFace Transformers and regularly synchronizes new upstream changes.
Thus, most files in this repository are direct copies from the HuggingFace Transformers source, modified only with changes required for the adapter implementations._
## Installation
`adapter-transformers` currently supports **Python 3.8+** and **PyTorch 1.12.1+**.
After [installing PyTorch](https://pytorch.org/get-started/locally/), you can install `adapter-transformers` from PyPI ...
```
pip install -U adapter-transformers
```
... or from source by cloning the repository:
```
git clone https://github.com/adapter-hub/adapter-transformers.git
cd adapter-transformers
pip install .
```
## Getting Started
HuggingFace's great documentation on getting started with _Transformers_ can be found [here](https://huggingface.co/transformers/index.html). `adapter-transformers` is fully compatible with _Transformers_.
To get started with adapters, refer to these locations:
- **[Colab notebook tutorials](https://github.com/Adapter-Hub/adapter-transformers/tree/master/notebooks)**, a series of notebooks providing an introduction to all the main concepts of (adapter-)transformers and AdapterHub
- **https://docs.adapterhub.ml**, our documentation on training and using adapters with _adapter-transformers_
- **https://adapterhub.ml** to explore available pre-trained adapter modules and share your own adapters
- **[Examples folder](https://github.com/Adapter-Hub/adapter-transformers/tree/master/examples/pytorch)** of this repository containing HuggingFace's example training scripts, many adapted for training adapters
## Implemented Methods
Currently, adapter-transformers integrates all architectures and methods listed below:
| Method | Paper(s) | Quick Links |
| --- | --- | --- |
| Bottleneck adapters | [Houlsby et al. (2019)](https://arxiv.org/pdf/1902.00751.pdf)<br> [Bapna and Firat (2019)](https://arxiv.org/pdf/1909.08478.pdf) | [Quickstart](https://docs.adapterhub.ml/quickstart.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/01_Adapter_Training.ipynb) |
| AdapterFusion | [Pfeiffer et al. (2021)](https://aclanthology.org/2021.eacl-main.39.pdf) | [Docs: Training](https://docs.adapterhub.ml/training.html#train-adapterfusion), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/03_Adapter_Fusion.ipynb) |
| MAD-X,<br> Invertible adapters | [Pfeiffer et al. (2020)](https://aclanthology.org/2020.emnlp-main.617/) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/04_Cross_Lingual_Transfer.ipynb) |
| AdapterDrop | [Rücklé et al. (2021)](https://arxiv.org/pdf/2010.11918.pdf) | [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/05_Adapter_Drop_Training.ipynb) |
| MAD-X 2.0,<br> Embedding training | [Pfeiffer et al. (2021)](https://arxiv.org/pdf/2012.15562.pdf) | [Docs: Embeddings](https://docs.adapterhub.ml/embeddings.html), [Notebook](https://colab.research.google.com/github/Adapter-Hub/adapter-transformers/blob/master/notebooks/08_NER_Wikiann.ipynb) |
| Prefix Tuning | [Li and Liang (2021)](https://arxiv.org/pdf/2101.00190.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#prefix-tuning) |
| Parallel adapters,<br> Mix-and-Match adapters | [He et al. (2021)](https://arxiv.org/pdf/2110.04366.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#mix-and-match-adapters) |
| Compacter | [Mahabadi et al. (2021)](https://arxiv.org/pdf/2106.04647.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#compacter) |
| LoRA | [Hu et al. (2021)](https://arxiv.org/pdf/2106.09685.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#lora) |
| (IA)^3 | [Liu et al. (2022)](https://arxiv.org/pdf/2205.05638.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#ia-3) |
| UniPELT | [Mao et al. (2022)](https://arxiv.org/pdf/2110.07577.pdf) | [Docs](https://docs.adapterhub.ml/overview.html#unipelt) |
## Supported Models
We currently support the PyTorch versions of all models listed on the **[Model Overview](https://docs.adapterhub.ml/model_overview.html) page** in our documentation.
## Citation
If you use this library for your work, please consider citing our paper [AdapterHub: A Framework for Adapting Transformers](https://arxiv.org/abs/2007.07779):
```
@inproceedings{pfeiffer2020AdapterHub,
    title={AdapterHub: A Framework for Adapting Transformers},
    author={Pfeiffer, Jonas and
            R{\"u}ckl{\'e}, Andreas and
            Poth, Clifton and
            Kamath, Aishwarya and
            Vuli{\'c}, Ivan and
            Ruder, Sebastian and
            Cho, Kyunghyun and
            Gurevych, Iryna},
    booktitle={Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},
    pages={46--54},
    year={2020}
}
```
%prep
%autosetup -n adapter-transformers-3.2.1
%build
%py3_build
%install
%py3_install
install -d -m755 %{buildroot}/%{_pkgdocdir}
if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
pushd %{buildroot}
if [ -d usr/lib ]; then
    find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/lib64 ]; then
    find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/bin ]; then
    find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
fi
if [ -d usr/sbin ]; then
    find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
fi
touch doclist.lst
if [ -d usr/share/man ]; then
    find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
fi
popd
mv %{buildroot}/filelist.lst .
mv %{buildroot}/doclist.lst .
%files -n python3-adapter-transformers -f filelist.lst
%dir %{python3_sitelib}/*
%files help -f doclist.lst
%{_docdir}/*
%changelog
* Tue Apr 11 2023 Python_Bot <Python_Bot@openeuler.org> - 3.2.1-1
- Package Spec generated