author    CoprDistGit <infra@openeuler.org>  2023-05-18 05:16:13 +0000
committer CoprDistGit <infra@openeuler.org>  2023-05-18 05:16:13 +0000
commit    dc7f4ae518e59b3e6bb95aabb44d5f3d8fa6ee29 (patch)
tree      0bf8d8f1b1c653571633e129f05d16eb96c79261
parent    5188afb8faf2287a5330f3a62b9e584c97d0db43 (diff)
automatic import of python-torchelastic
-rw-r--r--  .gitignore               |   1
-rw-r--r--  python-torchelastic.spec | 240
-rw-r--r--  sources                  |   1
3 files changed, 242 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
index e69de29..dfc5459 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/torchelastic-0.2.2.tar.gz
diff --git a/python-torchelastic.spec b/python-torchelastic.spec
new file mode 100644
index 0000000..c316582
--- /dev/null
+++ b/python-torchelastic.spec
@@ -0,0 +1,240 @@
+%global _empty_manifest_terminate_build 0
+Name: python-torchelastic
+Version: 0.2.2
+Release: 1
+Summary: PyTorch Elastic Training
+License: BSD-3-Clause
+URL: https://github.com/pytorch/elastic
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/4f/b5/6b598fe8881a2de40e5a01100ab5932c8b791b9249ccc99c0d5006443c93/torchelastic-0.2.2.tar.gz
+BuildArch: noarch
+
+Requires: python3-numpy
+Requires: python3-etcd
+Requires: python3-torch
+
+%description
+[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](LICENSE)[![CircleCI](https://circleci.com/gh/pytorch/elastic.svg?style=svg&circle-token=9bea46e94adbe2f3e0fb2d4054b1b655f2e208c2)](https://circleci.com/gh/pytorch/elastic)
+
+# TorchElastic
+
+TorchElastic allows you to launch distributed PyTorch jobs in a
+fault-tolerant and elastic manner.
+For the latest documentation, please refer to our
+[website](https://pytorch.org/elastic).
+
+
+## Requirements
+torchelastic requires
+* python3 (3.8+)
+* torch
+* etcd
+
+## Installation
+```bash
+pip install torchelastic
+```
+
+## Quickstart
+
+**Fault-tolerant** on `4` nodes, `8` trainers/node, total `4 * 8 = 32` trainers.
+Run the following on all nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
+**Elastic** on `1 ~ 4` nodes, `8` trainers/node, total `8 ~ 32` trainers. The
+job starts as soon as `1` node is healthy; you may add up to `4` nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=1:4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
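+`YOUR_TRAINING_SCRIPT.py` stands for any entry point that initializes
+`torch.distributed`. A minimal sketch (hypothetical file name, assuming the
+launcher exports `LOCAL_RANK`, `RANK`, `WORLD_SIZE`, `MASTER_ADDR`, and
+`MASTER_PORT` in the environment; check the docs for your version):
+```python
+# minimal_trainer.py -- hypothetical trainer for the launch commands above.
+# Assumes the launcher exports LOCAL_RANK, RANK, WORLD_SIZE, MASTER_ADDR,
+# and MASTER_PORT (verify against your torchelastic version).
+import os
+
+import torch.distributed as dist
+
+
+def main():
+    local_rank = int(os.environ["LOCAL_RANK"])
+    # The default env:// init reads MASTER_ADDR, MASTER_PORT, RANK, and
+    # WORLD_SIZE from the environment prepared by the launcher.
+    dist.init_process_group(backend="gloo")
+    print(f"rank {dist.get_rank()}/{dist.get_world_size()} "
+          f"(local rank {local_rank}) is up")
+    # ... build the model, wrap it in DistributedDataParallel, train ...
+    dist.destroy_process_group()
+
+
+if __name__ == "__main__":
+    main()
+```
+On a failure or membership change, torchelastic restarts all workers, so the
+script should reload its latest checkpoint on startup rather than assume a
+fresh run.
+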
+## Contributing
+
+We welcome PRs. See the [CONTRIBUTING](CONTRIBUTING.md) file.
+
+## License
+torchelastic is BSD licensed, as found in the [LICENSE](LICENSE) file.
+
+
+
+
+%package -n python3-torchelastic
+Summary: PyTorch Elastic Training
+Provides: python-torchelastic
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-torchelastic
+[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](LICENSE)[![CircleCI](https://circleci.com/gh/pytorch/elastic.svg?style=svg&circle-token=9bea46e94adbe2f3e0fb2d4054b1b655f2e208c2)](https://circleci.com/gh/pytorch/elastic)
+
+# TorchElastic
+
+TorchElastic allows you to launch distributed PyTorch jobs in a
+fault-tolerant and elastic manner.
+For the latest documentation, please refer to our
+[website](https://pytorch.org/elastic).
+
+
+## Requirements
+torchelastic requires
+* python3 (3.8+)
+* torch
+* etcd
+
+## Installation
+```bash
+pip install torchelastic
+```
+
+## Quickstart
+
+**Fault-tolerant** on `4` nodes, `8` trainers/node, total `4 * 8 = 32` trainers.
+Run the following on all nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
+**Elastic** on `1 ~ 4` nodes, `8` trainers/node, total `8 ~ 32` trainers. The
+job starts as soon as `1` node is healthy; you may add up to `4` nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=1:4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
+## Contributing
+
+We welcome PRs. See the [CONTRIBUTING](CONTRIBUTING.md) file.
+
+## License
+torchelastic is BSD licensed, as found in the [LICENSE](LICENSE) file.
+
+
+
+
+%package help
+Summary: Development documents and examples for torchelastic
+Provides: python3-torchelastic-doc
+%description help
+[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](LICENSE)[![CircleCI](https://circleci.com/gh/pytorch/elastic.svg?style=svg&circle-token=9bea46e94adbe2f3e0fb2d4054b1b655f2e208c2)](https://circleci.com/gh/pytorch/elastic)
+
+# TorchElastic
+
+TorchElastic allows you to launch distributed PyTorch jobs in a
+fault-tolerant and elastic manner.
+For the latest documentation, please refer to our
+[website](https://pytorch.org/elastic).
+
+
+## Requirements
+torchelastic requires
+* python3 (3.8+)
+* torch
+* etcd
+
+## Installation
+```bash
+pip install torchelastic
+```
+
+## Quickstart
+
+**Fault-tolerant** on `4` nodes, `8` trainers/node, total `4 * 8 = 32` trainers.
+Run the following on all nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
+**Elastic** on `1 ~ 4` nodes, `8` trainers/node, total `8 ~ 32` trainers. The
+job starts as soon as `1` node is healthy; you may add up to `4` nodes.
+```bash
+python -m torchelastic.distributed.launch \
+    --nnodes=1:4 \
+    --nproc_per_node=8 \
+    --rdzv_id=JOB_ID \
+    --rdzv_backend=etcd \
+    --rdzv_endpoint=ETCD_HOST:ETCD_PORT \
+    YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+```
+
+## Contributing
+
+We welcome PRs. See the [CONTRIBUTING](CONTRIBUTING.md) file.
+
+## License
+torchelastic is BSD licensed, as found in the [LICENSE](LICENSE) file.
+
+
+
+
+%prep
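+# Unpack Source0 and cd into the torchelastic-0.2.2 source tree.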
+%autosetup -n torchelastic-0.2.2
+
+%build
+%py3_build
+
+%install
+%py3_install
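+# Bundle any upstream doc/example directories into the package docdir.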
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
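+# Record every installed file so the lists below can drive packaging:
+# libraries and binaries go to filelist.lst, man pages to doclist.lst.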
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-torchelastic -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Thu May 18 2023 Python_Bot <Python_Bot@openeuler.org> - 0.2.2-1
+- Package Spec generated
diff --git a/sources b/sources
new file mode 100644
index 0000000..9e38b82
--- /dev/null
+++ b/sources
@@ -0,0 +1 @@
+26edf446974517c052ef47ab0890c938 torchelastic-0.2.2.tar.gz