Diffstat (limited to 'python-oarlauncher.spec')
-rw-r--r--  python-oarlauncher.spec  390
1 file changed, 390 insertions, 0 deletions
diff --git a/python-oarlauncher.spec b/python-oarlauncher.spec
new file mode 100644
index 0000000..1185e61
--- /dev/null
+++ b/python-oarlauncher.spec
@@ -0,0 +1,390 @@
+%global _empty_manifest_terminate_build 0
+Name: python-OarLauncher
+Version: 0.2.24
+Release: 1
+Summary: Start OAR job arrays on the nef cluster
+License: MIT License
+URL: https://github.com/GaetanDesrues/OarLauncher
+Source0: https://mirrors.nju.edu.cn/pypi/web/packages/be/e9/68e052ddc5e4af686d7bfe4055e5e5592118c7d95f51914224759f21c52a/OarLauncher-0.2.24.tar.gz
+BuildArch: noarch
+
+
+%description
+### Simply start OAR job arrays on the nef cluster
+
+**NB**: To simply start a stand-alone job, use [`treefiles.start_oar`](https://github.com/GaetanDesrues/TreeFiles/blob/master/treefiles/oar.py#L64-L178).
+
+
+#### Install
+```bash
+pip install --upgrade OarLauncher
+```
+
+
+#### Usage
+```python
+from collections import defaultdict
+import treefiles as tf
+from OarLauncher import ArrayJob
+
+
+# Choose a directory where script and logs are dumped
+out_dir = tf.Tree.new(__file__, "generated").dump(clean=True)
+
+# Create the parameters array
+nb_jobs, data = 10, defaultdict(list)
+for i in range(nb_jobs):
+ data["simu_dir"].append(f"d_{i}")
+ data["infos"].append(f"this is job {i}")
+
+# Path of the script that will be called by each job of the array
+# Each line of data will be passed to this script as a JSON command-line argument
+job_script = tf.curDirs(__file__, "job.py")
+
+# Create the job array
+jobs = ArrayJob(out_dir, data, job_script)
+# Setup jobs conf
+jobs.build_oar_command(
+    queue=tf.Queue.BESTEFFORT,
+    to_file=True,  # whether `shell_out` is dumped to a file or returned directly
+    wall_time=tf.walltime(minutes=2),
+    prgm=tf.Program.OARCTL,  # `OARCTL` blocks (the main process runs until all jobs end), `OARSUB` does not
+)
+# Write scripts
+jobs.dump(
+    # python_path=[...],  # list of paths that will be added to PYTHONPATH
+    # MY_ENV=...,  # additional environment variables can be passed as kwargs
+)
+# Start the job array
+shell_out = jobs.run() # blocking operation if prgm=tf.Program.OARCTL
+print(shell_out)
+```
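+
+Each job runs `job_script` with its own line of `data`. For illustration, here is a minimal sketch of what `job.py` could look like, assuming the launcher passes the job's parameters as a single JSON string on the command line (the exact payload format is an assumption based on the comment above, not a documented contract of `OarLauncher`):
+
+```python
+import json
+import sys
+
+
+def main():
+    # Assumed input: one JSON object per job, e.g. '{"simu_dir": "d_3", "infos": "this is job 3"}'
+    params = json.loads(sys.argv[1])
+    simu_dir = params["simu_dir"]
+    infos = params["infos"]
+
+    # Replace this with the actual per-job work
+    print(f"Running simulation in {simu_dir}: {infos}")
+
+
+if __name__ == "__main__":
+    main()
+```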
+
+## `tf.start_oar`
+
+```python
+def start_oar(
+    runme_str,
+    logs_dir: Union[tf.Tree, str] = None,
+    array_fname: str = None,
+    wall_time: str = walltime(minutes=1),
+    host: int = 1,
+    core: int = 1,
+    job_name: str = None,
+    queue: str = Queue.DEFAULT,
+    cmd_fname: str = None,
+    runme_args: List[str] = None,
+    do_run: bool = True,
+    with_json: bool = False,
+    notify: List = None,
+    prgm: str = Program.OARSUB,
+    stdout: str = None,
+    stderr: str = None,
+) -> Union[str, List[str]]:
+    """
+    Builds an oar command.
+
+    Usage example:
+
+    .. code::
+
+        cdir = tf.Tree.new(__file__)
+        sdir = cdir.dir("OarOut").dump(clean=True)
+        res = start_oar(
+            runme_str=cdir.path("runme.sh"),
+            logs_dir=sdir,
+            wall_time=walltime(minutes=10),
+            queue="besteffort",
+            core=2,
+            cmd_fname=sdir.path("cmd.sh"),
+            do_run=True,
+        )
+
+    :param runme_str: path to the runme script or command line
+    :param logs_dir: directory for std out/err
+    :param array_fname: path to the arguments file (array file)
+    :param wall_time: wall time of the job
+    :param host: number of nodes
+    :param core: number of cores
+    :param job_name: job name
+    :param queue: job queue ['default', 'besteffort']
+    :param cmd_fname: path to a file to save the oar command
+    :param runme_args: list of command-line arguments given to the runme script
+    :param do_run: whether to execute the command or not
+    :param with_json: add the -J option to the oarsub command
+    :param notify: notify options (List); you may use the NotifyOar class to build this option
+    :param prgm: `oarsub` or `oarctl sub`
+    :param stdout: path for stdout
+    :param stderr: path for stderr, defaults to stdout if None
+    :return: the output of the oar command if `do_run` is True, else the oar command itself
+    """
+```
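+
+As described in the docstring, when `do_run` is False the oar command is returned instead of being executed. A minimal sketch of a stand-alone call, assuming `treefiles` is imported as `tf` and exposes `start_oar`, `walltime`, and `Tree` as shown above (all paths here are illustrative):
+
+```python
+import treefiles as tf
+
+cdir = tf.Tree.new(__file__)
+logs = cdir.dir("OarOut").dump(clean=True)  # directory that will receive std out/err
+
+cmd = tf.start_oar(
+    runme_str=cdir.path("runme.sh"),  # an existing runme script next to this file
+    logs_dir=logs,
+    wall_time=tf.walltime(minutes=5),
+    do_run=False,  # only build and return the oar command, do not submit it
+)
+print(cmd)
+```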
+
+
+
+%package -n python3-OarLauncher
+Summary: Start OAR job arrays on the nef cluster
+Provides: python-OarLauncher
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+BuildRequires: python3-pip
+%description -n python3-OarLauncher
+### Simply start OAR job arrays on the nef cluster
+
+**NB**: To simply start a stand-alone job, use [`treefiles.start_oar`](https://github.com/GaetanDesrues/TreeFiles/blob/master/treefiles/oar.py#L64-L178).
+
+
+#### Install
+```bash
+pip install --upgrade OarLauncher
+```
+
+
+#### Usage
+```python
+from collections import defaultdict
+import treefiles as tf
+from OarLauncher import ArrayJob
+
+
+# Choose a directory where script and logs are dumped
+out_dir = tf.Tree.new(__file__, "generated").dump(clean=True)
+
+# Create the parameters array
+nb_jobs, data = 10, defaultdict(list)
+for i in range(nb_jobs):
+ data["simu_dir"].append(f"d_{i}")
+ data["infos"].append(f"this is job {i}")
+
+# Path of the script that will be called by each job of the array
+# Each line of data will be passed to this script as a JSON command-line argument
+job_script = tf.curDirs(__file__, "job.py")
+
+# Create the job array
+jobs = ArrayJob(out_dir, data, job_script)
+# Setup jobs conf
+jobs.build_oar_command(
+    queue=tf.Queue.BESTEFFORT,
+    to_file=True,  # whether `shell_out` is dumped to a file or returned directly
+    wall_time=tf.walltime(minutes=2),
+    prgm=tf.Program.OARCTL,  # `OARCTL` blocks (the main process runs until all jobs end), `OARSUB` does not
+)
+# Write scripts
+jobs.dump(
+    # python_path=[...],  # list of paths that will be added to PYTHONPATH
+    # MY_ENV=...,  # additional environment variables can be passed as kwargs
+)
+# Start the job array
+shell_out = jobs.run() # blocking operation if prgm=tf.Program.OARCTL
+print(shell_out)
+```
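+
+Each job runs `job_script` with its own line of `data`. For illustration, here is a minimal sketch of what `job.py` could look like, assuming the launcher passes the job's parameters as a single JSON string on the command line (the exact payload format is an assumption based on the comment above, not a documented contract of `OarLauncher`):
+
+```python
+import json
+import sys
+
+
+def main():
+    # Assumed input: one JSON object per job, e.g. '{"simu_dir": "d_3", "infos": "this is job 3"}'
+    params = json.loads(sys.argv[1])
+    simu_dir = params["simu_dir"]
+    infos = params["infos"]
+
+    # Replace this with the actual per-job work
+    print(f"Running simulation in {simu_dir}: {infos}")
+
+
+if __name__ == "__main__":
+    main()
+```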
+
+## `tf.start_oar`
+
+```python
+def start_oar(
+    runme_str,
+    logs_dir: Union[tf.Tree, str] = None,
+    array_fname: str = None,
+    wall_time: str = walltime(minutes=1),
+    host: int = 1,
+    core: int = 1,
+    job_name: str = None,
+    queue: str = Queue.DEFAULT,
+    cmd_fname: str = None,
+    runme_args: List[str] = None,
+    do_run: bool = True,
+    with_json: bool = False,
+    notify: List = None,
+    prgm: str = Program.OARSUB,
+    stdout: str = None,
+    stderr: str = None,
+) -> Union[str, List[str]]:
+    """
+    Builds an oar command.
+
+    Usage example:
+
+    .. code::
+
+        cdir = tf.Tree.new(__file__)
+        sdir = cdir.dir("OarOut").dump(clean=True)
+        res = start_oar(
+            runme_str=cdir.path("runme.sh"),
+            logs_dir=sdir,
+            wall_time=walltime(minutes=10),
+            queue="besteffort",
+            core=2,
+            cmd_fname=sdir.path("cmd.sh"),
+            do_run=True,
+        )
+
+    :param runme_str: path to the runme script or command line
+    :param logs_dir: directory for std out/err
+    :param array_fname: path to the arguments file (array file)
+    :param wall_time: wall time of the job
+    :param host: number of nodes
+    :param core: number of cores
+    :param job_name: job name
+    :param queue: job queue ['default', 'besteffort']
+    :param cmd_fname: path to a file to save the oar command
+    :param runme_args: list of command-line arguments given to the runme script
+    :param do_run: whether to execute the command or not
+    :param with_json: add the -J option to the oarsub command
+    :param notify: notify options (List); you may use the NotifyOar class to build this option
+    :param prgm: `oarsub` or `oarctl sub`
+    :param stdout: path for stdout
+    :param stderr: path for stderr, defaults to stdout if None
+    :return: the output of the oar command if `do_run` is True, else the oar command itself
+    """
+```
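+
+As described in the docstring, when `do_run` is False the oar command is returned instead of being executed. A minimal sketch of a stand-alone call, assuming `treefiles` is imported as `tf` and exposes `start_oar`, `walltime`, and `Tree` as shown above (all paths here are illustrative):
+
+```python
+import treefiles as tf
+
+cdir = tf.Tree.new(__file__)
+logs = cdir.dir("OarOut").dump(clean=True)  # directory that will receive std out/err
+
+cmd = tf.start_oar(
+    runme_str=cdir.path("runme.sh"),  # an existing runme script next to this file
+    logs_dir=logs,
+    wall_time=tf.walltime(minutes=5),
+    do_run=False,  # only build and return the oar command, do not submit it
+)
+print(cmd)
+```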
+
+
+
+%package help
+Summary: Development documents and examples for OarLauncher
+Provides: python3-OarLauncher-doc
+%description help
+### Simply start OAR job arrays on the nef cluster
+
+**NB**: To simply start a stand-alone job, use [`treefiles.start_oar`](https://github.com/GaetanDesrues/TreeFiles/blob/master/treefiles/oar.py#L64-L178).
+
+
+#### Install
+```bash
+pip install --upgrade OarLauncher
+```
+
+
+#### Usage
+```python
+from collections import defaultdict
+import treefiles as tf
+from OarLauncher import ArrayJob
+
+
+# Choose a directory where script and logs are dumped
+out_dir = tf.Tree.new(__file__, "generated").dump(clean=True)
+
+# Create the parameters array
+nb_jobs, data = 10, defaultdict(list)
+for i in range(nb_jobs):
+ data["simu_dir"].append(f"d_{i}")
+ data["infos"].append(f"this is job {i}")
+
+# Path of the script that will be called by each job of the array
+# Each line of data will be passed to this script as a JSON command-line argument
+job_script = tf.curDirs(__file__, "job.py")
+
+# Create the job array
+jobs = ArrayJob(out_dir, data, job_script)
+# Setup jobs conf
+jobs.build_oar_command(
+    queue=tf.Queue.BESTEFFORT,
+    to_file=True,  # whether `shell_out` is dumped to a file or returned directly
+    wall_time=tf.walltime(minutes=2),
+    prgm=tf.Program.OARCTL,  # `OARCTL` blocks (the main process runs until all jobs end), `OARSUB` does not
+)
+# Write scripts
+jobs.dump(
+    # python_path=[...],  # list of paths that will be added to PYTHONPATH
+    # MY_ENV=...,  # additional environment variables can be passed as kwargs
+)
+# Start the job array
+shell_out = jobs.run() # blocking operation if prgm=tf.Program.OARCTL
+print(shell_out)
+```
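+
+Each job runs `job_script` with its own line of `data`. For illustration, here is a minimal sketch of what `job.py` could look like, assuming the launcher passes the job's parameters as a single JSON string on the command line (the exact payload format is an assumption based on the comment above, not a documented contract of `OarLauncher`):
+
+```python
+import json
+import sys
+
+
+def main():
+    # Assumed input: one JSON object per job, e.g. '{"simu_dir": "d_3", "infos": "this is job 3"}'
+    params = json.loads(sys.argv[1])
+    simu_dir = params["simu_dir"]
+    infos = params["infos"]
+
+    # Replace this with the actual per-job work
+    print(f"Running simulation in {simu_dir}: {infos}")
+
+
+if __name__ == "__main__":
+    main()
+```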
+
+## `tf.start_oar`
+
+```python
+def start_oar(
+    runme_str,
+    logs_dir: Union[tf.Tree, str] = None,
+    array_fname: str = None,
+    wall_time: str = walltime(minutes=1),
+    host: int = 1,
+    core: int = 1,
+    job_name: str = None,
+    queue: str = Queue.DEFAULT,
+    cmd_fname: str = None,
+    runme_args: List[str] = None,
+    do_run: bool = True,
+    with_json: bool = False,
+    notify: List = None,
+    prgm: str = Program.OARSUB,
+    stdout: str = None,
+    stderr: str = None,
+) -> Union[str, List[str]]:
+    """
+    Builds an oar command.
+
+    Usage example:
+
+    .. code::
+
+        cdir = tf.Tree.new(__file__)
+        sdir = cdir.dir("OarOut").dump(clean=True)
+        res = start_oar(
+            runme_str=cdir.path("runme.sh"),
+            logs_dir=sdir,
+            wall_time=walltime(minutes=10),
+            queue="besteffort",
+            core=2,
+            cmd_fname=sdir.path("cmd.sh"),
+            do_run=True,
+        )
+
+    :param runme_str: path to the runme script or command line
+    :param logs_dir: directory for std out/err
+    :param array_fname: path to the arguments file (array file)
+    :param wall_time: wall time of the job
+    :param host: number of nodes
+    :param core: number of cores
+    :param job_name: job name
+    :param queue: job queue ['default', 'besteffort']
+    :param cmd_fname: path to a file to save the oar command
+    :param runme_args: list of command-line arguments given to the runme script
+    :param do_run: whether to execute the command or not
+    :param with_json: add the -J option to the oarsub command
+    :param notify: notify options (List); you may use the NotifyOar class to build this option
+    :param prgm: `oarsub` or `oarctl sub`
+    :param stdout: path for stdout
+    :param stderr: path for stderr, defaults to stdout if None
+    :return: the output of the oar command if `do_run` is True, else the oar command itself
+    """
+```
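+
+As described in the docstring, when `do_run` is False the oar command is returned instead of being executed. A minimal sketch of a stand-alone call, assuming `treefiles` is imported as `tf` and exposes `start_oar`, `walltime`, and `Tree` as shown above (all paths here are illustrative):
+
+```python
+import treefiles as tf
+
+cdir = tf.Tree.new(__file__)
+logs = cdir.dir("OarOut").dump(clean=True)  # directory that will receive std out/err
+
+cmd = tf.start_oar(
+    runme_str=cdir.path("runme.sh"),  # an existing runme script next to this file
+    logs_dir=logs,
+    wall_time=tf.walltime(minutes=5),
+    do_run=False,  # only build and return the oar command, do not submit it
+)
+print(cmd)
+```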
+
+
+
+%prep
+%autosetup -n OarLauncher-0.2.24
+
+%build
+%py3_build
+
+%install
+%py3_install
+install -d -m755 %{buildroot}/%{_pkgdocdir}
+if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi
+if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi
+if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi
+if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi
+pushd %{buildroot}
+if [ -d usr/lib ]; then
+ find usr/lib -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/lib64 ]; then
+ find usr/lib64 -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/bin ]; then
+ find usr/bin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+if [ -d usr/sbin ]; then
+ find usr/sbin -type f -printf "/%h/%f\n" >> filelist.lst
+fi
+touch doclist.lst
+if [ -d usr/share/man ]; then
+ find usr/share/man -type f -printf "/%h/%f.gz\n" >> doclist.lst
+fi
+popd
+mv %{buildroot}/filelist.lst .
+mv %{buildroot}/doclist.lst .
+
+%files -n python3-OarLauncher -f filelist.lst
+%dir %{python3_sitelib}/*
+
+%files help -f doclist.lst
+%{_docdir}/*
+
+%changelog
+* Wed May 10 2023 Python_Bot <Python_Bot@openeuler.org> - 0.2.24-1
+- Package Spec generated